Oof... Just as I finally finished working out this algorithm and had almost written it, I suddenly found a bug and started rethinking everything. Just as I got onto a straight path, night fell, and tomorrow it's off to the institute ((((

Андреев Григорий 2025-11-17 03:32:35 +03:00
parent 1a45c22952
commit 310b56e372
13 changed files with 915 additions and 583 deletions

View File

@ -10,7 +10,7 @@ void generate_l1_headers_for_l1_5() {
SpanU8 ns = cstr("embassy_l1_5");
generate_eve_span_company_for_primitive(l, ns, cstr("NamedVariableRecordRef"), false, true);
generate_eve_span_company_for_primitive(l, ns, cstr("NamedMethodSignatureRecordRef"), false, true);
generate_eve_span_company_for_primitive(l, ns, cstr("RBTreeNode"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("BuffRBTreeNode"), true, false);
}
#endif

View File

@ -30,6 +30,9 @@ void generate_margaret_eve_for_vulkan_utils() {
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferKindDescription"), false, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretOldBufferResizeRecord"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretResizeToNascentRecord"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretSubBufRelocationRequest"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns, cstr("VecMargaretSubBufRelocationRequest"), true, false);
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
/* We won't need to clone this type at all. It is actually clonable, but we deliberately
* made it non-clonable */
@ -62,7 +65,8 @@ void generate_margaret_eve_for_vulkan_utils() {
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeImage"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestResizeSubBuffer"), true, false);
generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
.T = cstr("MargaretMemAllocatorRequestResizeSubBuffer"), .t_primitive = true, .vec_extended = true});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocImage"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns,

View File

@ -85,4 +85,6 @@ NODISCARD VecU8 prepend_spaces_to_SpanU8_lines(SpanU8 lines, int tabulation){
return res;
}
#define EVE_MESSAGE "/* Automatically generated file. Don't edit it.\n * Don't include it in more than one place */\n\n"
#endif

View File

@ -1,13 +1,13 @@
#ifndef prototype1_src_l1_5_anne_l1_5_templ_very_base_h
#define prototype1_src_l1_5_anne_l1_5_templ_very_base_h
#include "../codegen/rb_tree_set_map_template_inst.h"
#include "../codegen/buff_rbtree_set_map_template_inst.h"
void generate_l1_5_template_instantiation_for_base_types(){
SpanU8 l = cstr("l1_5"), ns = cstr("");
generate_rb_tree_Set_templ_inst_guarded_header(l, ns,cstr("#include \"../l1/VecAndSpan_U64.h\""),
generate_buff_rbtree_Set_templ_inst_guarded_header(l, ns,cstr("#include \"../l1/VecAndSpan_U64.h\""),
(set_instantiation_op){.T = cstr("U64"), .t_integer = true});
generate_rb_tree_Set_templ_inst_guarded_header(l, ns, cstr("#include \"../l1/VecAndSpan_S64.h\""),
generate_buff_rbtree_Set_templ_inst_guarded_header(l, ns, cstr("#include \"../l1/VecAndSpan_S64.h\""),
(set_instantiation_op){.T = cstr("S64"), .t_integer = true});
}
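
/* A minimal usage sketch of one instantiation requested above, assuming the generated names
 * follow the format strings in buff_rbtree_set_map_template_inst.h and that the header this
 * call writes to l1_5/BuffRBTree_SetU64.h is included; not verbatim generated code: */
void buff_rbtree_set_u64_smoke_test() {
    BuffRBTree_SetU64 s = BuffRBTree_SetU64_new();
    bool fresh = BuffRBTree_SetU64_insert(&s, 42); /* true: no key collision */
    assert(fresh);
    U64 it = BuffRBTree_SetU64_find(&s, 42); /* iterator index; 0 means "not found" */
    assert(it != 0 && *BuffRBTree_SetU64_at_iter(&s, it) == 42);
    bool erased = BuffRBTree_SetU64_erase(&s, 42);
    assert(erased);
    BuffRBTree_SetU64_drop(s);
}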

View File

@ -1,7 +1,7 @@
#ifndef prototype1_src_l1_5_anne_margaret_h
#define prototype1_src_l1_5_anne_margaret_h
#include "../codegen/rb_tree_set_map_template_inst.h"
#include "../codegen/buff_rbtree_set_map_template_inst.h"
void generate_l1_5_template_instantiations_for_margaret(){
SpanU8 l = cstr("l1_5"), ns = cstr("margaret");
@ -9,15 +9,14 @@ void generate_l1_5_template_instantiations_for_margaret(){
mkdir_nofail("l1_5/eve/margaret");
/* For MargaretMemAllocator */
generate_rb_tree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
generate_buff_rbtree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
.T = cstr("MargaretFreeMemSegment"),
.t_primitive = true,
.alternative_less = cstr("MargaretFreeMemSegment_less"),
.alternative_equal = cstr("MargaretFreeMemSegment_equal"),
.alternative_comp_set_name_embed = cstr("Len"),
.unconditional_equality = true,
});
generate_rb_tree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
generate_buff_rbtree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
.T = cstr("MargaretFreeMemSegment"),
.t_primitive = true,
/* comparison takes additional U8 parameter */
@ -25,15 +24,12 @@ void generate_l1_5_template_instantiations_for_margaret(){
.alternative_equal = cstr("MargaretFreeMemSegment_equal_resp_align"),
.alternative_comp_set_name_embed = cstr("LenRespAlign"),
.guest_data_T = cstr("U8"),
.unconditional_equality = true,
});
generate_rb_tree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
generate_buff_rbtree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
.K = cstr("U64"), .k_integer = true, .V = cstr("MargaretMemoryOccupation"), /* MargaretMemoryOccupation is not primitive */
.unconditional_equality = true
});
generate_rb_tree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
generate_buff_rbtree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
.K = cstr("U64"), .k_integer = true, .V = cstr("MargaretBufferOccupationSubBuffer"), .v_primitive = true,
.unconditional_equality = true
});
}
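
/* The alternative comparators named above are expected by the generated code to have roughly
 * these shapes (reconstructed from the key_ref/key_value format strings in
 * buff_rbtree_set_map_template_inst.h; the declarations are an assumption, not part of this commit):
 *
 *   bool MargaretFreeMemSegment_less(const MargaretFreeMemSegment* a, const MargaretFreeMemSegment* b);
 *   bool MargaretFreeMemSegment_less_resp_align(const MargaretFreeMemSegment* a,
 *                                               const MargaretFreeMemSegment* b, U8 guest);
 *
 * The guest_data_T = U8 instantiation stores the alignment inside the set and passes it as the
 * trailing argument on every comparison. */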

View File

@ -17,12 +17,6 @@ typedef struct {
* Leave empty if you don't need guest data (GT = void)
* GT must be primitive or, even better, an integer */
SpanU8 guest_data_T;
/* If `unconditional_equality` is set, the methods that would return a value T
* or a reference to T are not generated.
* I did, alas, write support for `unconditional_equality = false`, but it should be noted
* that `unconditional_equality = false` is absolutely useless */
bool unconditional_equality;
} set_instantiation_op;
void set_instantiation_op_fix(set_instantiation_op* self){
@ -40,8 +34,6 @@ void set_instantiation_op_fix(set_instantiation_op* self){
assert(self->alternative_comp_set_name_embed.len > 0);
assert(self->alternative_equal.len > 0 && self->alternative_less.len > 0);
}
if (self->t_integer && self->alternative_comp_set_name_embed.len == 0)
self->unconditional_equality = true;
}
/* We assume K and V are trivially movable */
@ -60,20 +52,21 @@ typedef struct {
SpanU8 alternative_comp_map_name_embed;
SpanU8 guest_data_T;
/* If `unconditional_equality` is set, the methods that return the value (K, V)
* or references to K and V are generated so that they return only the V part. I don't plan to support
* `unconditional_equality = false`
*/
bool unconditional_equality;
} map_instantiation_op;
void map_instantiation_op_fix(map_instantiation_op* self){
assert(self->K.len > 0);
if (self->k_integer)
self->k_primitive = true;
if (self->k_primitive)
self->k_clonable = true;
assert(self->K.len > 0 && self->V.len > 0);
if (self->V.len == 0)
self->v_primitive = true;
if (self->v_integer)
self->v_primitive = true;
if (self->v_primitive)
self->v_clonable = true;
assert((self->alternative_less.len == 0 && self->alternative_equal.len == 0
&& self->alternative_comp_map_name_embed.len == 0
)||(
@ -83,10 +76,6 @@ void map_instantiation_op_fix(map_instantiation_op* self){
assert(self->alternative_comp_map_name_embed.len > 0);
assert(self->alternative_equal.len > 0 && self->alternative_less.len > 0);
}
if (self->k_integer && self->alternative_comp_map_name_embed.len == 0)
self->unconditional_equality = true;
if (!self->unconditional_equality)
abortf("map_instantiation_op_fix::unconditional_equality = false isn't supported\n");
}
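/* A small illustration of the fix-up cascade above (hypothetical call, not from this commit;
 * "Foo" is a made-up value type). Callers only need to set the strongest property they know: */
void map_op_fix_example() {
    map_instantiation_op op = {.K = cstr("U64"), .k_integer = true, .V = cstr("Foo")};
    map_instantiation_op_fix(&op);
    assert(op.k_primitive && op.k_clonable); /* both implied by k_integer */
}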
#endif

View File

@ -3,99 +3,99 @@
#include "all_set_map_templ_util_inst.h"
SpanU8 codegen_rb_tree_map__key_of_cur_el(map_instantiation_op op){
SpanU8 codegen_buff_rbtree_map__key_of_cur_el(map_instantiation_op op){
return op.V.len > 0 ? cstr("self->el.buf[cur - 1].key") : cstr("self->el.buf[cur - 1]");
}
/* When the key is passed by value to some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_value_NOT_EQUAL_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_value_NOT_EQUAL_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_equal.len > 0);
if (op.k_integer)
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_equal.len > 0) {
if (op.k_integer)
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key != %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(&key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key != %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(&key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When the key is passed by value to some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_value_LESS_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_value_LESS_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_less.len > 0);
if (op.k_integer)
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_less.len > 0) {
if (op.k_integer)
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key < %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(&key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key < %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(&key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When the key is passed by reference to some method of Buff_RBTreeSet.
* Of course, when op.T is an integer, the argument is still taken by value */
NODISCARD VecU8 codegen_rb_tree_map__key_ref_NOT_EQUAL_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_ref_NOT_EQUAL_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_equal.len > 0);
if (op.k_integer)
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_equal.len > 0) {
if (op.k_integer)
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key != %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key != %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When the key is passed by pointer to some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_ref_EQUAL_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_ref_EQUAL_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_equal.len > 0);
if (op.k_integer)
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_equal.len > 0) {
if (op.k_integer)
return VecU8_fmt("%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key == %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s_equal_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key == %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s_equal_%s(key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When the key is passed by pointer to some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_ref_LESS_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_ref_LESS_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_less.len > 0);
if (op.k_integer)
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_less.len > 0) {
if (op.k_integer)
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key < %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key < %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
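/* Examples of the comparison expressions these helpers emit, for instantiations that actually
 * occur in this commit (reconstructed by hand from the format strings above; treat as a sketch):
 *   .T = U64, t_integer, set                 ->  key < self->el.buf[cur - 1]
 *   MargaretFreeMemSegment, alternative_less ->  MargaretFreeMemSegment_less(key, &self->el.buf[cur - 1])
 *   same, with guest_data_T = U8             ->  MargaretFreeMemSegment_less_resp_align(key, &self->el.buf[cur - 1], self->guest)
 *   map with K = U64 (V non-empty)           ->  key < self->el.buf[cur - 1].key */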
@ -104,11 +104,11 @@ NODISCARD VecU8 codegen_rb_tree_map__key_ref_LESS_element(map_instantiation_op o
* an integer is better than a pointer to an integer. (Though notice that the _pop family of methods doesn't exist for
* sets of integers.)
*/
NODISCARD VecU8 codegen_rb_tree_map__taking_ref_k_argument(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__taking_ref_k_argument(map_instantiation_op op){
return op.k_integer ? VecU8_from_span(op.K) : VecU8_fmt("const %s*", op.K);
}
NODISCARD VecU8 codegen_rb_tree_map__taking_t_argument(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__taking_t_argument(map_instantiation_op op){
return op.V.len > 0 ? VecU8_fmt("%s key, %s value", op.K, op.V) : VecU8_fmt("%s key", op.K);
}
@ -116,12 +116,12 @@ NODISCARD VecU8 codegen_rb_tree_map__taking_t_argument(map_instantiation_op op){
* set is either a set name or a map name. If we are instantiating a set, TT is op.T from the set options; if we are
* instantiating a map, TT is KVP{op.K}To{op.V} from the map options
* */
void codegen_append_rb_tree_map__structure_and_simplest_methods(
void codegen_append_buff_rbtree_map__structure_and_simplest_methods(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 TT
){
VecU8_append_vec(res, VecU8_fmt(
"typedef struct {\n"
SPACE "VecRBTreeNode tree;\n"
SPACE "VecBufRBTreeNode tree;\n"
SPACE "U64 root;\n"
SPACE "Vec%s el;\n"
"%v"
@ -131,14 +131,14 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
if (op.guest_data_T.len > 0) {
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new(%s guest) {\n" /* set, set, op.guest_data_T */
SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new(), .guest = guest};\n" /* set, TT */
SPACE "return (%s){.tree = VecBufRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new(), .guest = guest};\n" /* set, TT */
"}\n\n",
set, set, op.guest_data_T,
set, TT));
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new_reserved(%s guest, size_t size) {\n" /* set, set, op.guest_data_T */
SPACE "return (%s){.tree = (VecRBTreeNode){\n" /* set */
SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE "return (%s){.tree = (VecBufRBTreeNode){\n" /* set */
SPACE SPACE ".buf = (BufRBTreeNode*)safe_calloc(size + 1, sizeof(BufRBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size), .guest = guest};\n" /* TT */
"}\n\n",
set, set, op.guest_data_T,
@ -146,14 +146,14 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
} else {
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new() {\n" /* set, set */
SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new()};\n" /* set, TT */
SPACE "return (%s){.tree = VecBufRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new()};\n" /* set, TT */
"}\n\n",
set, set,
set, TT));
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new_reserved(size_t size) {\n" /* set, set */
SPACE "return (%s){.tree = (VecRBTreeNode){\n"
SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE "return (%s){.tree = (VecBufRBTreeNode){\n"
SPACE SPACE ".buf = (BufRBTreeNode*)safe_calloc(size + 1, sizeof(BufRBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size)};\n" /* set, TT */
"}\n\n",
set, set,
@ -162,10 +162,17 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
VecU8_append_vec(res, VecU8_fmt(
"void %s_drop(%s self) {\n" /* set, set */
SPACE "VecRBTreeNode_drop(self.tree);\n"
SPACE "VecBufRBTreeNode_drop(self.tree);\n"
SPACE "Vec%s_drop(self.el);\n" /* TT */
"}\n\n", set, set, TT));
VecU8_append_vec(res, VecU8_fmt(
"void %s_sink(%s* self) {\n" /* set, set */
SPACE "self->tree.len = 1;\n"
SPACE "self->tree.buf[0] = (BufRBTreeNode){0};\n"
SPACE "Vec%s_sink(&self->el, 0);\n" /* TT */
"}\n\n", set, set, TT));
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */
SPACE "U64 cur = self->root;\n"
@ -178,16 +185,16 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return cur;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_NOT_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_NOT_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
if (op.k_clonable && op.v_clonable) {
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_clone(const %s* self){\n" /* set, set, set */
SPACE "return (%s){.tree = VecRBTreeNode_clone(&self->tree), .root = self->root,\n" /* set */
SPACE "return (%s){.tree = VecBufRBTreeNode_clone(&self->tree), .root = self->root,\n" /* set */
SPACE SPACE ".el = Vec%s_clone(&self->el)%s};\n" /* TT, whether to clone guest or no */
"}\n\n",
set, set, set,
@ -214,7 +221,7 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
"U64 %s_find_prev(const %s* self, U64 x){\n"
SPACE "assert(x != 0 && x < self->tree.len);\n"
SPACE "if (self->tree.buf[x].left != 0)\n"
SPACE SPACE "return RBTree_maximum_in_subtree(self->tree.buf, self->tree.buf[x].left);\n"
SPACE SPACE "return BufRBTree_maximum_in_subtree(self->tree.buf, self->tree.buf[x].left);\n"
SPACE "while (true) {\n"
SPACE SPACE "U64 p = self->tree.buf[x].parent;\n"
SPACE SPACE "if (p == 0)\n"
@ -227,12 +234,12 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find_min(const %s* self) {\n"
SPACE "return self->root != 0 ? RBTree_minimum_in_subtree(self->tree.buf, self->root) : 0;\n"
SPACE "return self->root != 0 ? BufRBTree_minimum_in_subtree(self->tree.buf, self->root) : 0;\n"
"}\n\n", set, set));
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find_max(const %s* self) {\n"
SPACE "return self->root != 0 ? RBTree_maximum_in_subtree(self->tree.buf, self->root) : 0;\n"
SPACE "return self->root != 0 ? BufRBTree_maximum_in_subtree(self->tree.buf, self->root) : 0;\n"
"}\n\n", set, set));
VecU8_append_vec(res, VecU8_fmt(
@ -256,9 +263,9 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_less;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
VecU8_append_vec(res, VecU8_fmt(
@ -277,9 +284,9 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_less;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
VecU8_append_vec(res, VecU8_fmt(
@ -303,9 +310,9 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_grtr;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
@ -325,15 +332,15 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_grtr;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
}
/* Generates the methods _insert(), _pop_substitute(), and _erase_substitute() for SetT.
* Takes ownership of the strings Tc, Fc */
void codegen_append_rb_tree_map__insert_kind_method(
void codegen_append_buff_rbtree_map__insert_kind_method(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT, VecU8 Tc, VecU8 Fc
){
VecU8 Tc_root = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc), 2);
@ -351,7 +358,7 @@ void codegen_append_rb_tree_map__insert_kind_method(
"%v %s_%s(%s* self, %v) {\n" /* RT, set, method_name, set, taking_t_argument */
SPACE "if (self->root == 0) {\n"
SPACE SPACE "assert(self->tree.len == 1);\n"
SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.color = RBTree_black});\n"
SPACE SPACE "VecBufRBTreeNode_append(&self->tree, (BufRBTreeNode){.color = RBTree_black});\n"
SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */
SPACE SPACE "self->root = 1;\n"
"%v" /* Tc_root */
@ -365,9 +372,9 @@ void codegen_append_rb_tree_map__insert_kind_method(
SPACE SPACE SPACE "} else { \n"
/* We are inserting to the left of cur */
SPACE SPACE SPACE SPACE "U64 n = self->tree.len;\n"
SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "VecBufRBTreeNode_append(&self->tree, (BufRBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "self->tree.buf[cur].left = n;\n"
SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "BufRBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */
"%v" /* Tc_on_left */
/* Should have returned by now in Tc*/
@ -378,9 +385,9 @@ void codegen_append_rb_tree_map__insert_kind_method(
SPACE SPACE SPACE "} else {\n"
/* We are inserting to the right of cur */
SPACE SPACE SPACE SPACE "U64 n = self->tree.len;\n"
SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "VecBufRBTreeNode_append(&self->tree, (BufRBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "self->tree.buf[cur].right = n;\n"
SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "BufRBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */
"%v" /* Tc_on_right */
/* Should have returned by now in Tc*/
@ -390,11 +397,11 @@ void codegen_append_rb_tree_map__insert_kind_method(
"%v" /* Fc_exists */
/* Should have returned by now in Tc*/
"}\n\n",
RT, set, method_name, set, codegen_rb_tree_map__taking_t_argument(op),
RT, set, method_name, set, codegen_buff_rbtree_map__taking_t_argument(op),
VecU8_to_span(&line_that_appends_new_el_to_el_vec), // !!
Tc_root,
codegen_rb_tree_map__key_value_NOT_EQUAL_element(op),
codegen_rb_tree_map__key_value_LESS_element(op),
codegen_buff_rbtree_map__key_value_NOT_EQUAL_element(op),
codegen_buff_rbtree_map__key_value_LESS_element(op),
VecU8_to_span(&line_that_appends_new_el_to_el_vec),
Tc_on_left,
VecU8_to_span(&line_that_appends_new_el_to_el_vec),
@ -405,12 +412,12 @@ void codegen_append_rb_tree_map__insert_kind_method(
VecU8_drop(line_that_appends_new_el_to_el_vec);
}
void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set){
void codegen_append_buff_rbtree_map__method_empty_index_erase(VecU8* res, SpanU8 set){
VecU8_append_vec(res, VecU8_fmt(
"/* UNSAFE. Use when you dropped the symbol that is about to be deleted */\n"
"void %s_empty_index_erase(%s* self, U64 z) {\n" /* set, set */
SPACE "assert(z != 0 && z < self->tree.len);\n"
SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : RBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n"
SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : BufRBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n"
SPACE "U64 x = self->tree.buf[y].left != 0 ? self->tree.buf[y].left : self->tree.buf[y].right;\n"
SPACE "assert(x != y && x != z);\n"
SPACE "U64 x_adopter = self->tree.buf[y].parent;\n"
@ -423,13 +430,13 @@ void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set
SPACE SPACE "self->tree.buf[x_adopter].right = x;\n"
SPACE "RBTreeColor y_org_clr = self->tree.buf[y].color;\n"
SPACE "if (z != y) {\n"
SPACE SPACE "RBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n"
SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n"
SPACE SPACE "if (x_adopter == z)\n"
SPACE SPACE SPACE "x_adopter = y;\n"
SPACE "}\n"
SPACE "U64 L = self->el.len;\n"
SPACE "if (L != z) {\n"
SPACE SPACE "RBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n"
SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n"
SPACE SPACE "self->el.buf[z-1] = self->el.buf[L-1];\n"
SPACE SPACE "if (L == x)\n"
SPACE SPACE SPACE "x = z;\n"
@ -440,12 +447,12 @@ void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set
SPACE "self->tree.len--;\n"
SPACE "self->el.len--;\n"
SPACE "if (y_org_clr == RBTree_black)\n"
SPACE SPACE "RBTree_fix_after_delete(self->tree.buf, &self->root, x);\n"
SPACE SPACE "BufRBTree_fix_after_delete(self->tree.buf, &self->root, x);\n"
"}\n\n",
set, set));
}
void codegen_append_rb_tree_map__erase_kind_method(
void codegen_append_buff_rbtree_map__erase_kind_method(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT,
VecU8 Fc, VecU8 Tc_cur_available, VecU8 Tc_returning
){
@ -474,68 +481,39 @@ void codegen_append_rb_tree_map__erase_kind_method(
SPACE "%s_empty_index_erase(self, cur);\n" /* set */
"%v" /* ret_found_case */
"}\n\n",
RT, set, method_name, set, codegen_rb_tree_map__taking_ref_k_argument(op),
RT, set, method_name, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
not_found_case,
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op),
saving_prev,
set,
ret_found_case));
}
/* When a method returns a constant pointer to the found key (wrapped in an Option), we use this type.
* Of course, it can turn out that it is not generated. So be careful and generate it yourself.
*/
NODISCARD VecU8 codegen_rb_tree_map__option_returned_ref_t(map_instantiation_op op, bool mut){
/* Constant pointer to an integer is an integer */
// Because we don't generate such methods if V is void and we have unconditional_equality
assert(!op.unconditional_equality || op.V.len > 0);
// Because we don't support it
assert(!(op.V.len > 0) || op.unconditional_equality);
// Key can't be mutable
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("Option%s", op.V);
return mut ? VecU8_fmt("OptionRefMut%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
}
return op.k_integer ? VecU8_fmt("Option%s", op.K) : VecU8_fmt("OptionRef%s", op.K);
NODISCARD VecU8 codegen_buff_rbtree_map__option_returned_ref_v(map_instantiation_op op, bool mut){
assert(op.V.len > 0);
if (op.v_integer)
return VecU8_fmt("Option%s", op.V);
return mut ? VecU8_fmt("OptionRefMut%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
}
NODISCARD VecU8 codegen_rb_tree_map__some_ref_t(map_instantiation_op op, bool mut){
assert(!op.unconditional_equality || op.V.len > 0);
assert(!(op.V.len > 0) || op.unconditional_equality);
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1].value)", op.V);
if (mut)
return VecU8_fmt("Some_RefMut%s(&self->el.buf[cur - 1].value)", op.V);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1].value)", op.V);
}
if (op.k_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1])", op.K);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1])", op.K);
NODISCARD VecU8 codegen_buff_rbtree_map__some_ref_v(map_instantiation_op op, bool mut){
assert(op.V.len > 0);
if (op.v_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1].value)", op.V);
if (mut)
return VecU8_fmt("Some_RefMut%s(&self->el.buf[cur - 1].value)", op.V);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1].value)", op.V);
}
NODISCARD VecU8 codegen_rb_tree_map__none_ref_t(map_instantiation_op op, bool mut){
assert(!op.unconditional_equality || op.V.len > 0);
assert(!(op.V.len > 0) || op.unconditional_equality);
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("None_%s()", op.V);
return mut ? VecU8_fmt("None_RefMut%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V) ;
}
return op.k_integer ? VecU8_fmt("None_%s()", op.K) : VecU8_fmt("None_Ref%s()", op.K);
NODISCARD VecU8 codegen_buff_rbtree_map__none_ref_v(map_instantiation_op op, bool mut){
assert(op.V.len > 0);
if (op.v_integer)
return VecU8_fmt("None_%s()", op.V);
return mut ? VecU8_fmt("None_RefMut%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V) ;
}
/* Implementing it for a set was the biggest mistake of my day */
void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
void codegen_append_buff_rbtree_map__method_at(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
VecU8_append_vec(res, VecU8_fmt(
"%v %s_%s(%s%s* self, %v key) {\n" /* option_returned_ref_t, set, mat/at, e/const, set, taking_ref_t_argument */
SPACE "U64 cur = self->root;\n"
@ -550,17 +528,17 @@ void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op,
SPACE "}\n"
SPACE "return %v;\n" /* none_ref_t */
"}\n\n",
codegen_rb_tree_map__option_returned_ref_t(op, mut), set, mut ? cstr("mat") : cstr("at"),
mut ? cstr("") : cstr("const "), set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__option_returned_ref_v(op, mut), set, mut ? cstr("mat") : cstr("at"),
mut ? cstr("") : cstr("const "), set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__some_ref_t(op, mut),
codegen_rb_tree_map__key_ref_LESS_element(op),
codegen_rb_tree_map__none_ref_t(op, mut)
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__some_ref_v(op, mut),
codegen_buff_rbtree_map__key_ref_LESS_element(op),
codegen_buff_rbtree_map__none_ref_v(op, mut)
));
}
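/* Sample emitted signature for the U64 -> MargaretBufferOccupationSubBuffer map instantiated
 * above (v_primitive but not v_integer, mut = false; an assumption read off the format strings):
 *
 *   OptionRefMargaretBufferOccupationSubBuffer
 *   BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at(
 *       const BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer* self, U64 key);
 *
 * On a hit it returns Some_RefMargaretBufferOccupationSubBuffer(&self->el.buf[cur - 1].value),
 * otherwise None_RefMargaretBufferOccupationSubBuffer(). */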
NODISCARD VecU8 get_name_of_rb_tree_set_structure(set_instantiation_op op){
NODISCARD VecU8 get_name_of_buff_rbtree_set_structure(set_instantiation_op op){
if (op.alternative_comp_set_name_embed.len)
return VecU8_fmt("BuffRBTreeBy%s_Set%s", op.alternative_comp_set_name_embed, op.T);
return VecU8_fmt("BuffRBTree_Set%s", op.T);
@ -569,10 +547,10 @@ NODISCARD VecU8 get_name_of_rb_tree_set_structure(set_instantiation_op op){
/* src/l1_5/core/rb_tree_node.h is a dependency of all instantiations of rb_tree_set template
* Don't forget to include them
* */
NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op op){
NODISCARD VecU8 generate_buff_rbtree_Set_template_instantiation(set_instantiation_op op){
set_instantiation_op_fix(&op);
VecU8 res = VecU8_new();
VecU8 g_set = get_name_of_rb_tree_set_structure(op);
VecU8 g_set = get_name_of_buff_rbtree_set_structure(op);
SpanU8 set = VecU8_to_span(&g_set);
map_instantiation_op map_op = {.K = op.T,
@ -580,14 +558,13 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
.V = cstr(""), .v_primitive = true, .v_clonable = true,
.alternative_equal = op.alternative_equal, .alternative_less = op.alternative_less,
.alternative_comp_map_name_embed = op.alternative_comp_set_name_embed, .guest_data_T = op.guest_data_T,
.unconditional_equality = op.unconditional_equality
};
codegen_append_rb_tree_map__structure_and_simplest_methods(&res, map_op, set, op.T);
codegen_append_buff_rbtree_map__structure_and_simplest_methods(&res, map_op, set, op.T);
/* Method _insert() does not try to replace an existing element with an equal key;
* it returns true if the insertion was done, false if a collision happened and the key was not inserted */
codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("insert"), vcstr("bool"),
codegen_append_buff_rbtree_map__insert_kind_method(&res, map_op, set, cstr("insert"), vcstr("bool"),
vcstr("return true;\n"),
op.t_primitive ?
vcstr("return false;\n") :
@ -596,48 +573,13 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
"return false;\n",
op.T));
if (!op.unconditional_equality) {
/* Method _erase_substitute() is a bolder version of the _insert() method. It will substitute the
* previous element with an equal key if one is found. It still returns true if no conflict has happened, though */
codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("erase_substitute"), vcstr("bool"),
vcstr("return true;\n"),
op.t_primitive ?
vcstr("return false;\n") :
VecU8_fmt(
"%s_drop(self->el.buf[cur - 1]);\n"
"self->el.buf[cur - 1] = key;\n"
"return false;\n",
op.T));
codegen_append_buff_rbtree_map__method_empty_index_erase(&res, set);
/* Method _pop_substitute() is just like _erase_substitute(), but it returns the previous key
* that was overthrown by the collision. Wrapped in an Option, of course */
codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("pop_substitute"),
VecU8_fmt("Option%s", op.T),
VecU8_fmt("return None_%s();\n", op.T),
VecU8_fmt(
"%s saved = self->el.buf[cur - 1];\n" /* op.T */
"self->el.buf[cur - 1] = key;\n"
"return Some_%s(saved);", /* op.V */
op.T, op.T));
}
codegen_append_rb_tree_map__method_empty_index_erase(&res, set);
codegen_append_rb_tree_map__erase_kind_method(&res, map_op, set, cstr("erase"), vcstr("bool"),
codegen_append_buff_rbtree_map__erase_kind_method(&res, map_op, set, cstr("erase"), vcstr("bool"),
vcstr("return false;\n"),
op.t_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1]);\n", op.T),
vcstr("return true;\n"));
if (!op.unconditional_equality) {
codegen_append_rb_tree_map__erase_kind_method(&res, map_op, set, cstr("pop"),
VecU8_fmt("Option%s", op.T),
VecU8_fmt("return None_%s();\n", op.T),
VecU8_fmt("%s saved = self->el.buf[cur - 1];\n", op.T),
VecU8_fmt("return Some_%s(saved);\n", op.T));
codegen_append_rb_tree_map__method_at(&res, map_op, set, false);
}
VecU8_append_vec(&res, VecU8_fmt(
"const %s* %s_at_iter(const %s* self, U64 it) {\n" /* op.T, set, set */
SPACE "assert(0 < it && it < self->tree.len);\n"
@ -650,38 +592,37 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
}
void generate_rb_tree_Set_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, set_instantiation_op op) {
VecU8 text = VecU8_from_cstr("/* Automatically generated file. Do not edit it.\n"
" * Do not include it in more than one place */\n\n");
VecU8_append_vec(&text, generate_rb_tree_Set_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_rb_tree_set_structure(op), 0);
void generate_buff_rbtree_Set_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, set_instantiation_op op) {
VecU8 text = vcstr(EVE_MESSAGE);
VecU8_append_vec(&text, generate_buff_rbtree_Set_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_buff_rbtree_set_structure(op), 0);
write_whole_file_or_abort((const char*)nt_path.buf, VecU8_to_span(&text));
VecU8_drop(nt_path);
VecU8_drop(text);
}
void generate_rb_tree_Set_templ_inst_guarded_header(
void generate_buff_rbtree_Set_templ_inst_guarded_header(
SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, set_instantiation_op op
){
assert(layer.len > 1);
VecU8 path = VecU8_fmt("%s/%s%s%v.h", layer, bonus_ns, bonus_ns.len ? cstr("/") : cstr(""),
get_name_of_rb_tree_set_structure(op));
get_name_of_buff_rbtree_set_structure(op));
GeneratedHeader head = begin_header(VecU8_to_span(&path));
VecU8_drop(path);
VecU8_append_span(&head.result, cstr("#include \"../../"));
int to_my_layer = get_number_of_parts_in_header_namespace(bonus_ns);
for (int i = 0; i < to_my_layer; i++)
VecU8_append_span(&head.result, cstr("../"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/buff_rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, dependencies);
VecU8_append_span(&head.result, cstr("\n\n"));
VecU8_append_vec(&head.result, generate_rb_tree_Set_template_instantiation(op));
VecU8_append_vec(&head.result, generate_buff_rbtree_Set_template_instantiation(op));
finish_header(head);
}
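/* Path arithmetic sketch for the guarded variant: for the call in anne_l1_5_templ_very_base.h
 * with layer = "l1_5" and bonus_ns = "", the header lands at l1_5/BuffRBTree_SetU64.h and its
 * include block starts with
 *   #include "../../src/l1_5/core/buff_rb_tree_node.h"
 *   #include "../l1/VecAndSpan_U64.h"
 * assuming get_number_of_parts_in_header_namespace("") == 0; each extra namespace part would
 * prepend one more "../" to the core include. */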
/* ========= Now we add Map<K, V> into the picture ======== */
void codegen_append_rb_tree_map__method_at_iter(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
void codegen_append_buff_rbtree_map__method_at_iter(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
assert(op.V.len > 0);
VecU8_append_vec(res, VecU8_fmt(
"void %s_%s(%s%s* self, U64 it, %v* ret_key, %v* ret_value) {\n" /* set, method name, self access modifier, set, key ret ptr, value ret ptr */
@ -696,30 +637,31 @@ void codegen_append_rb_tree_map__method_at_iter(VecU8* res, map_instantiation_op
op.k_integer ? cstr("") : cstr("&"), op.v_integer ? cstr("") : cstr("&")));
}
NODISCARD VecU8 get_name_of_rb_tree_map_structure(map_instantiation_op op){
NODISCARD VecU8 get_name_of_buff_rbtree_map_structure(map_instantiation_op op){
if (op.alternative_comp_map_name_embed.len)
return VecU8_fmt("BuffRBTreeBy%s_Map%sTo%s", op.alternative_comp_map_name_embed, op.K, op.V);
return VecU8_fmt("BuffRBTree_Map%sTo%s", op.K, op.V);
}
NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op op){
NODISCARD VecU8 generate_buff_rbtree_Map_template_instantiation(map_instantiation_op op){
assert(op.V.len > 0);
map_instantiation_op_fix(&op);
VecU8 res = VecU8_new();
VecU8 map_g = get_name_of_rb_tree_map_structure(op);
VecU8 map_g = get_name_of_buff_rbtree_map_structure(op);
SpanU8 map = VecU8_to_span(&map_g);
VecU8 kvp_g = VecU8_fmt("KVP%sTo%s", op.K, op.V);
codegen_append_rb_tree_map__structure_and_simplest_methods(&res, op, map, VecU8_to_span(&kvp_g));
codegen_append_buff_rbtree_map__structure_and_simplest_methods(&res, op, map, VecU8_to_span(&kvp_g));
VecU8_drop(kvp_g);
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("insert"), vcstr("bool"),
codegen_append_buff_rbtree_map__insert_kind_method(&res, op, map, cstr("insert"), vcstr("bool"),
vcstr("return true;\n"),
VecU8_fmt("%v%v" "return false;\n",
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(key);\n", op.K),
op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(value);\n", op.V)));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("erase_substitute"), vcstr("bool"),
codegen_append_buff_rbtree_map__insert_kind_method(&res, op, map, cstr("erase_substitute"), vcstr("bool"),
vcstr("return true;\n"),
VecU8_fmt("%v%v"
"self->el.buf[cur - 1].key = key;\n"
@ -729,7 +671,7 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)
));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("pop_substitute"),
codegen_append_buff_rbtree_map__insert_kind_method(&res, op, map, cstr("pop_substitute"),
VecU8_fmt("Option%s", op.V),
VecU8_fmt("return None_%s();\n", op.V),
VecU8_fmt(
@ -741,16 +683,16 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
op.V, op.V));
/* Erasing time!!!! */
codegen_append_rb_tree_map__method_empty_index_erase(&res, map);
codegen_append_buff_rbtree_map__method_empty_index_erase(&res, map);
codegen_append_rb_tree_map__erase_kind_method(&res, op, map, cstr("erase"), vcstr("bool"),
codegen_append_buff_rbtree_map__erase_kind_method(&res, op, map, cstr("erase"), vcstr("bool"),
vcstr("return false;\n"),
VecU8_fmt("%v%v",
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)),
vcstr("return true;\n"));
codegen_append_rb_tree_map__erase_kind_method(&res, op, map, cstr("pop"),
codegen_append_buff_rbtree_map__erase_kind_method(&res, op, map, cstr("pop"),
VecU8_fmt("Option%s", op.V),
VecU8_fmt("return None_%s();\n", op.V),
VecU8_fmt("%v" "%s saved = self->el.buf[cur - 1].value;\n",
@ -759,42 +701,41 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
VecU8_fmt("return Some_%s(saved);\n", op.V));
/* We erased enough */
codegen_append_rb_tree_map__method_at(&res, op, map, false);
codegen_append_rb_tree_map__method_at(&res, op, map, true);
codegen_append_buff_rbtree_map__method_at(&res, op, map, false);
codegen_append_buff_rbtree_map__method_at(&res, op, map, true);
/* These functions breaks by design and returns answer through pointers given in arguments. For greater good ofk */
codegen_append_rb_tree_map__method_at_iter(&res, op, map, false);
codegen_append_rb_tree_map__method_at_iter(&res, op, map, true);
/* These functions break my design and return the answer through pointers given as arguments. For the greater good, of course */
codegen_append_buff_rbtree_map__method_at_iter(&res, op, map, false);
codegen_append_buff_rbtree_map__method_at_iter(&res, op, map, true);
return res;
}
void generate_rb_tree_Map_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, map_instantiation_op op) {
VecU8 text = VecU8_from_cstr("/* Automatically generated file. Do not edit it.\n"
" * Do not include it in more than one place */\n\n");
VecU8_append_vec(&text, generate_rb_tree_Map_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_rb_tree_map_structure(op), 0);
void generate_buff_rbtree_Map_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, map_instantiation_op op) {
VecU8 text = vcstr(EVE_MESSAGE);
VecU8_append_vec(&text, generate_buff_rbtree_Map_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_buff_rbtree_map_structure(op), 0);
write_whole_file_or_abort((const char*)nt_path.buf, VecU8_to_span(&text));
VecU8_drop(nt_path);
VecU8_drop(text);
}
void generate_rb_tree_Map_templ_inst_guarded_header(
void generate_buff_rbtree_Map_templ_inst_guarded_header(
SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, map_instantiation_op op
){
assert(layer.len > 1);
VecU8 path = VecU8_fmt("%s/%s%s%v.h", layer, bonus_ns, bonus_ns.len ? cstr("/") : cstr(""),
get_name_of_rb_tree_map_structure(op));
get_name_of_buff_rbtree_map_structure(op));
GeneratedHeader head = begin_header(VecU8_to_span(&path));
VecU8_drop(path);
VecU8_append_span(&head.result, cstr("#include \"../../"));
int to_my_layer = get_number_of_parts_in_header_namespace(bonus_ns);
for (int i = 0; i < to_my_layer; i++)
VecU8_append_span(&head.result, cstr("../"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/buff_rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, dependencies);
VecU8_append_span(&head.result, cstr("\n\n"));
VecU8_append_vec(&head.result, generate_rb_tree_Map_template_instantiation(op));
VecU8_append_vec(&head.result, generate_buff_rbtree_Map_template_instantiation(op));
finish_header(head);
}

View File

@ -0,0 +1,77 @@
#ifndef prototype1_src_l1_5_codegen_rbtree_set_map_template_inst_h
#define prototype1_src_l1_5_codegen_rbtree_set_map_template_inst_h
#include "all_set_map_templ_util_inst.h"
NODISCARD VecU8 codegen_rbtree__node_struct_name(map_instantiation_op op){
return (op.V.len > 0) ? VecU8_fmt("RBTreeNode_KVP%sTo%s", op.K, op.V) : VecU8_fmt("RBTreeNode_%s", op.K);
}
NODISCARD VecU8 codegen_rbtree__node_structure(map_instantiation_op op){
map_instantiation_op_fix(&op);
VecU8 node_g = codegen_rbtree__node_struct_name(op);
SpanU8 node = VecU8_to_span(&node_g);
VecU8 res = VecU8_fmt(
"typedef struct %s %s;\n\n" /* node, node */
"struct %s {\n" /* node */
SPACE "%s* left;\n" /* node */
SPACE "%s* right;\n" /* node */
SPACE "%s* parent;\n" /* node */
SPACE "int color;\n"
SPACE "%v;\n" /* K key[] / KVP w[] */
"};\n\n",
node, node, node, node, node, node,
op.V.len > 0 ? VecU8_fmt("KVP%sTo%s w[]", op.K, op.V) : VecU8_fmt("%s key[]", op.K));
VecU8_append_vec(&res, VecU8_fmt(
"%s* %s_minimum_in_subtree(const %s* x, const %s* NIL) {\n"
SPACE "assert(x != NIL);\n"
SPACE ""
"}\n"));
VecU8_drop(node_g);
return res;
}
void codegen_append_rbtree_map__structure_and_simplest_methods(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 TT
){
VecU8_append_vec(res, VecU8_fmt(
"typedef struct {\n"
SPACE "RBTreeNode_%s* root;\n" /* TT */
SPACE "RBTreeNode_%s* NIL;\n" /* TT */
"%v" /* "" / guest field */
"} %s;\n\n", TT, TT,
op.guest_data_T.len == 0 ? vcstr("") : VecU8_fmt("%s guest;\n", op.guest_data_T),
set));
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new(" "%v" ") {\n" /* set, set, "" / GT guest */
SPACE "RBTreeNode_%s* NIL = (RBTreeNode_%s*)safe_malloc(sizeof(RBTreeNode_%s));\n" /* TT, TT, TT*/
SPACE "return (%s){.root = NIL, .NIL = NIL" "%s" "};\n" /* set, "" / , .guest = guest */
"}\n\n",
set, set, op.guest_data_T.len == 0 ? vcstr("") : VecU8_fmt("%s guest", op.guest_data_T),
TT, TT, TT,
set, op.guest_data_T.len == 0 ? cstr("") : cstr(", .guest = guest")));
VecU8_append_vec(res, VecU8_fmt(
"void %s_drop(%s self) {\n" /* set, set */
SPACE "/* TODO: free every node, then the NIL sentinel */\n"
"}\n\n", set, set));
}
NODISCARD VecU8 get_name_of_rbtree_map_structure(map_instantiation_op op){
if (op.alternative_comp_map_name_embed.len > 0)
return VecU8_fmt("RBTreeBy%s_Map%sTo%s", op.alternative_comp_map_name_embed, op.K, op.V);
return VecU8_fmt("RBTree_Map%sTo%s", op.K, op.V);
}
NODISCARD VecU8 generate_rbtree_Map_template_instantiation(map_instantiation_op op){
map_instantiation_op_fix(&op);
VecU8 res = VecU8_new();
VecU8 map_g = get_name_of_rbtree_map_structure(op);
SpanU8 map = VecU8_to_span(&map_g);
(void)map; /* TODO: emit the pointer-based map methods here */
VecU8_drop(map_g); /* don't leak the name while the body is unfinished */
return res;
}
#endif

View File

@ -0,0 +1,222 @@
#ifndef PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#define PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#include "../../l1/core/util.h"
typedef enum {
RBTree_black = 0,
RBTree_red = 1,
} RBTreeColor;
typedef struct {
size_t left;
size_t right;
size_t parent;
/* 0 is black, 1 is red */
RBTreeColor color;
} BufRBTreeNode;
#include "../../../gen/l1/eve/embassy_l1_5/VecBuffRBTreeNode.h"
void BufRBTree_left_rotate(BufRBTreeNode* tree, U64* root, U64 x){
assert(x != 0);
U64 y = tree[x].right;
assert(y != 0);
tree[x].right = tree[y].left;
if (tree[x].right != 0)
tree[tree[x].right].parent = x;
tree[y].parent = tree[x].parent;
if (tree[y].parent == 0) {
*root = y;
} else if (x == tree[tree[x].parent].left) {
tree[tree[x].parent].left = y;
} else {
tree[tree[x].parent].right = y;
}
tree[x].parent = y;
tree[y].left = x;
}
void BufRBTree_right_rotate(BufRBTreeNode* tree, U64* root, U64 x){
assert(x != 0);
U64 y = tree[x].left;
assert(y != 0);
tree[x].left = tree[y].right;
if (tree[x].left != 0)
tree[tree[x].left].parent = x;
tree[y].parent = tree[x].parent;
if (tree[y].parent == 0) {
*root = y;
} else if (x == tree[tree[x].parent].right) {
tree[tree[x].parent].right = y;
} else {
tree[tree[x].parent].left = y;
}
tree[x].parent = y;
tree[y].right = x;
}
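/* Shape of the two rotations (indices into the node buffer, 0 = NIL):
 *
 *        x                y
 *       / \    left      / \
 *      a   y   ====>    x   c
 *         / \   right  / \
 *        b   c  <====  a   b
 */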
/* Helper function. Called in automatically generated code */
void BufRBTree_fix_after_insert(BufRBTreeNode* tree, U64* root, U64 me){
assert(me);
while (true) {
U64 mom = tree[me].parent;
if (mom == 0)
break;
if (tree[mom].color == RBTree_black)
return;
U64 grandma = tree[mom].parent;
U64 aunt = tree[grandma].left == mom ? tree[grandma].right : tree[grandma].left;
assert(aunt != mom);
if (tree[aunt].color == RBTree_red) {
/* Easy case */
tree[mom].color = RBTree_black;
tree[aunt].color = RBTree_black;
tree[grandma].color = RBTree_red;
me = grandma;
} else if (tree[grandma].left == mom) {
/* Hard case: firstborn orientation */
if (tree[mom].right == me) {
BufRBTree_left_rotate(tree, root, mom);
tree[me].color = RBTree_black;
} else {
tree[mom].color = RBTree_black;
}
BufRBTree_right_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
return;
} else {
/* Hard case: benjamin orientation */
if (tree[mom].left == me) {
BufRBTree_right_rotate(tree, root, mom);
tree[me].color = RBTree_black;
} else {
tree[mom].color = RBTree_black;
}
BufRBTree_left_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
return;
}
}
assert(*root == me);
tree[me].color = RBTree_black;
}
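/* A hand-checked sketch of the insert fixup on the red-red chain 1 -> 2 -> 3 (slot 0 is the
 * NIL sentinel; a plain array stands in here for VecBufRBTreeNode's buffer): */
void buff_rbtree_fixup_example() {
    BufRBTreeNode tree[4] = {0};
    U64 root = 1;
    tree[1] = (BufRBTreeNode){.right = 2, .color = RBTree_black};
    tree[2] = (BufRBTreeNode){.parent = 1, .right = 3, .color = RBTree_red};
    tree[3] = (BufRBTreeNode){.parent = 2, .color = RBTree_red};
    BufRBTree_fix_after_insert(tree, &root, 3);
    /* the "benjamin orientation" branch recolors node 2 and left-rotates at node 1 */
    assert(root == 2 && tree[2].color == RBTree_black);
    assert(tree[2].left == 1 && tree[2].right == 3);
    assert(tree[1].color == RBTree_red && tree[3].color == RBTree_red);
}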
/* The fr index will be forgotten. to's fields will be overwritten (all fields replaced by fr's values).
* If you need the old values at the to position, you had better save them on the stack */
void BufRBTree_steal_neighbours(BufRBTreeNode* tree, U64* root, U64 fr, U64 to){
if (tree[fr].parent == 0)
*root = to;
else if (tree[tree[fr].parent].left == fr)
tree[tree[fr].parent].left = to;
else
tree[tree[fr].parent].right = to;
tree[tree[fr].left].parent = to;
tree[tree[fr].right].parent = to;
tree[to] = tree[fr];
}
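/* Note: the generated _empty_index_erase calls this twice: once to splice node y into z's
 * structural position, and once more to move the last occupied slot L into z's slot so the
 * node buffer and the element vector stay dense, with no holes. */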
/* helper function (used in _erase, _find_min methods). It is assumed that s is not null.
* Guaranteed to return non-null
*/
U64 BufRBTree_minimum_in_subtree(const BufRBTreeNode* tree, U64 s){
assert(s != 0);
while (tree[s].left != 0)
s = tree[s].left;
return s;
}
/* helper function (used in _find_max, _find_prev methods). It is assumed that s is not null.
* Guaranteed to return non-null
*/
U64 BufRBTree_maximum_in_subtree(const BufRBTreeNode* tree, U64 s){
assert(s != 0);
while (tree[s].right != 0)
s = tree[s].right;
return s;
}
void BufRBTree_fix_after_delete(BufRBTreeNode* tree, U64* root, U64 me){
assert(tree[*root].parent == 0);
while (me != *root && tree[me].color == RBTree_black) {
U64 mom = tree[me].parent;
if (me == tree[mom].left) { /* We are on the left */
U64 sister = tree[mom].right;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
BufRBTree_left_rotate(tree, root, mom);
/* Reassignation required */
sister = tree[mom].right;
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Case 2 */
tree[sister].color = RBTree_red;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_benjamin].color == RBTree_black) {
/* Case 3 */
tree[nephew_firstborn].color = RBTree_black;
tree[sister].color = RBTree_red;
BufRBTree_right_rotate(tree, root, sister);
/* Reassignation required */
nephew_benjamin = sister;
sister = nephew_firstborn;
nephew_firstborn = tree[sister].left;
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_benjamin].color = RBTree_black;
BufRBTree_left_rotate(tree, root, mom);
me = *root;
} else if (me == tree[mom].right) { /* We are on the right */
U64 sister = tree[mom].left;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
BufRBTree_right_rotate(tree, root, mom);
/* Reassignation required */
sister = tree[mom].left;
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Case 2 */
tree[sister].color = RBTree_red;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_firstborn].color == RBTree_black) {
/* Case 3 */
tree[nephew_benjamin].color = RBTree_black;
tree[sister].color = RBTree_red;
BufRBTree_left_rotate(tree, root, sister);
/* Reassignation required */
nephew_firstborn = sister;
sister = nephew_benjamin;
nephew_benjamin = tree[sister].right;
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_firstborn].color = RBTree_black;
BufRBTree_right_rotate(tree, root, mom);
me = *root;
}
}
tree[me].color = RBTree_black;
}
#endif

View File

@ -1,222 +1,55 @@
#ifndef PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#define PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#ifndef prototype1_src_l1_5_core_rb_tree_node_h
#define prototype1_src_l1_5_core_rb_tree_node_h
#include "../../l1/core/util.h"
typedef enum {
RBTree_black = 0,
RBTree_red = 1,
} RBTreeColor;
} RBTreeClr;
typedef struct {
size_t left;
size_t right;
size_t parent;
/* 0 is black, 1 is red */
RBTreeColor color;
} RBTreeNode;
typedef struct RBTreeNode RBTreeNode;
struct RBTreeNode{
RBTreeNode* parent;
RBTreeNode* left;
RBTreeNode* right;
RBTreeClr color;
};
#include "../../../gen/l1/eve/embassy_l1_5/VecRBTreeNode.h"
void RBTree_left_rotate(RBTreeNode* tree, U64* root, U64 x){
assert(x != 0);
U64 y = tree[x].right;
assert(y != 0);
tree[x].right = tree[y].left;
if (tree[x].right != 0)
tree[tree[x].right].parent = x;
tree[y].parent = tree[x].parent;
if (tree[y].parent == 0) {
*root = y;
} else if (x == tree[tree[x].parent].left) {
tree[tree[x].parent].left = y;
} else {
tree[tree[x].parent].right = y;
}
tree[x].parent = y;
tree[y].left = x;
// todo: implement and then use
void BufRBTree_left_rotate(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* x){
// assert(x != 0);
// U64 y = tree[x].right;
// assert(y != 0);
// tree[x].right = tree[y].left;
// if (tree[x].right != 0)
// tree[tree[x].right].parent = x;
//
// tree[y].parent = tree[x].parent;
// if (tree[y].parent == 0) {
// *root = y;
// } else if (x == tree[tree[x].parent].left) {
// tree[tree[x].parent].left = y;
// } else {
// tree[tree[x].parent].right = y;
// }
// tree[x].parent = y;
// tree[y].left = x;
}
void RBTree_right_rotate(RBTreeNode* tree, U64* root, U64 x){
assert(x != 0);
U64 y = tree[x].left;
assert(y != 0);
tree[x].left = tree[y].right;
if (tree[x].left != 0)
tree[tree[x].left].parent = x;
tree[y].parent = tree[x].parent;
if (tree[y].parent == 0) {
*root = y;
} else if (x == tree[tree[x].parent].right) {
tree[tree[x].parent].right = y;
} else {
tree[tree[x].parent].left = y;
}
tree[x].parent = y;
tree[y].right = x;
RBTreeNode* RBTree_minimum_in_subtree(RBTreeNode* x, RBTreeNode* NIL){
assert(x != NIL);
while (x->left != NIL)
x = x->left;
return x;
}
/* Helper function. Called in automatically generated code */
void RBTree_fix_after_insert(RBTreeNode* tree, U64* root, U64 me){
assert(me);
while (true) {
U64 mom = tree[me].parent;
if (mom == 0)
break;
if (tree[mom].color == RBTree_black)
return;
U64 grandma = tree[mom].parent;
U64 aunt = tree[grandma].left == mom ? tree[grandma].right : tree[grandma].left;
assert(aunt != mom);
if (tree[aunt].color == RBTree_red) {
/* Easy case */
tree[mom].color = RBTree_black;
tree[aunt].color = RBTree_black;
tree[grandma].color = RBTree_red;
me = grandma;
} else if (tree[grandma].left == mom) {
/* Hard case: firstborn orientation */
if (tree[mom].right == me) {
RBTree_left_rotate(tree, root, mom);
tree[me].color = RBTree_black;
} else {
tree[mom].color = RBTree_black;
}
RBTree_right_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
return;
} else {
/* Hard case: benjamin orientation */
if (tree[mom].left == me) {
RBTree_right_rotate(tree, root, mom);
tree[me].color = RBTree_black;
} else {
tree[mom].color = RBTree_black;
}
RBTree_left_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
return;
}
}
assert(*root == me);
tree[me].color = RBTree_black;
RBTreeNode* RBTreeNode_maximum_in_subtree(RBTreeNode* x, RBTreeNode* NIL){
assert(x != NIL);
while (x->right != NIL)
x = x->right;
return x;
}
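/* Usage sketch for the pointer-based variants above (my assumption about the intended calling
 * convention, mirroring the index-based API where 0 plays the role of NIL): */
// RBTreeNode* leftmost  = RBTree_minimum_in_subtree(root, NIL);
// RBTreeNode* rightmost = RBTreeNode_maximum_in_subtree(root, NIL);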
/* fr index will be forgotten. to fields will be overwritten (all fields replaced by fr's values)
* If you need the old values of `to` position, you better save them on stack */
void RBTree_steal_neighbours(RBTreeNode* tree, U64* root, U64 fr, U64 to){
if (tree[fr].parent == 0)
*root = to;
else if (tree[tree[fr].parent].left == fr)
tree[tree[fr].parent].left = to;
else
tree[tree[fr].parent].right = to;
tree[tree[fr].left].parent = to;
tree[tree[fr].right].parent = to;
tree[to] = tree[fr];
}
/* helper function (used in _erase, _find_min methods). It is assumed that s is not null.
* Guaranteed to return no-null
*/
U64 RBTree_minimum_in_subtree(const RBTreeNode* tree, U64 s){
assert(s != 0);
while (tree[s].left != 0)
s = tree[s].left;
return s;
}
/* helper function (used in _find_max, _find_prev methods). It is assumed that s is not null.
* Guaranteed to return no-null
*/
U64 RBTree_maximum_in_subtree(const RBTreeNode* tree, U64 s){
assert(s != 0);
while (tree[s].right != 0)
s = tree[s].right;
return s;
}
void RBTree_fix_after_delete(RBTreeNode* tree, U64* root, U64 me){
assert(tree[*root].parent == 0);
while (me != *root && tree[me].color == RBTree_black) {
U64 mom = tree[me].parent;
if (me == tree[mom].left) { /* We are on the left */
U64 sister = tree[mom].right;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
RBTree_left_rotate(tree, root, mom);
/* Reassignation required */
sister = tree[mom].right;
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Case 2 */
tree[sister].color = RBTree_red;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_benjamin].color == RBTree_black) {
/* Case 3 */
tree[nephew_firstborn].color = RBTree_black;
tree[sister].color = RBTree_red;
RBTree_right_rotate(tree, root, sister);
/* Reassignation required */
nephew_benjamin = sister;
sister = nephew_firstborn;
nephew_firstborn = tree[sister].left;
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_benjamin].color = RBTree_black;
RBTree_left_rotate(tree, root, mom);
me = *root;
} else if (me == tree[mom].right) { /* We are on the right */
U64 sister = tree[mom].left;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
RBTree_right_rotate(tree, root, mom);
/* Reassignation required */
sister = tree[mom].left;
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Case 2 */
tree[sister].color = RBTree_red;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_firstborn].color == RBTree_black) {
/* Case 3 */
tree[nephew_benjamin].color = RBTree_black;
tree[sister].color = RBTree_red;
RBTree_left_rotate(tree, root, sister);
/* Reassignation required */
nephew_firstborn = sister;
sister = nephew_benjamin;
nephew_benjamin = tree[sister].right;
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_firstborn].color = RBTree_black;
RBTree_right_rotate(tree, root, mom);
me = *root;
}
}
tree[me].color = RBTree_black;
}
#endif
#endif

View File

@@ -243,7 +243,7 @@ typedef struct {
/* It is the user's job to put resize and alloc requests for sub-buffers of type T into the corresponding request
 * vectors for this exact type T */
typedef struct {
VecMargaretMemAllocatorRequestResizeSubBuffer resize;
VecMargaretMemAllocatorRequestResizeSubBuffer expand;
VecMargaretMemAllocatorRequestAllocSubBuffer alloc;
} MargaretMemAllocatorRequestsForCertainBufferKindAllocation;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation.h"
@@ -251,7 +251,8 @@ typedef struct {
typedef struct {
VecMargaretMemAllocatorRequestFreeSubBuffer free_subbuffer;
VecMargaretMemAllocatorRequestFreeImage free_image;
VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation resize_alloc_buffer;
VecMargaretMemAllocatorRequestResizeSubBuffer shrink_subbuffer;
VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation expand_alloc_buffer;
VecMargaretMemAllocatorRequestAllocImage alloc_image;
} MargaretMemAllocatorRequest;
@@ -296,7 +297,7 @@ char* MargaretMemAllocator_get_host_visible_buffer_ptr(
#include "../../l1/core/uint_segments.h"
#include "../../l1/core/util.h"
#include "../../l1_5/core/rb_tree_node.h"
#include "../../l1_5/core/buff_rb_tree_node.h"
typedef struct {
U64 width;
@@ -455,7 +456,6 @@ typedef struct{
U64 new_start;
U64 new_len;
} MargaretOldBufferResizeRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretOldBufferResizeRecord.h"
typedef struct {
@@ -466,6 +466,15 @@ typedef struct {
} MargaretResizeToNascentRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretResizeToNascentRecord.h"
/* This is not a request from a user to the MMA; it is a request from the MMA to the MMA defragmentation subroutine */
typedef struct{
U64 old_size; /* in buffer in old VkDeviceMemory */
U64 new_size; /* in buffer in new VkDeviceMemory */
MargaretMemAllocatorSubBufferPosition* ans;
} MargaretSubBufRelocationRequest;
#include "../../../gen/l1/eve/margaret/VecMargaretSubBufRelocationRequest.h"
#include "../../../gen/l1/eve/margaret/VecVecMargaretSubBufRelocationRequest.h"
#include "../../../gen/l1/VecAndSpan_U8.h"
@@ -476,9 +485,12 @@ typedef struct {
} MargaretMemFreeSpaceManager;
MargaretMemFreeSpaceManager MargaretMemFreeSpaceManager_new(){
MargaretMemFreeSpaceManager res = {.set_present = VecU8_new()};
MargaretMemFreeSpaceManager res = {.set_present = VecU8_new_zeroinit(1)};
res.set_present.buf[0] = 3;
for (U8 algn = 0; algn < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; algn++)
res.free_space_in_memory[algn] = None_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment();
res.free_space_in_memory[3] = Some_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment(
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_new_reserved(3, 1));
return res;
}
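/* A note on the constants above (my reading, not verified elsewhere): _new eagerly materializes
 * exactly one per-alignment set, for alignment exponent 3 (2^3 = 8 bytes), and records that in
 * set_present; every other exponent is built lazily by _search on first use. So right after
 * construction the following should hold: */
// MargaretMemFreeSpaceManager man = MargaretMemFreeSpaceManager_new();
// assert(man.set_present.len == 1 && man.set_present.buf[0] == 3);
// assert(man.free_space_in_memory[3].variant == Option_Some);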
@@ -488,6 +500,62 @@ void MargaretMemFreeSpaceManager_drop(MargaretMemFreeSpaceManager self){
VecU8_drop(self.set_present);
}
void MargaretMemFreeSpaceManager_sink(MargaretMemFreeSpaceManager* self){
for (U8 ae = 0; ae < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; ae++)
if (self->free_space_in_memory[ae].variant == Option_Some)
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_sink(&self->free_space_in_memory[ae].some);
}
void MargaretMemFreeSpaceManager_erase(MargaretMemFreeSpaceManager* man, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
assert(man->set_present.len > 0);
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
bool eret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_erase(&
man->free_space_in_memory[alignment].some, &(MargaretFreeMemSegment){start, len, dev_mem_block});
assert(eret);
}
}
void MargaretMemFreeSpaceManager_insert(MargaretMemFreeSpaceManager* man, U64 start, U64 len, U32 dev_mem_block){
assert(len > 0);
assert(man->set_present.len > 0); /* MargaretMemFreeSpaceManager_new seeds this with alignment exponent 3 (2^3 = 8) */
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
bool iret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_insert(&
man->free_space_in_memory[alignment].some, (MargaretFreeMemSegment){start, len, dev_mem_block});
assert(iret);
}
}
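/* Invariant kept by _erase/_insert above (as I read them): every free segment is mirrored into
 * every per-alignment set materialized so far, so one logical segment means N tree entries.
 * Sketch with invented numbers: */
// MargaretMemFreeSpaceManager_insert(&man, /*start*/ 4096, /*len*/ 512, /*dev_mem_block*/ 0);
// /* ...the segment now sits in every Option_Some slot of man.free_space_in_memory... */
// MargaretMemFreeSpaceManager_erase(&man, 4096, 512, 0); /* and this removes all the copies */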
OptionMargaretFreeMemSegment MargaretMemFreeSpaceManager_search(MargaretMemFreeSpaceManager* man, U64 len, U8 alignment_exp) {
check(alignment_exp < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
if (man->free_space_in_memory[alignment_exp].variant == Option_None) {
assert(man->set_present.len > 0);
assert(man->free_space_in_memory[man->set_present.buf[0]].variant == Option_Some);
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment* have = &man->free_space_in_memory[man->set_present.buf[0]].some;
man->free_space_in_memory[alignment_exp] = Some_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment(
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_new_reserved(alignment_exp, have->el.len));
for (size_t i = 0; i < have->el.len; i++) {
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_insert(&man->free_space_in_memory[alignment_exp].some,
*VecMargaretFreeMemSegment_at(&have->el, i));
}
}
assert(man->free_space_in_memory[alignment_exp].variant == Option_Some);
U64 sit = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_find_min_grtr_or_eq(&man->free_space_in_memory[alignment_exp].some,
&(MargaretFreeMemSegment){.start = 0, .len = len, .dev_mem_block = 0});
if (sit == 0)
return None_MargaretFreeMemSegment();
return Some_MargaretFreeMemSegment(*BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_at_iter(
&man->free_space_in_memory[alignment_exp].some, sit));
}
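/* Usage sketch for _search (hypothetical numbers): request 256 bytes at alignment 2^4. If the
 * alignment-4 set was never touched, it is materialized here by re-inserting every segment from
 * the first present set, then queried with find_min_grtr_or_eq keyed on length: */
// OptionMargaretFreeMemSegment hit = MargaretMemFreeSpaceManager_search(&man, 256, 4);
// if (hit.variant == Option_Some) {
//     /* hit.some.start / hit.some.dev_mem_block locate a free run of at least 256 bytes */
// }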
typedef struct {
VkBufferUsageFlags usage;
U8 inner_alignment_exp;
@@ -501,6 +569,22 @@ void MargaretBufferKindInfo_drop(MargaretBufferKindInfo self){
BuffRBTreeByLen_SetMargaretFreeMemSegment_drop(self.free_space_inside_buffers);
}
void MargaretBufferKindInfo_erase_free_space(MargaretBufferKindInfo* self, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
bool eret = BuffRBTreeByLen_SetMargaretFreeMemSegment_erase(&self->free_space_inside_buffers,
&(MargaretFreeMemSegment){.start = start, .len = len, .dev_mem_block = dev_mem_block});
assert(eret);
}
void MargaretBufferKindInfo_insert_free_space(MargaretBufferKindInfo* self, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
bool iret = BuffRBTreeByLen_SetMargaretFreeMemSegment_insert(&self->free_space_inside_buffers,
(MargaretFreeMemSegment){start, len, dev_mem_block});
assert(iret);
}
#include "../../../gen/l1/eve/margaret/VecMargaretBufferKindInfo.h"
/* VkDevice and VkPhysicalDevice stay remembered here. Don't forget that, please */
@@ -557,111 +641,115 @@ MargaretMemAllocator MargaretMemAllocator_new(
return self;
}
void MargaretMemAllocator__erase_free_space_for_subbufers(
BuffRBTreeByLen_SetMargaretFreeMemSegment* free_space_inside_buffers, U64 start, U64 len, U32 dev_mem_block
){
if (len > 0) {
bool eret = BuffRBTreeByLen_SetMargaretFreeMemSegment_erase(free_space_inside_buffers, &(MargaretFreeMemSegment){
.start = start, .len = len, .dev_mem_block = dev_mem_block});
assert(eret);
}
void MargaretMemAllocator__sink_memory(MargaretMemAllocator* self){
for (size_t i = 0; i < self->buffer_types.len; i++)
BuffRBTreeByLen_SetMargaretFreeMemSegment_sink(&self->buffer_types.buf[i].free_space_inside_buffers);
MargaretMemFreeSpaceManager_sink(&self->mem_free_space);
}
/* Not in a dedicated buffer, just in general memory */
void MargaretMemAllocator__erase_free_space_in_memory(
MargaretMemAllocator* self, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
MargaretMemFreeSpaceManager* man = &self->mem_free_space;
assert(man->set_present.len > 0);
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
bool eret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_erase(&
man->free_space_in_memory[alignment].some, &(MargaretFreeMemSegment){start, len, dev_mem_block});
assert(eret);
U64Segment MargaretMemAllocatorOneBlock_get_left_free_space(const MargaretMemAllocatorOneBlock* self, U64 occ_it){
U64 occ_start;
const MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, occ_it, &occ_start, &occ);
U64 prev_occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_prev(&self->occupied_memory, occ_it);
if (prev_occ_it != 0) {
U64 prev_occ_start;
const MargaretMemoryOccupation* prev_occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, prev_occ_it, &prev_occ_start, &prev_occ);
assert(prev_occ_start + prev_occ->taken_size <= occ_start);
return (U64Segment){.start = prev_occ_start + prev_occ->taken_size, .len = occ_start - (prev_occ_start + prev_occ->taken_size)};
}
return (U64Segment){.start = 0, .len = occ_start};
}
void MargaretMemAllocator__insert_free_space_in_memory(
MargaretMemAllocator* self, U64 start, U64 len, U32 dev_mem_block){
assert(len > 0);
MargaretMemFreeSpaceManager* man = &self->mem_free_space;
assert(man->set_present.len > 0); /* MargaretMemFreeSpaceManager will do that for us with 2^3 */
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
bool iret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_insert(&
man->free_space_in_memory[alignment].some, (MargaretFreeMemSegment){start, len, dev_mem_block});
assert(iret);
}
}
U64Segment MargaretMemAllocatorOneBlock_get_right_free_space(const MargaretMemAllocatorOneBlock* self, U64 occ_it){
U64 occ_start;
const MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, occ_it, &occ_start, &occ);
U64 next_occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_next(&self->occupied_memory, occ_it);
if (next_occ_it != 0) {
U64 next_occ_start;
const MargaretMemoryOccupation* next_occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, next_occ_it, &next_occ_start, &next_occ);
assert(occ_start + occ->taken_size <= next_occ_start);
return (U64Segment){.start = occ_start + occ->taken_size, .len = next_occ_start - (occ_start + occ->taken_size)};
}
return (U64Segment){.start = occ_start + occ->taken_size, .len = self->length - (occ_start + occ->taken_size)};
}
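/* Picture for the two probes above (a worked example, all numbers invented):
 *
 *   block:  |..free..| occ A |..free..| occ B |........free.........|
 *   offset: 0        40      72       88      120               length
 *
 * For occ_it pointing at B: get_left_free_space  -> {start = 72,  len = 88 - 72 = 16}
 *                           get_right_free_space -> {start = 120, len = length - 120}
 * (B is the last occupant here, so the right probe runs to block->length.) */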
/* If the memory occupant in question is a VkBuffer, this won't delete anything from the set of available free mem
 * segments for that buffer kind. It is your job to remove free buffer subsegments from that set */
void MargaretMemAllocator__get_rid_of_memory_occupant(
MargaretMemAllocator* self, U32 mem_block_id, U64 occ_it){
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, mem_block_id);
assert(0 < occ_it && occ_it < block->occupied_memory.tree.len);
U64 occ_start = block->occupied_memory.el.buf[occ_it - 1].key;
/* We are doing silent pop. But right now we only copied buf occ, we will pop it silently at the end */
MargaretMemoryOccupation occ = block->occupied_memory.el.buf[occ_it - 1].value;
U64 occ_start;
const MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
/* Updating block usage counter */
block->occupation_counter -= occ.taken_size;
block->occupation_counter -= occ->taken_size;
U64 left_free_space_start, left_free_space_length;
U64Segment left_free_space = MargaretMemAllocatorOneBlock_get_left_free_space(block, occ_it);
U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(block, occ_it);
U64 prev_occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_prev(&block->occupied_memory, occ_it);
if (prev_occ_it != 0) {
U64 prev_occ_start;
const MargaretMemoryOccupation* prev_occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&block->occupied_memory, prev_occ_it, &prev_occ_start, &prev_occ);
assert(prev_occ_start + prev_occ->taken_size <= occ_start);
left_free_space_start = prev_occ_start + prev_occ->taken_size;
left_free_space_length = occ_start - (prev_occ_start + prev_occ->taken_size);
} else {
left_free_space_start = 0;
left_free_space_length = occ_start;
}
U64 right_free_space_start, right_free_space_length;
U64 next_occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_next(&block->occupied_memory, occ_it);
if (next_occ_it != 0) {
U64 next_occ_start;
const MargaretMemoryOccupation* next_occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&block->occupied_memory, next_occ_it, &next_occ_start, &next_occ);
assert(occ_start + occ.taken_size <= next_occ_start);
right_free_space_start = occ_start + occ.taken_size;
right_free_space_length = next_occ_start - (occ_start + occ.taken_size);
} else {
right_free_space_start = occ_start + occ.taken_size;
right_free_space_length = block->length - (occ_start + occ.taken_size);
if (occ->variant == MargaretMemoryOccupation_Buffer) {
vkDestroyBuffer(self->device, occ->buf.buffer, NULL);
} else if (occ->variant == MargaretMemoryOccupation_Image) {
vkDestroyImage(self->device, occ->img.image, NULL);
}
/* All these iterators and pointers will get invalidated */
MargaretMemoryOccupation_drop(*occ); // yes, this is illegal, but it works. Don't do it again, please
BuffRBTree_MapU64ToMargaretMemoryOccupation_empty_index_erase(&block->occupied_memory, occ_it);
/* All these iterators and pointers just got invalidated */
if (occ.variant == MargaretMemoryOccupation_Buffer) {
vkDestroyBuffer(self->device, occ.buf.buffer, NULL);
} else {
vkDestroyImage(self->device, occ.img.image, NULL);
MargaretMemFreeSpaceManager_erase(&self->mem_free_space, left_free_space.start, left_free_space.len, mem_block_id);
MargaretMemFreeSpaceManager_erase(&self->mem_free_space, right_free_space.start, right_free_space.len, mem_block_id);
MargaretMemFreeSpaceManager_insert(&self->mem_free_space, left_free_space.start,
right_free_space.start + right_free_space.len - left_free_space.start, mem_block_id);
}
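/* The three free-space calls at the end implement coalescing; a worked instance with invented
 * numbers: occupant at [88,120), left free run [72,88), right free run [120,160). We erase
 * [72,88) and [120,160), then insert one run starting at 72 with
 * len = 120 + 40 - 72 = 88, i.e. the merged [72,160). Zero-length neighbours are harmless,
 * since both _erase calls are no-ops for len == 0. */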
/* Given a subbuffer inside given buffer, returns segment of free space on the left */
U64Segment MargaretMemoryOccupationBuffer_get_left_free_space(
MargaretMemoryOccupationBuffer* buf, U64 subbuf_it){
U64 subbuf_start;
const MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(subbuf_start + subbuf->length <= buf->capacity);
U64 prev_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_prev(&buf->subbuffers, subbuf_it);
if (prev_subbuf_it != 0) {
U64 prev_subbuf_start;
const MargaretBufferOccupationSubBuffer* prev_subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, prev_subbuf_it, &prev_subbuf_start, &prev_subbuf);
assert(prev_subbuf_start + prev_subbuf->length <= subbuf_start);
return (U64Segment){.start = prev_subbuf_start + prev_subbuf->length, .len = subbuf_start - (prev_subbuf_start + prev_subbuf->length)};
}
MargaretMemoryOccupation_drop(occ);
/* Occ is out of the game. And invalidated */
return (U64Segment){.start = 0, .len = subbuf_start};
}
MargaretMemAllocator__erase_free_space_in_memory(self, left_free_space_start, left_free_space_length, mem_block_id);
MargaretMemAllocator__erase_free_space_in_memory(self, right_free_space_start, right_free_space_length, mem_block_id);
/* Given a subbuffer inside this buffer, return segment of free space on the right */
U64Segment MargaretMemoryOccupationBuffer_get_right_free_space(
MargaretMemoryOccupationBuffer* buf, U64 subbuf_it){
U64 subbuf_start;
const MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(subbuf_start + subbuf->length <= buf->capacity);
U64 LEN = right_free_space_start + right_free_space_length - left_free_space_start;
MargaretMemAllocator__insert_free_space_in_memory(self, left_free_space_start, LEN, mem_block_id);
U64 next_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_next(&buf->subbuffers, subbuf_it);
if (next_subbuf_it != 0) {
U64 next_subbuf_start;
const MargaretBufferOccupationSubBuffer* next_subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, next_subbuf_it, &next_subbuf_start, &next_subbuf);
assert(subbuf_start + subbuf->length <= next_subbuf_start);
return (U64Segment){.start = subbuf_start + subbuf->length, .len = next_subbuf_start - (subbuf_start + subbuf->length)};
}
return (U64Segment){.start = subbuf_start + subbuf->length, .len = buf->capacity - (subbuf_start + subbuf->length)};
}
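/* Same probe pattern as for whole-memory occupants, but in buffer-local coordinates: offsets
 * are relative to the VkBuffer start and the right probe is capped by buf->capacity instead of
 * block length. A hypothetical debug self-check of that relationship: */
// U64Segment l = MargaretMemoryOccupationBuffer_get_left_free_space(buf, subbuf_it);
// U64Segment r = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
// assert(l.start + l.len <= r.start && r.start + r.len <= buf->capacity);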
/* Don't forget: all iterators into the MMA buffer maps and free-space sets just got invalidated */
@@ -684,57 +772,27 @@ void MargaretMemAllocator__get_rid_of_sub_buffer(MargaretMemAllocator* self, U32
assert(subbuf_start == start - occ_start);
assert(subbuf_start + subbuf->length <= buf->capacity);
assert(start + subbuf->length <= occ_start + buf->capacity);
kindred->total_occupation -= subbuf->length;
kindred->total_occupation -= subbuf->length; // todo: give a thought to memory counting
U64 left_free_space_start, left_free_space_length;
U64 prev_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_prev(&buf->subbuffers, subbuf_it);
if (prev_subbuf_it != 0) {
U64 prev_subbuf_start;
const MargaretBufferOccupationSubBuffer* prev_subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, prev_subbuf_it, &prev_subbuf_start, &prev_subbuf);
assert(prev_subbuf_start + prev_subbuf->length <= subbuf_start);
left_free_space_start = prev_subbuf_start + prev_subbuf->length;
left_free_space_length = subbuf_start - (prev_subbuf_start + prev_subbuf->length);
} else {
left_free_space_start = 0;
left_free_space_length = subbuf_start;
}
U64 right_free_space_start, right_free_space_length;
U64 next_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_next(&buf->subbuffers, subbuf_it);
if (next_subbuf_it != 0) {
U64 next_subbuf_start;
const MargaretBufferOccupationSubBuffer* next_subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, next_subbuf_it, &next_subbuf_start, &next_subbuf);
assert(subbuf_start + subbuf->length <= next_subbuf_start);
right_free_space_start = subbuf_start + subbuf->length;
right_free_space_length = next_subbuf_start - (subbuf_start + subbuf->length);
} else {
right_free_space_start = subbuf_start + subbuf->length;
right_free_space_length = buf->capacity - (subbuf_start + subbuf->length);
}
U64Segment left_free_space = MargaretMemoryOccupationBuffer_get_left_free_space(buf, subbuf_it);
U64Segment right_free_space = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
/* all these iterators and pointers will get invalidated */
/* Because MargaretBufferOccupationSubBuffer is primitive, we don't need to drop it before erasing */
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_empty_index_erase(&buf->subbuffers, subbuf_it);
/* all these iterators and pointers just got invalidated */
MargaretMemAllocator__erase_free_space_for_subbufers(&kindred->free_space_inside_buffers,
left_free_space_start, left_free_space_length, mem_block_id);
MargaretMemAllocator__erase_free_space_for_subbufers(&kindred->free_space_inside_buffers,
right_free_space_start, right_free_space_length, mem_block_id);
MargaretBufferKindInfo_erase_free_space(kindred, left_free_space.start, left_free_space.len, mem_block_id);
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, mem_block_id);
if (buf->subbuffers.el.len == 0) {
/* We don't actually need to add the BIG free memory segment because we are already deleting the whole buffer */
MargaretMemAllocator__get_rid_of_memory_occupant(self, mem_block_id, occ_it);
} else {
U64 LEN = right_free_space_start + right_free_space_length - left_free_space_start;
U64 LEN = right_free_space.start + right_free_space.len - left_free_space.start;
assert(LEN > 0);
bool iret = BuffRBTreeByLen_SetMargaretFreeMemSegment_insert(&kindred->free_space_inside_buffers,
(MargaretFreeMemSegment){.start = left_free_space_start, .len = LEN, .dev_mem_block = mem_block_id});
assert(iret);
MargaretBufferKindInfo_insert_free_space(kindred, left_free_space.start, LEN, mem_block_id);
}
}
@@ -780,6 +838,7 @@ MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest* request
){
MargaretMemAllocator_wipe_old(self);
assert(request->expand_alloc_buffer.len == self->buffer_types.len);
for (size_t i = 0; i < request->free_subbuffer.len; i++) {
MargaretMemAllocatorSubBufferPosition* req = request->free_subbuffer.buf[i];
MargaretMemAllocator__get_rid_of_sub_buffer(self, req->memory_allocation_id, req->offset_in_device_memory_nubble);
@@ -790,11 +849,180 @@ MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find(&block->occupied_memory, req->offset_in_device_memory_nubble);
MargaretMemAllocator__get_rid_of_memory_occupant(self, req->memory_allocation_id, occ_it);
}
/* We iterate even over those buffer kinds, that we don't use. It is okay, there won't be alot of buffer kinds
* ,and we tend to use all of them */
for (U16 bk = 0; bk < (U16)self->buffer_types.len; bk++) {
MargaretBufferKindInfo* kindred = &self->buffer_types.buf[bk];
// todo: fix alignment issues here too
for (size_t shr = 0; shr < request->shrink_subbuffer.len; shr++) {
U64 new_size = request->shrink_subbuffer.buf[shr].new_size;
assert(new_size > 0);
MargaretMemAllocatorSubBufferPosition* ans = request->shrink_subbuffer.buf[shr].prev_ans;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->memory_allocation_id);
OptionRefMutMargaretMemoryOccupation Oocc = BuffRBTree_MapU64ToMargaretMemoryOccupation_mat(&block->occupied_memory,
ans->offset_in_device_memory_nubble - ans->offset);
MargaretMemoryOccupation* occ = OptionRefMutMargaretMemoryOccupation_expect(Oocc);
assert(occ->variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &occ->buf;
MargaretBufferKindInfo* kindred = VecMargaretBufferKindInfo_mat(&self->buffer_types, buf->kind);
U64 subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find(&buf->subbuffers, ans->offset);
U64 subbuf_start;
MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_mat_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(new_size <= subbuf->length);
if (new_size == subbuf->length)
continue;
U64Segment right_free_space = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, ans->memory_allocation_id);
MargaretBufferKindInfo_insert_free_space(kindred, right_free_space.start - (subbuf->length - new_size),
right_free_space.len + (subbuf->length - new_size), ans->memory_allocation_id);
subbuf->length = new_size;
}
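/* Worked shrink example (invented numbers): a subbuffer of length 100 shrinks to new_size = 60
 * while the free run to its right is [100,140). The erase/insert pair above replaces [100,140)
 * with [100 - 40, 140) = [60,140): the 40 released bytes are glued onto the existing run. */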
VkPhysicalDeviceMaintenance4Properties maintenance4_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
};
VkPhysicalDeviceMaintenance3Properties maintenance3_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
.pNext = &maintenance4_properties
};
VkPhysicalDeviceProperties2 properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
.pNext = &maintenance3_properties,
};
vkGetPhysicalDeviceProperties2(self->physical_device, &properties);
// todo: resize requests for buffer kinds with preserve_at_quiet = false is a ridiculous thing, ban it
// todo: fix alignment on all buffer requests
VecMargaretOldBufferResizeRecord old_buf_resize_record = VecMargaretOldBufferResizeRecord_new();
/* We iterate even over buffer kinds that we don't use. It is okay: there won't be a lot of buffer kinds,
 * and we tend to use all of them */
for (U16 bk = 0; bk < (U16)self->buffer_types.len; bk++) {
MargaretBufferKindInfo* kindred = &self->buffer_types.buf[bk];
MargaretMemAllocatorRequestsForCertainBufferKindAllocation* buf_requests = &request->expand_alloc_buffer.buf[bk];
/* We first try to do all the resize requests, that COULD be done using method 1 and 2. */
for (U64 rr = 0; rr < buf_requests->expand.len;) {
U64 new_size = buf_requests->expand.buf[rr].new_size;
MargaretMemAllocatorSubBufferPosition* ans = buf_requests->expand.buf[rr].prev_ans;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->memory_allocation_id);
U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find(&block->occupied_memory,
ans->offset_in_device_memory_nubble - ans->offset);
U64 occ_start;
MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_mat_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
assert(occ->variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &occ->buf;
assert(ans->offset_in_device_memory_nubble == occ_start + ans->offset);
U64 subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find(&buf->subbuffers, ans->offset);
U64 subbuf_start;
MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_mat_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(subbuf->length <= new_size);
/* Method 1 */
U64Segment right_free_space = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
if (new_size - subbuf->length <= right_free_space.len) {
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, ans->memory_allocation_id);
MargaretBufferKindInfo_insert_free_space(kindred, right_free_space.start + (new_size - subbuf->length),
right_free_space.len - (new_size - subbuf->length), ans->memory_allocation_id);
VecMargaretOldBufferResizeRecord_append(&old_buf_resize_record, (MargaretOldBufferResizeRecord){
.old_mem_block_id = ans->memory_allocation_id, .old_start = ans->offset, .old_len = subbuf->length,
.new_mem_block_id = ans->memory_allocation_id, .new_start = ans->offset, .new_len = new_size});
subbuf->length = new_size; /* Success */
VecMargaretMemAllocatorRequestResizeSubBuffer_unordered_pop(&buf_requests->expand, rr);
continue;
}
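/* Method 2, as I read the branch below: instead of moving the subbuffer, grow the VkBuffer
 * itself. A candidate VkBuffer of the target size is created to learn the driver's size and
 * alignment requirements; the growth is accepted only when the enlarged buffer still fits in
 * the free tail right after this occupant in the same VkDeviceMemory, occ_start already
 * satisfies the new alignment, and maxBufferSize is respected. On success the old handle is
 * destroyed and the candidate takes its place; otherwise the candidate is destroyed and the
 * request is left for the next phase. */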
/* Method 2 */
U64Segment RIGHT_FREE_SPACE = MargaretMemAllocatorOneBlock_get_right_free_space(block, occ_it);
if (
(RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len < buf->capacity) ||
(ans->offset_in_device_memory_nubble + new_size > RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len) ||
(subbuf_start + new_size > maintenance4_properties.maxBufferSize)) {
rr++;
continue;
}
VkBuffer temp_buf_extension;
VkBufferCreateInfo temp_buf_extension_crinfo = {
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.size = subbuf_start + new_size,
.usage = kindred->usage,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
};
if (vkCreateBuffer(self->device, &temp_buf_extension_crinfo, NULL, &temp_buf_extension) != VK_SUCCESS)
abortf("vkCreateBuffer");
VkMemoryRequirements temp_buf_extension_req;
vkGetBufferMemoryRequirements(self->device, temp_buf_extension, &temp_buf_extension_req);
check(U64_is_2pow(temp_buf_extension_req.alignment));
check((temp_buf_extension_req.memoryTypeBits & (1ull << self->memory_type_id)) > 0);
if ((occ_start + temp_buf_extension_req.size > RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len) ||
((occ_start & (temp_buf_extension_req.alignment - 1)) != 0) /* mask works because alignment is a power of two */
){
vkDestroyBuffer(self->device, temp_buf_extension, NULL);
rr++;
continue;
}
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, ans->memory_allocation_id);
MargaretMemFreeSpaceManager_erase(&self->mem_free_space, RIGHT_FREE_SPACE.start, RIGHT_FREE_SPACE.len, ans->memory_allocation_id);
MargaretMemFreeSpaceManager_insert(&self->mem_free_space,
occ_start + temp_buf_extension_req.size,
RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len - (occ_start + temp_buf_extension_req.size),
ans->memory_allocation_id);
VecMargaretOldBufferResizeRecord_append(&old_buf_resize_record, (MargaretOldBufferResizeRecord){
.old_mem_block_id = ans->memory_allocation_id, .old_start = ans->offset, .old_len = subbuf->length,
.new_mem_block_id = ans->memory_allocation_id, .new_start = ans->offset, .new_len = new_size});
/* Success */
subbuf->length = new_size;
occ->taken_size = temp_buf_extension_req.size;
buf->capacity = subbuf_start + new_size;
vkDestroyBuffer(self->device, buf->buffer, NULL);
buf->buffer = temp_buf_extension;
/* TODO: write RBTree_Map, rewrite the whole MMA to use normal RBTree */
for (size_t si = 0; si < buf->subbuffers.el.len; si++) {
buf->subbuffers.el.buf[si].value.ans->buffer = temp_buf_extension;
}
VecMargaretMemAllocatorRequestResizeSubBuffer_unordered_pop(&buf_requests->expand, rr);
}
}
VecMargaretResizeToNascentRecord to_nascent = VecMargaretResizeToNascentRecord_new();
VkBuffer wart_hand = VK_NULL_HANDLE;
U64 wart_capacity = 0;
VkMemoryRequirements wart_mem_req; /* undefined when wart_hand is 0 */
for (U16 bk = 0; bk < (U16)self->buffer_types.len; bk++) {
MargaretBufferKindInfo* kindred = &self->buffer_types.buf[bk];
MargaretMemAllocatorRequestsForCertainBufferKindAllocation* buf_requests = &request->expand_alloc_buffer.buf[bk];
/* We tried methods 1, 2, now we start with method 3 and if it fails we do defragmentation */
for (U64 rr = 0; rr < buf_requests->expand.len;) {
U64 new_size = buf_requests->expand.buf[rr].new_size;
MargaretMemAllocatorSubBufferPosition* ans = buf_requests->expand.buf[rr].prev_ans;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->memory_allocation_id);
U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find(&block->occupied_memory,
ans->offset_in_device_memory_nubble - ans->offset);
U64 occ_start;
MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_mat_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
assert(occ->variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &occ->buf;
assert(ans->offset_in_device_memory_nubble == occ_start + ans->offset);
U64 subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find(&buf->subbuffers, ans->offset);
U64 subbuf_start;
MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_mat_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(subbuf->length <= new_size);
for (int attempt = 0; attempt < 2; attempt++) {
U64 temp_ext_wart_capacity = wart_capacity + new_size;
}
}
}
return 0;
}

View File

@@ -287,15 +287,14 @@ NODISCARD VecU8 margaret_stringify_device_memory_properties_2(VkPhysicalDevice p
VkPhysicalDeviceMaintenance3Properties maintenance3_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
};
VkPhysicalDeviceMaintenance4PropertiesKHR maintenance4_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR,
VkPhysicalDeviceMaintenance4Properties maintenance4_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
.pNext = &maintenance3_properties,
};
VkPhysicalDeviceProperties2 properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
.pNext = &maintenance4_properties,
};
/* Needs VK_KHR_maintenance4 */
vkGetPhysicalDeviceProperties2(physical_device, &properties);
U64 maxBufferSize = maintenance4_properties.maxBufferSize;
U64 maxMemoryAllocationCount = properties.properties.limits.maxMemoryAllocationCount;

View File

@@ -1,7 +1,7 @@
#include "../../../l1/core/util.h"
typedef U64 VkDeviceSize;
#define VK_NULL_HANDLE NULL
typedef int VkResult;
const VkResult VK_SUCCESS = 120;
@@ -188,4 +188,45 @@ void vkFreeMemory(
VkDeviceMemory memory,
const VkAllocationCallbacks* pAllocator);
typedef struct VkPhysicalDeviceLimits {
/* ... */
uint32_t maxMemoryAllocationCount;
/* ... */
} VkPhysicalDeviceLimits;
typedef struct VkPhysicalDeviceProperties {
/* ... */
VkPhysicalDeviceLimits limits;
/* ... */
} VkPhysicalDeviceProperties;
typedef struct VkPhysicalDeviceMaintenance4Properties {
VkStructureType sType;
void* pNext;
VkDeviceSize maxBufferSize;
} VkPhysicalDeviceMaintenance4Properties;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 14542;
typedef struct VkPhysicalDeviceMaintenance3Properties {
VkStructureType sType;
void* pNext;
/* ... */
VkDeviceSize maxMemoryAllocationSize;
} VkPhysicalDeviceMaintenance3Properties;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 145;
typedef struct VkPhysicalDeviceProperties2 {
VkStructureType sType;
void* pNext;
VkPhysicalDeviceProperties properties;
} VkPhysicalDeviceProperties2;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 5324;
void vkGetPhysicalDeviceProperties2(
VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties2* pProperties);
#include "../../margaret/vulkan_memory_claire.h"