diff --git a/src/l1/anne/margaret/margaret_misc.h b/src/l1/anne/margaret/margaret_misc.h index f01bfe8..42b6fa0 100644 --- a/src/l1/anne/margaret/margaret_misc.h +++ b/src/l1/anne/margaret/margaret_misc.h @@ -27,12 +27,15 @@ void generate_margaret_eve_for_vulkan_utils() { /* For l2/margaret/vulkan_memory_claire.h */ generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferKindDescription"), false, true); + generate_eve_span_company_for_primitive(l, ns, cstr("MargaretOldBufferResizeRecord"), true, false); generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){ /* We won't need to clone this type, like, at all... It is actually clonable, but we just made * it non-clonable */ .T = cstr("BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment") }); - generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorOneBlock"), true, false); + generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){ + .T = cstr("MargaretMemAllocatorOneBlock"), .vec = true, .vec_extended = true, + }); generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorOneMemType"), true, false); generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferKindInfo"), true, false); diff --git a/src/l1_5/codegen/all_set_map_templ_util_inst.h b/src/l1_5/codegen/all_set_map_templ_util_inst.h index c621e46..349f5ec 100644 --- a/src/l1_5/codegen/all_set_map_templ_util_inst.h +++ b/src/l1_5/codegen/all_set_map_templ_util_inst.h @@ -6,7 +6,6 @@ /* We assume that T is trivially movable */ typedef struct { SpanU8 T; - bool t_ptr; bool t_integer; bool t_primitive; bool t_clonable; @@ -16,20 +15,22 @@ typedef struct { /* GT. You probably want it to be a pointer or an integer parameter. 
* Leave empty if you don't need guest data (GT = void) - * GT must be primitive */ + * GT must be primitive, or, even better, be integer */ SpanU8 guest_data_T; + + /* If `unconditional_equality` is set, methods, that are needed to return value T + * or reference to T are not generated. + * I, alas, wrote support for `unconditional_equality = false` but it should be noted, that + * `unconditional_equality = false` it is absolutely useless */ + bool unconditional_equality; } set_instantiation_op; void set_instantiation_op_fix(set_instantiation_op* self){ - if (self->t_ptr) - self->t_integer = true; if (self->t_integer) self->t_primitive = true; if (self->t_primitive) self->t_clonable = true; assert(self->T.len > 0); - assert(!self->t_integer || self->alternative_equal.len == 0); - assert(!self->t_integer || self->alternative_less.len == 0); assert((self->alternative_less.len == 0 && self->alternative_equal.len == 0 && self->alternative_comp_set_name_embed.len == 0 )||( @@ -39,6 +40,53 @@ void set_instantiation_op_fix(set_instantiation_op* self){ assert(self->alternative_comp_set_name_embed.len > 0); assert(self->alternative_equal.len > 0 && self->alternative_less.len > 0); } + if (self->t_integer && self->alternative_comp_set_name_embed.len == 0) + self->unconditional_equality = true; +} + +/* We assume K and V are trivially movable */ +typedef struct { + SpanU8 K; + bool k_integer; + bool k_primitive; + bool k_clonable; + SpanU8 V; + bool v_integer; + bool v_primitive; + bool v_clonable; + + SpanU8 alternative_equal; + SpanU8 alternative_less; + SpanU8 alternative_comp_map_name_embed; + + SpanU8 guest_data_T; + + /* If `unconditional_equality` is set, methods, that are needed to return value (K, V) + * or reference to K and V are generated such that they return only V part. 
I don't plan to support + * `unconditional_equality=false` + */ + bool unconditional_equality; +} map_instantiation_op; + +void map_instantiation_op_fix(map_instantiation_op* self){ + if (self->k_integer) + self->k_primitive = true; + if (self->k_primitive) + self->k_clonable = true; + assert(self->K.len > 0 && self->V.len > 0); + assert((self->alternative_less.len == 0 && self->alternative_equal.len == 0 + && self->alternative_comp_map_name_embed.len == 0 + )||( + self->alternative_comp_map_name_embed.len != 0 && + (self->alternative_less.len != 0 || self->alternative_equal.len != 0))); + if (self->guest_data_T.len > 0) { + assert(self->alternative_comp_map_name_embed.len > 0); + assert(self->alternative_equal.len > 0 && self->alternative_less.len > 0); + } + if (self->k_integer && self->alternative_comp_map_name_embed.len == 0) + self->unconditional_equality = true; + if (!self->unconditional_equality) + abortf("map_instantiation_op_fix::unconditional_equality = false isn't supported\n"); } #endif diff --git a/src/l1_5/codegen/rb_tree_set_map_template_inst.h b/src/l1_5/codegen/rb_tree_set_map_template_inst.h index 75e032e..397d52f 100644 --- a/src/l1_5/codegen/rb_tree_set_map_template_inst.h +++ b/src/l1_5/codegen/rb_tree_set_map_template_inst.h @@ -3,154 +3,338 @@ #include "all_set_map_templ_util_inst.h" -/* When key is given by value into some method of Buff_RBTreeSet */ -NODISCARD VecU8 codegen_rb_tree_set_key_value_NOT_EQUAL_element(set_instantiation_op op){ - if (op.guest_data_T.len > 0) { - assert(op.alternative_equal.len > 0); - if (op.t_integer) - return VecU8_fmt("!%s(key, self->el.buf[cur - 1], self->guest)", op.alternative_equal); - return VecU8_fmt("!%s(&key, &self->el.buf[cur - 1], self->guest)", op.alternative_equal); - } - if (op.alternative_equal.len > 0) { - if (op.t_integer) - return VecU8_fmt("!%s(key, self->el.buf[cur - 1])", op.alternative_equal); - return VecU8_fmt("!%s(&key, &self->el.buf[cur - 1])", op.alternative_equal); - } - if 
(op.t_integer) - return VecU8_fmt("key != self->el.buf[cur - 1]"); - return VecU8_fmt("!%s_equal_%s(&key, &self->el.buf[cur - 1])", op.T, op.T); +SpanU8 codegen_rb_tree_map__key_of_cur_el(map_instantiation_op op){ + return op.V.len > 0 ? cstr("self->el.buf[cur - 1].key") : cstr("self->el.buf[cur - 1]"); } /* When key is given by value into some method of Buff_RBTreeSet */ -NODISCARD VecU8 codegen_rb_tree_set_key_value_LESS_element(set_instantiation_op op){ +NODISCARD VecU8 codegen_rb_tree_map__key_value_NOT_EQUAL_element(map_instantiation_op op){ + if (op.guest_data_T.len > 0) { + assert(op.alternative_equal.len > 0); + if (op.k_integer) + return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("!%s(&key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + } + if (op.alternative_equal.len > 0) { + if (op.k_integer) + return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("!%s(&key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + } + if (op.k_integer) + return VecU8_fmt("key != %s", codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("!%s_equal_%s(&key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op)); +} + +/* When key is given by value into some method of Buff_RBTreeSet */ +NODISCARD VecU8 codegen_rb_tree_map__key_value_LESS_element(map_instantiation_op op){ if (op.guest_data_T.len > 0) { assert(op.alternative_less.len > 0); - if (op.t_integer) - return VecU8_fmt("%s(key, self->el.buf[cur - 1], self->guest)", op.alternative_less); - return VecU8_fmt("%s(&key, &self->el.buf[cur - 1], self->guest)", op.alternative_less); + if (op.k_integer) + return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s(&key, &%s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); } if 
(op.alternative_less.len > 0) { - if (op.t_integer) - return VecU8_fmt("%s(key, self->el.buf[cur - 1])", op.alternative_less); - return VecU8_fmt("%s(&key, &self->el.buf[cur - 1])", op.alternative_less); + if (op.k_integer) + return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s(&key, &%s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); } - if (op.t_integer) - return VecU8_fmt("key < self->el.buf[cur - 1]"); - return VecU8_fmt("%s_less_%s(&key, &self->el.buf[cur - 1])", op.T, op.T); + if (op.k_integer) + return VecU8_fmt("key < %s", codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s_less_%s(&key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op)); } /* When key is given by ref into some method of Buff_RBTreeSet * Ofk when op.T is integer, argument is still taken by a value */ -NODISCARD VecU8 codegen_rb_tree_set_key_ref_NOT_EQUAL_element(set_instantiation_op op){ +NODISCARD VecU8 codegen_rb_tree_map__key_ref_NOT_EQUAL_element(map_instantiation_op op){ if (op.guest_data_T.len > 0) { assert(op.alternative_equal.len > 0); - if (op.t_integer) - return VecU8_fmt("!%s(key, self->el.buf[cur - 1], self->guest)", op.alternative_equal); - return VecU8_fmt("!%s(key, &self->el.buf[cur - 1], self->guest)", op.alternative_equal); + if (op.k_integer) + return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("!%s(key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); } if (op.alternative_equal.len > 0) { - if (op.t_integer) - return VecU8_fmt("!%s(key, self->el.buf[cur - 1])", op.alternative_equal); - return VecU8_fmt("!%s(key, &self->el.buf[cur - 1])", op.alternative_equal); + if (op.k_integer) + return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("!%s(key, &%s)", op.alternative_equal, 
codegen_rb_tree_map__key_of_cur_el(op)); } - if (op.t_integer) - return VecU8_fmt("key != self->el.buf[cur - 1]"); - return VecU8_fmt("!%s_equal_%s(key, &self->el.buf[cur - 1])", op.T, op.T); + if (op.k_integer) + return VecU8_fmt("key != %s", codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("!%s_equal_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op)); } /* When key is given by a pointer into some method of Buff_RBTreeSet */ -NODISCARD VecU8 codegen_rb_tree_set_key_ref_EQUAL_element(set_instantiation_op op){ +NODISCARD VecU8 codegen_rb_tree_map__key_ref_EQUAL_element(map_instantiation_op op){ if (op.guest_data_T.len > 0) { assert(op.alternative_equal.len > 0); - if (op.t_integer) - return VecU8_fmt("%s(key, self->el.buf[cur - 1], self->guest)", op.alternative_equal); - return VecU8_fmt("%s(key, &self->el.buf[cur - 1], self->guest)", op.alternative_equal); + if (op.k_integer) + return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); } if (op.alternative_equal.len > 0) { - if (op.t_integer) - return VecU8_fmt("%s(key, self->el.buf[cur - 1])", op.alternative_equal); - return VecU8_fmt("%s(key, &self->el.buf[cur - 1])", op.alternative_equal); + if (op.k_integer) + return VecU8_fmt("%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s(key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op)); } - if (op.t_integer) - return VecU8_fmt("key == self->el.buf[cur - 1]"); - return VecU8_fmt("%s_equal_%s(key, &self->el.buf[cur - 1])", op.T, op.T); + if (op.k_integer) + return VecU8_fmt("key == %s", codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s_equal_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op)); } /* When key is given by a pointer into some method of Buff_RBTreeSet */ -NODISCARD VecU8 
codegen_rb_tree_set_key_ref_LESS_element(set_instantiation_op op){ +NODISCARD VecU8 codegen_rb_tree_map__key_ref_LESS_element(map_instantiation_op op){ if (op.guest_data_T.len > 0) { assert(op.alternative_less.len > 0); - if (op.t_integer) - return VecU8_fmt("%s(key, self->el.buf[cur - 1], self->guest)", op.alternative_less); - return VecU8_fmt("%s(key, &self->el.buf[cur - 1], self->guest)", op.alternative_less); + if (op.k_integer) + return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); } if (op.alternative_less.len > 0) { - if (op.t_integer) - return VecU8_fmt("%s(key, self->el.buf[cur - 1])", op.alternative_less); - return VecU8_fmt("%s(key, &self->el.buf[cur - 1])", op.alternative_less); + if (op.k_integer) + return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s(key, &%s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op)); } - if (op.t_integer) - return VecU8_fmt("key < self->el.buf[cur - 1]"); - return VecU8_fmt("%s_less_%s(key, &self->el.buf[cur - 1])", op.T, op.T); + if (op.k_integer) + return VecU8_fmt("key < %s", codegen_rb_tree_map__key_of_cur_el(op)); + return VecU8_fmt("%s_less_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op)); } -/* When method returns constant pointer to found key (wrapped in Option) we will use this type - * Ofcourse, it can turn out that it is not generated. So be careful and generate it by yourself - */ -NODISCARD VecU8 codegen_rb_tree_set_option_returned_ref_t(set_instantiation_op op){ - /* Constant pointer to an integer is an integer */ - return op.t_integer ? VecU8_fmt("Option%s", op.T) : VecU8_fmt("OptionRef%s", op.T); -} - -/* Suppose some method returns pointer to key (ofc wrapped in option). 
And we found what to return - * we return it from self->el array */ -NODISCARD VecU8 codegen_rb_tree_set_some_ref_t(set_instantiation_op op, SpanU8 index_var_name){ - if (op.t_integer) - return VecU8_fmt("Some_%s(self->el.buf[%s - 1])", op.T, index_var_name); - return VecU8_fmt("Some_Ref%s(&self->el.buf[%s - 1])", op.T, index_var_name); -} - -/* Suppose some method returns pointer to key (ofc wrapped in option). But this time we found nothing */ -NODISCARD VecU8 codegen_rb_tree_set_none_ref_t(set_instantiation_op op){ - if (op.t_integer) - return VecU8_fmt("None_%s()", op.T); - return VecU8_fmt("None_Ref%s()", op.T); -} - -NODISCARD VecU8 codegen_rb_tree_set_option_returned_value_t(set_instantiation_op op){ - return VecU8_fmt("Option%s", op.T); -} - -/* Suppose some method returns an owned key (by value, ofc wrapped in option). If we DID found something, - * we construct Option_Some */ -NODISCARD VecU8 codegen_rb_tree_set_some_t(set_instantiation_op op, SpanU8 val_giving_expr){ - return VecU8_fmt("Some_%s(%s)", op.T, val_giving_expr); -} - -/* Suppose some method returns an owned key (by value, ofc wrapped in option). But this time we found nothing */ -NODISCARD VecU8 codegen_rb_tree_set_none_t(set_instantiation_op op){ - return VecU8_fmt("None_%s()", op.T); -} /* Suppose some method (like _erase() or _pop(), or _find(), or _at(), takes constant reference to key T * This function tells how to write type of this argument. Basically it is needed to take into account that * integer is better than pointer to integer. (Though, notice that _pop family of methods don't exist for * sets of integers */ -NODISCARD VecU8 codegen_rb_tree_set_taking_ref_t_argument(set_instantiation_op op){ - return !op.t_integer ? VecU8_fmt("const %s*", op.T) : VecU8_from_span(op.T); +NODISCARD VecU8 codegen_rb_tree_map__taking_ref_k_argument(map_instantiation_op op){ + return op.k_integer ? 
VecU8_from_span(op.K) : VecU8_fmt("const %s*", op.K); } -NODISCARD VecU8 get_name_of_rb_tree_set_structure(set_instantiation_op op){ - if (op.alternative_comp_set_name_embed.len) - return VecU8_fmt("BuffRBTreeBy%s_Set%s", op.alternative_comp_set_name_embed, op.T); - return VecU8_fmt("BuffRBTree_Set%s", op.T); +NODISCARD VecU8 codegen_rb_tree_map__taking_t_argument(map_instantiation_op op){ + return op.V.len > 0 ? VecU8_fmt("%s key, %s value") : VecU8_fmt("%s key"); +} + +/* Yes, both sets and maps use this function to instantiate themselves. No, user does not need to use it + * set is either a set name or a map name. If we are instantiating set, TT is op.T from set options, if we are + * instantiating a map, TT is KVP{op.K}To{op.V} from map options + * */ +void codegen_append_rb_tree_map__structure_and_simplest_methods( + VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 TT + ){ + VecU8_append_vec(res, VecU8_fmt( + "typedef struct {\n" + SPACE "VecRBTreeNode tree;\n" + SPACE "U64 root;\n" + SPACE "Vec%s el;\n" + "%s" + "} %s;\n\n", + TT, op.guest_data_T.len > 0 ? 
VecU8_fmt(SPACE "%s guest;\n", op.guest_data_T) : vcstr(""), set)); + + if (op.guest_data_T.len > 0) { + VecU8_append_vec(res, VecU8_fmt( + "NODISCARD %s %s_new(%s guest) {\n" /* set, set, op.guest_data_T */ + SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new(), .guest = guest};\n" /* set, TT */ + "}\n\n", + set, set, op.guest_data_T, + set, TT)); + VecU8_append_vec(res, VecU8_fmt( + "NODISCARD %s %s_new_reserved(%s guest, size_t size) {\n" /* set, set, op.guest_data_T */ + SPACE "return (%s){.tree = (VecRBTreeNode){\n" /* set */ + SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n" + SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size), .guest = guest};\n" /* TT */ + "}\n\n", + set, set, op.guest_data_T, + set, TT)); + } else { + VecU8_append_vec(res, VecU8_fmt( + "NODISCARD %s %s_new() {\n" /* set, set */ + SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new()};\n" /* set, TT */ + "}\n\n", + set, set, + set, TT)); + VecU8_append_vec(res, VecU8_fmt( + "NODISCARD %s %s_new_reserved(size_t size) {\n" /* set, set */ + SPACE "return (%s){.tree = (VecRBTreeNode){\n" + SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n" + SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size)};\n" /* set, TT */ + "}\n\n", + set, set, + set, TT)); + } + + VecU8_append_vec(res, VecU8_fmt( + "void %s_drop(%s self) {\n" /* set, set */ + SPACE "VecRBTreeNode_drop(self.tree);\n" + SPACE "Vec%s_drop(self.el);\n" /* TT */ + "}\n\n", set, set, TT)); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ + SPACE "U64 cur = self->root;\n" + SPACE "while (cur != 0 && %v) {\n" /* key reference not equal cur element */ + SPACE SPACE "if (%v) {\n" /* key reference less than cur element */ + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE 
SPACE "} else {\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE "}\n" + SPACE "}\n" + SPACE "return cur;\n" + "}\n\n", + set, set, codegen_rb_tree_map__taking_ref_k_argument(op), + codegen_rb_tree_map__key_ref_NOT_EQUAL_element(op), + codegen_rb_tree_map__key_ref_LESS_element(op) + )); + + + if (op.k_clonable && op.v_clonable) { + VecU8_append_vec(res, VecU8_fmt( + "NODISCARD %s %s_clone(const %s* self){\n" /* set, set, set */ + SPACE "return (%s){.tree = VecRBTreeNode_clone(&self->tree), .root = self->root,\n" /* set */ + SPACE SPACE ".el = Vec%s_clone(&self->el)%s};\n" /* TT, whether to clone guest or no */ + "}\n\n", + set, set, set, + set, + TT, op.guest_data_T.len > 0 ? cstr(", .guest = self->guest") : cstr(""))); + } + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_next(const %s* self, U64 x){\n" + SPACE "assert(x != 0 && x < self->tree.len);\n" + SPACE "if (self->tree.buf[x].right != 0)\n" + SPACE SPACE "return RBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[x].right);\n" + SPACE "while (true) {\n" + SPACE SPACE "U64 p = self->tree.buf[x].parent;\n" + SPACE SPACE "if (p == 0)\n" + SPACE SPACE SPACE "return 0;\n" + SPACE SPACE "if (self->tree.buf[p].left == x)\n" + SPACE SPACE SPACE "return p;\n" + SPACE SPACE "x = p;\n" + SPACE "}\n" + "}\n\n", set, set)); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_prev(const %s* self, U64 x){\n" + SPACE "assert(x != 0 && x < self->tree.len);\n" + SPACE "if (self->tree.buf[x].left != 0)\n" + SPACE SPACE "return RBTree_maximum_in_subtree(self->tree.buf, self->tree.buf[x].left);\n" + SPACE "while (true) {\n" + SPACE SPACE "U64 p = self->tree.buf[x].parent;\n" + SPACE SPACE "if (p == 0)\n" + SPACE SPACE SPACE "return 0;\n" + SPACE SPACE "if (self->tree.buf[p].right == x)\n" + SPACE SPACE SPACE "return p;\n" + SPACE SPACE "x = p;\n" + SPACE "}\n" + "}\n\n", set, set)); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_min(const %s* self) {\n" + SPACE "return self->root != 0 ? 
RBTree_minimum_in_subtree(self->tree.buf, self->root) : 0;\n" + "}\n\n", set, set)); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_max(const %s* self) {\n" + SPACE "return self->root != 0 ? RBTree_maximum_in_subtree(self->tree.buf, self->root) : 0;\n" + "}\n\n", set, set)); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_max_less(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ + SPACE "U64 last_less = 0;\n" + SPACE "U64 cur = self->root;\n" + SPACE "while (cur != 0) {\n" + SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE SPACE "if (cur == 0)\n" + SPACE SPACE SPACE SPACE "return last_less;\n" + SPACE SPACE SPACE "while (self->tree.buf[cur].right != 0)\n" + SPACE SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE SPACE "return cur;\n" + SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE "} else {\n" + SPACE SPACE SPACE "last_less = cur;\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE "}\n" + SPACE "}\n" + SPACE "return last_less;\n" + "}\n\n", + set, set, codegen_rb_tree_map__taking_ref_k_argument(op), + codegen_rb_tree_map__key_ref_EQUAL_element(op), + codegen_rb_tree_map__key_ref_LESS_element(op) + )); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_max_less_or_eq(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ + SPACE "U64 last_less = 0;\n" + SPACE "U64 cur = self->root;\n" + SPACE "while (cur != 0) {\n" + SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ + SPACE SPACE SPACE "return cur;\n" + SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE "} else {\n" + SPACE SPACE SPACE "last_less = cur;\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE "}\n" + SPACE "}\n" + SPACE "return last_less;\n" + "}\n\n", + set, set, 
codegen_rb_tree_map__taking_ref_k_argument(op), + codegen_rb_tree_map__key_ref_EQUAL_element(op), + codegen_rb_tree_map__key_ref_LESS_element(op) + )); + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_min_grtr(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ + SPACE "U64 last_grtr = 0;\n" + SPACE "U64 cur = self->root;\n" + SPACE "while (cur != 0) {\n" + SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE SPACE "if (cur == 0)\n" + SPACE SPACE SPACE SPACE "return last_grtr;\n" + SPACE SPACE SPACE "while (self->tree.buf[cur].left != 0)\n" + SPACE SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE SPACE "return cur;\n" + SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ + SPACE SPACE SPACE "last_grtr = cur;\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE "} else {\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE "}\n" + SPACE "}\n" + SPACE "return last_grtr;\n" + "}\n\n", + set, set, codegen_rb_tree_map__taking_ref_k_argument(op), + codegen_rb_tree_map__key_ref_EQUAL_element(op), + codegen_rb_tree_map__key_ref_LESS_element(op) + )); + + + VecU8_append_vec(res, VecU8_fmt( + "U64 %s_find_min_grtr_or_eq(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ + SPACE "U64 last_grtr = 0;\n" + SPACE "U64 cur = self->root;\n" + SPACE "while (cur != 0) {\n" + SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ + SPACE SPACE SPACE "return cur;\n" + SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ + SPACE SPACE SPACE "last_grtr = cur;\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE "} else {\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE "}\n" + SPACE "}\n" + SPACE "return last_grtr;\n" + "}\n\n", + set, set, codegen_rb_tree_map__taking_ref_k_argument(op), + codegen_rb_tree_map__key_ref_EQUAL_element(op), + 
codegen_rb_tree_map__key_ref_LESS_element(op) + )); } /* Generates methods _insert() _pop_substitute() _erase_substitute() for SetT * Takes ownership of strings Tc, Fc */ -void codegen_append_rb_tree_set_insert_kind_method( - VecU8* result, set_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT, VecU8 Tc, VecU8 Fc +void codegen_append_rb_tree_map__insert_kind_method( + VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT, VecU8 Tc, VecU8 Fc ){ VecU8 Tc_root = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc), 2); VecU8 Tc_on_left = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc), 4); @@ -159,12 +343,16 @@ void codegen_append_rb_tree_set_insert_kind_method( VecU8_drop(Tc); VecU8_drop(Fc); - VecU8_append_vec(result, VecU8_fmt( - "%v %s_%s(%s* self, %s key) {\n" /* set, set, op.T */ + VecU8 line_that_appends_new_el_to_el_vec = op.V.len > 0 ? + VecU8_fmt("VecKVP%sTo%s_append(&self->el, (KVP%sTo%s){.key = key, .value = value});", op.K, op.V, op.K, op.V) : + VecU8_fmt("Vec%s_append(&self->el, key);", op.K); + + VecU8_append_vec(res, VecU8_fmt( + "%v %s_%s(%s* self, %v) {\n" /* RT, set, method_name, set, taking_t_argument */ SPACE "if (self->root == 0) {\n" SPACE SPACE "assert(self->tree.len == 1);\n" SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.color = RBTree_black});\n" - SPACE SPACE "Vec%s_append(&self->el, key);\n" /* op.T */ + SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */ SPACE SPACE "self->root = 1;\n" "%v" /* Tc_root */ /* Should have returned by now in Tc*/ @@ -180,7 +368,7 @@ void codegen_append_rb_tree_set_insert_kind_method( SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n" SPACE SPACE SPACE SPACE "self->tree.buf[cur].left = n;\n" SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n" - SPACE SPACE SPACE SPACE "Vec%s_append(&self->el, key);\n" /* op.T */ + SPACE SPACE SPACE SPACE "%s\n" /* 
line_that_appends_new_el_to_el_vec */ "%v" /* Tc_on_left */ /* Should have returned by now in Tc*/ SPACE SPACE SPACE "}\n" @@ -193,7 +381,7 @@ void codegen_append_rb_tree_set_insert_kind_method( SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n" SPACE SPACE SPACE SPACE "self->tree.buf[cur].right = n;\n" SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n" - SPACE SPACE SPACE SPACE "Vec%s_append(&self->el, key);\n" /* op.T */ + SPACE SPACE SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */ "%v" /* Tc_on_right */ /* Should have returned by now in Tc*/ SPACE SPACE SPACE "}\n" @@ -202,141 +390,23 @@ void codegen_append_rb_tree_set_insert_kind_method( "%v" /* Fc_exists */ /* Should have returned by now in Tc*/ "}\n\n", - RT, set, method_name, set, op.T, op.T, Tc_root, - codegen_rb_tree_set_key_value_NOT_EQUAL_element(op), - codegen_rb_tree_set_key_value_LESS_element(op), - op.T, Tc_on_left, op.T, Tc_on_right, Fc_exists + RT, set, method_name, set, codegen_rb_tree_map__taking_t_argument(op), + VecU8_to_span(&line_that_appends_new_el_to_el_vec), // !! 
+ Tc_root, + codegen_rb_tree_map__key_value_NOT_EQUAL_element(op), + codegen_rb_tree_map__key_value_LESS_element(op), + VecU8_to_span(&line_that_appends_new_el_to_el_vec), + Tc_on_left, + VecU8_to_span(&line_that_appends_new_el_to_el_vec), + Tc_on_right, + Fc_exists )); + + VecU8_drop(line_that_appends_new_el_to_el_vec); } -void codegen_append_rb_tree_set_erase_kind_method( - VecU8* result, set_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT, - VecU8 Fc, VecU8 Tc_cur_available, VecU8 Tc_returning - ){ - VecU8 not_found_case = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Fc), 3); - VecU8 saving_prev = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc_cur_available), 1); - VecU8 ret_found_case = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc_returning), 1); - VecU8_drop(Fc); - VecU8_drop(Tc_cur_available); - VecU8_drop(Tc_returning); - - VecU8_append_vec(result, VecU8_fmt( - "%v %s_%s(%s* self, %v key) {\n" /* RT, set, method_name, set, taking_ref_t_argument */ - SPACE "U64 cur = self->root;\n" - SPACE "while (true){\n" - SPACE SPACE "if (cur == 0) {\n" - "%v" /* not_found_case */ - SPACE SPACE "}\n" - SPACE SPACE "if (%v)\n" /* key_ref_EQUAL_element */ - SPACE SPACE SPACE "break;\n" - SPACE SPACE "if (%v)\n" /* key_ref_LESS_element */ - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "else\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE "}\n" - "%v" /* saving_prev */ - SPACE "%s_empty_index_erase(self, cur);\n" /* set */ - "%v" /* ret_found_case */ - "}\n\n", - RT, set, method_name, set, codegen_rb_tree_set_taking_ref_t_argument(op), - not_found_case, - codegen_rb_tree_set_key_ref_EQUAL_element(op), - codegen_rb_tree_set_key_ref_LESS_element(op), - saving_prev, - set, - ret_found_case - )); -} - -/* src/l1_5/core/rb_tree_node.h is a dependency of all instantiations of rb_tree_set template - * Don't forget to include them - */ -NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op op){ 
- set_instantiation_op_fix(&op); - VecU8 res = VecU8_new(); - VecU8 g_set = get_name_of_rb_tree_set_structure(op); - SpanU8 set = VecU8_to_span(&g_set); - VecU8_append_vec(&res, VecU8_fmt( - "typedef struct {\n" - SPACE "VecRBTreeNode tree;\n" - SPACE "U64 root;\n" - SPACE "Vec%s el;\n", - op.T)); - if (op.guest_data_T.len > 0) { - VecU8_append_vec(&res, VecU8_fmt(SPACE "%s guest;\n", op.guest_data_T)); - } - VecU8_append_vec(&res, VecU8_fmt( - "} %s;\n\n", set)); - - if (op.guest_data_T.len > 0) { - VecU8_append_vec(&res, VecU8_fmt( - "NODISCARD %s %s_new(%s guest) {\n" /* set, set, op.guest_data_T */ - SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new(), .guest = guest};\n" /* set, op.T */ - "}\n\n", set, set, op.guest_data_T, set, op.T)); - VecU8_append_vec(&res, VecU8_fmt( - "NODISCARD %s %s_new_reserved(%s guest, size_t size) {\n" /* set, set, op.guest_data_T */ - SPACE "return (%s){.tree = (VecRBTreeNode){\n" - SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n" - SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size), .guest = guest};\n" /* set, op.T */ - "}\n\n", set, set, op.guest_data_T, set, op.T)); - } else { - VecU8_append_vec(&res, VecU8_fmt( - "NODISCARD %s %s_new() {\n" /* set, set */ - SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new()};\n" /* set, op.T */ - "}\n\n", set, set, set, op.T)); - VecU8_append_vec(&res, VecU8_fmt( - "NODISCARD %s %s_new_reserved(size_t size) {\n" /* set, set */ - SPACE "return (%s){.tree = (VecRBTreeNode){\n" - SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n" - SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size)};\n" /* set, op.T */ - "}\n\n", set, set, set, op.T)); - } - - VecU8_append_vec(&res, VecU8_fmt( - "void %s_drop(%s self) {\n" - SPACE "VecRBTreeNode_drop(self.tree);\n" - SPACE "Vec%s_drop(self.el);\n" - "}\n\n", set, 
set, op.T)); - - /* Method _insert() does not try to replace the existing element with equal key, - * it returns true if insertion was done, false if collision happened and key was not inserted */ - codegen_append_rb_tree_set_insert_kind_method(&res, op, set, cstr("insert"), vcstr("bool"), - vcstr("return true;\n"), - op.t_primitive ? - vcstr("return false;\n") : - VecU8_fmt( - "%s_drop(key);\n" /* op.T */ - "return false;\n", - op.T)); - - if (!op.t_integer) { - /* Method _erase_substitute() is a more bald version of _insert() method. It will substitute - * previous element with equal key it it was found. It still returns true if no conflict has happened, though */ - codegen_append_rb_tree_set_insert_kind_method(&res, op, set, cstr("erase_substitute"), vcstr("bool"), - vcstr("return true;\n"), - op.t_primitive ? - vcstr("return false;\n") : - VecU8_fmt( - "%s_drop(self->el.buf[cur - 1]);\n" - "self->el.buf[cur - 1] = key;\n" - "return false;\n", - op.T)); - - /* Method _pop_substitute() is just like _erase_substitute(), but it returns a previous key - * that was overthrown after collision. Wrapped in option, ofcourse */ - codegen_append_rb_tree_set_insert_kind_method(&res, op, set, cstr("pop_substitute"), - codegen_rb_tree_set_option_returned_value_t(op), - VecU8_fmt("return %v;\n", codegen_rb_tree_set_none_t(op)), - VecU8_fmt( - "%s old = self->el.buf[cur - 1];\n" /* op.T */ - "self->el.buf[cur - 1] = key;\n" - "return %v;", /* Some_T(old) */ - op.T, codegen_rb_tree_set_some_t(op, cstr("old")))); - } - - /* Erasing time!!!! */ - - VecU8_append_vec(&res, VecU8_fmt( +void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set){ + VecU8_append_vec(res, VecU8_fmt( "/* UNSAFE. 
Use when you dropped the symbol that is about to be deleted */\n" "void %s_empty_index_erase(%s* self, U64 z) {\n" /* set, set */ SPACE "assert(z != 0 && z < self->tree.len);\n" @@ -373,206 +443,199 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op SPACE SPACE "RBTree_fix_after_delete(self->tree.buf, &self->root, x);\n" "}\n\n", set, set)); +} - codegen_append_rb_tree_set_erase_kind_method(&res, op, set, cstr("erase"), vcstr("bool"), +void codegen_append_rb_tree_map__erase_kind_method( + VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT, + VecU8 Fc, VecU8 Tc_cur_available, VecU8 Tc_returning + ){ + VecU8 not_found_case = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Fc), 3); + VecU8 saving_prev = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc_cur_available), 1); + VecU8 ret_found_case = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc_returning), 1); + VecU8_drop(Fc); + VecU8_drop(Tc_cur_available); + VecU8_drop(Tc_returning); + + VecU8_append_vec(res, VecU8_fmt( + "%v %s_%s(%s* self, %v key) {\n" /* RT, set, method_name, set, taking_ref_t_argument */ + SPACE "U64 cur = self->root;\n" + SPACE "while (true){\n" + SPACE SPACE "if (cur == 0) {\n" + "%v" /* not_found_case */ + SPACE SPACE "}\n" + SPACE SPACE "if (%v)\n" /* key_ref_EQUAL_element */ + SPACE SPACE SPACE "break;\n" + SPACE SPACE "if (%v)\n" /* key_ref_LESS_element */ + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE "else\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE "}\n" + "%v" /* saving_prev */ + SPACE "%s_empty_index_erase(self, cur);\n" /* set */ + "%v" /* ret_found_case */ + "}\n\n", + RT, set, method_name, set, codegen_rb_tree_map__taking_ref_k_argument(op), + not_found_case, + codegen_rb_tree_map__key_ref_EQUAL_element(op), + codegen_rb_tree_map__key_ref_LESS_element(op), + saving_prev, + set, + ret_found_case)); +} + +/* When method returns constant pointer to found key (wrapped in Option) we 
will use this type
+ * Ofcourse, it can turn out that it is not generated. So be careful and generate it by yourself
+ */
+NODISCARD VecU8 codegen_rb_tree_map__option_returned_ref_t(map_instantiation_op op, bool mut){
+    /* Constant pointer to an integer is an integer */
+
+    // Because we don't generate such methods if V is void and we have unconditional_equality
+    assert(!op.unconditional_equality || op.V.len > 0);
+    // Because we don't support it
+    assert(!(op.V.len > 0) || op.unconditional_equality);
+    // Key can't be mutable
+    assert(!mut || op.V.len > 0);
+
+    if (op.V.len > 0) {
+        if (op.v_integer)
+            return VecU8_fmt("Option%s", op.V);
+        return mut ? VecU8_fmt("OptionMutRef%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
+    }
+    return op.k_integer ? VecU8_fmt("Option%s", op.K) : VecU8_fmt("OptionRef%s", op.K);
+}
+
+/* Suppose some method returns pointer to key (ofc wrapped in option). And we found what to return
+ * we return it from self->el array */
+NODISCARD VecU8 codegen_rb_tree_map__some_ref_t(map_instantiation_op op){
+    assert(!op.unconditional_equality || op.V.len > 0);
+    assert(!(op.V.len > 0) || op.unconditional_equality);
+
+    if (op.V.len > 0) {
+        if (op.v_integer)
+            return VecU8_fmt("Some_%s(self->el.buf[cur - 1].value)", op.V);
+        /* FIX: el.buf[cur - 1] is a struct, not a pointer — generated code must use '.', not '->' */
+        return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1].value)", op.V);
+    }
+    if (op.k_integer)
+        return VecU8_fmt("Some_%s(self->el.buf[cur - 1])", op.K);
+    return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1])", op.K);
+}
+
+/* Suppose some method returns pointer to key (ofc wrapped in option). But this time we found nothing */
+NODISCARD VecU8 codegen_rb_tree_map__none_ref_t(map_instantiation_op op){
+    assert(!op.unconditional_equality || op.V.len > 0);
+    assert(!(op.V.len > 0) || op.unconditional_equality);
+
+    if (op.V.len > 0)
+        return op.v_integer ? VecU8_fmt("None_%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V);
+    return op.k_integer ?
VecU8_fmt("None_%s()", op.K) : VecU8_fmt("None_Ref%s()", op.K); /* FIX: "None_Ref%s" was missing the "()" call parens */
+}
+
+/* Implementing it for a set was a biggest mistake of my day */
+void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
+    VecU8_append_vec(res, VecU8_fmt(
+        "%v %s_%s(%s%s* self, %v key) {\n" /* option_returned_ref_t, set, mat/at, e/const, set, taking_ref_t_argument */
+        SPACE "U64 cur = self->root;\n"
+        SPACE "while (cur != 0) {\n"
+        SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */
+        SPACE SPACE SPACE "return %v;\n" /* some_ref_t */
+        SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */
+        SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n"
+        SPACE SPACE "} else {\n"
+        SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n"
+        SPACE SPACE "}\n"
+        SPACE "}\n"
+        SPACE "return %v;\n" /* none_ref_t */
+        "}\n\n",
+        codegen_rb_tree_map__option_returned_ref_t(op, mut), set, mut ? cstr("mat") : cstr("at"),
+        mut ? cstr("") : cstr("const "), set, codegen_rb_tree_map__taking_ref_k_argument(op),
+
+        codegen_rb_tree_map__key_ref_EQUAL_element(op),
+        codegen_rb_tree_map__some_ref_t(op),
+        codegen_rb_tree_map__key_ref_LESS_element(op),
+        codegen_rb_tree_map__none_ref_t(op)
+    ));
+}
+
+NODISCARD VecU8 get_name_of_rb_tree_set_structure(set_instantiation_op op){
+    if (op.alternative_comp_set_name_embed.len)
+        return VecU8_fmt("BuffRBTreeBy%s_Set%s", op.alternative_comp_set_name_embed, op.T);
+    return VecU8_fmt("BuffRBTree_Set%s", op.T);
+}
+
+/* src/l1_5/core/rb_tree_node.h is a dependency of all instantiations of rb_tree_set template
+ * Don't forget to include them
+ * */
+NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op op){
+    set_instantiation_op_fix(&op);
+    VecU8 res = VecU8_new();
+    VecU8 g_set = get_name_of_rb_tree_set_structure(op);
+    SpanU8 set = VecU8_to_span(&g_set);
+
+    map_instantiation_op map_op = {.K = op.T,
+        .k_integer = op.t_integer, .k_primitive = op.t_primitive, .k_clonable = op.t_clonable,
+        .V =
cstr(""), .v_primitive = true, .v_clonable = true, + .alternative_equal = op.alternative_equal, .alternative_less = op.alternative_less, + .alternative_comp_map_name_embed = op.alternative_comp_set_name_embed, .guest_data_T = op.guest_data_T, + .unconditional_equality = op.unconditional_equality + }; + + codegen_append_rb_tree_map__structure_and_simplest_methods(&res, map_op, set, op.T); + + /* Method _insert() does not try to replace the existing element with equal key, + * it returns true if insertion was done, false if collision happened and key was not inserted */ + codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("insert"), vcstr("bool"), + vcstr("return true;\n"), + op.t_primitive ? + vcstr("return false;\n") : + VecU8_fmt( + "%s_drop(key);\n" /* op.T */ + "return false;\n", + op.T)); + + if (!op.unconditional_equality) { + /* Method _erase_substitute() is a more bald version of _insert() method. It will substitute + * previous element with equal key if it was found. It still returns true if no conflict has happened, though */ + codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("erase_substitute"), vcstr("bool"), + vcstr("return true;\n"), + op.t_primitive ? + vcstr("return false;\n") : + VecU8_fmt( + "%s_drop(self->el.buf[cur - 1]);\n" + "self->el.buf[cur - 1] = key;\n" + "return false;\n", + op.T)); + + /* Method _pop_substitute() is just like _erase_substitute(), but it returns a previous key + * that was overthrown after collision. Wrapped in option, ofcourse */ + codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("pop_substitute"), + VecU8_fmt("Option%s", op.T), + VecU8_fmt("return None_%s();\n", op.T), + VecU8_fmt( + "%s saved = self->el.buf[cur - 1];\n" /* op.T */ + "self->el.buf[cur - 1] = key;\n" + "return Some_%s(saved);", /* op.V */ + op.T, op.T)); + } + + /* Erasing time!!!! 
*/ + codegen_append_rb_tree_map__method_empty_index_erase(&res, set); + + codegen_append_rb_tree_map__erase_kind_method(&res, map_op, set, cstr("erase"), vcstr("bool"), vcstr("return false;\n"), op.t_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1]);\n", op.T), vcstr("return true;\n")); - if (!op.t_integer) { - codegen_append_rb_tree_set_erase_kind_method(&res, op, set, cstr("pop"), - codegen_rb_tree_set_option_returned_value_t(op), - VecU8_fmt("return %v;\n", codegen_rb_tree_set_none_t(op)), + if (!op.unconditional_equality) { + codegen_append_rb_tree_map__erase_kind_method(&res, map_op, set, cstr("pop"), + VecU8_fmt("Option%s", op.T), + VecU8_fmt("return None_%s();\n", op.T), VecU8_fmt("%s saved = self->el.buf[cur - 1];\n", op.T), - VecU8_fmt("return %v;\n", codegen_rb_tree_set_some_t(op, cstr("saved"))) - ); + VecU8_fmt("return Some_%s(saved);\n", op.T)); } - /* We erased enough */ - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ - SPACE "U64 cur = self->root;\n" - SPACE "while (cur != 0 && %v) {\n" /* key reference not equal cur element */ - SPACE SPACE "if (%v) {\n" /* key reference less than cue element */ - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "} else {\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE "}\n" - SPACE "}\n" - SPACE "return cur;\n" - "}\n\n", - set, set, codegen_rb_tree_set_taking_ref_t_argument(op), - codegen_rb_tree_set_key_ref_NOT_EQUAL_element(op), - codegen_rb_tree_set_key_ref_LESS_element(op) - )); - - if (!op.t_integer) { - VecU8_append_vec(&res, VecU8_fmt( - "%v %s_at(const %s* self, %v key) {\n" /* option_returned_ref_t, set, set, taking_ref_t_argument */ - SPACE "U64 cur = self->root;\n" - SPACE "while (cur != 0) {\n" - SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ - SPACE SPACE SPACE "return %v;\n" /* some_ref_t */ - SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ - SPACE 
SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "} else {\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE "}\n" - SPACE "}\n" - SPACE "return %v;\n" /* none_ref_t */ - "}\n\n", - codegen_rb_tree_set_option_returned_ref_t(op), set, set, codegen_rb_tree_set_taking_ref_t_argument(op), - codegen_rb_tree_set_key_ref_EQUAL_element(op), - codegen_rb_tree_set_some_ref_t(op, cstr("cur")), - codegen_rb_tree_set_key_ref_LESS_element(op), - codegen_rb_tree_set_none_ref_t(op) - )); - } - - VecU8_append_vec(&res, VecU8_fmt( - "NODISCARD %s %s_clone(const %s* self){\n" /* set, set, set */ - SPACE "return (%s){.tree = VecRBTreeNode_clone(&self->tree), .root = self->root,\n" /* set */ - SPACE SPACE ".el = Vec%s_clone(&self->el)%s};\n" /* op.T, whether to clone guest or no */ - "}\n\n", - set, set, set, - set, - op.T, op.guest_data_T.len > 0 ? cstr(", .guest = self->guest") : cstr(""))); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_next(const %s* self, U64 x){\n" - SPACE "assert(x != 0 && x < self->tree.len);\n" - SPACE "if (self->tree.buf[x].right != 0)\n" - SPACE SPACE "return RBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[x].right);\n" - SPACE "while (true) {\n" - SPACE SPACE "U64 p = self->tree.buf[x].parent;\n" - SPACE SPACE "if (p == 0)\n" - SPACE SPACE SPACE "return 0;\n" - SPACE SPACE "if (self->tree.buf[p].left == x)\n" - SPACE SPACE SPACE "return p;\n" - SPACE SPACE "x = p;\n" - SPACE "}\n" - "}\n\n", set, set)); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_prev(const %s* self, U64 x){\n" - SPACE "assert(x != 0 && x < self->tree.len);\n" - SPACE "if (self->tree.buf[x].left != 0)\n" - SPACE SPACE "return RBTree_maximum_in_subtree(self->tree.buf, self->tree.buf[x].left);\n" - SPACE "while (true) {\n" - SPACE SPACE "U64 p = self->tree.buf[x].parent;\n" - SPACE SPACE "if (p == 0)\n" - SPACE SPACE SPACE "return 0;\n" - SPACE SPACE "if (self->tree.buf[p].right == x)\n" - SPACE SPACE SPACE "return p;\n" - SPACE 
SPACE "x = p;\n" - SPACE "}\n" - "}\n\n", set, set)); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_min(const %s* self) {\n" - SPACE "return self->root != 0 ? RBTree_minimum_in_subtree(self->tree.buf, self->root) : 0;\n" - "}\n\n", set, set)); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_max(const %s* self) {\n" - SPACE "return self->root != 0 ? RBTree_maximum_in_subtree(self->tree.buf, self->root) : 0;\n" - "}\n\n", set, set)); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_max_less(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ - SPACE "U64 last_less = 0;\n" - SPACE "U64 cur = self->root;\n" - SPACE "while (cur != 0) {\n" - SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE SPACE "if (cur == 0)\n" - SPACE SPACE SPACE SPACE "return last_less;\n" - SPACE SPACE SPACE "while (self->tree.buf[cur].right != 0)\n" - SPACE SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE SPACE "return cur;\n" - SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "} else {\n" - SPACE SPACE SPACE "last_less = cur;\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE "}\n" - SPACE "}\n" - SPACE "return last_less;\n" - "}\n\n", - set, set, codegen_rb_tree_set_taking_ref_t_argument(op), - codegen_rb_tree_set_key_ref_EQUAL_element(op), - codegen_rb_tree_set_key_ref_LESS_element(op) - )); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_max_less_or_eq(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ - SPACE "U64 last_less = 0;\n" - SPACE "U64 cur = self->root;\n" - SPACE "while (cur != 0) {\n" - SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ - SPACE SPACE SPACE "return cur;\n" - SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "} else {\n" - SPACE SPACE SPACE 
"last_less = cur;\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE "}\n" - SPACE "}\n" - SPACE "return last_less;\n" - "}\n\n", - set, set, codegen_rb_tree_set_taking_ref_t_argument(op), - codegen_rb_tree_set_key_ref_EQUAL_element(op), - codegen_rb_tree_set_key_ref_LESS_element(op) - )); - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_min_grtr(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ - SPACE "U64 last_grtr = 0;\n" - SPACE "U64 cur = self->root;\n" - SPACE "while (cur != 0) {\n" - SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE SPACE "if (cur == 0)\n" - SPACE SPACE SPACE SPACE "return last_grtr;\n" - SPACE SPACE SPACE "while (self->tree.buf[cur].left != 0)\n" - SPACE SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE SPACE "return cur;\n" - SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ - SPACE SPACE SPACE "last_grtr = cur;\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "} else {\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE "}\n" - SPACE "}\n" - SPACE "return last_grtr;\n" - "}\n\n", - set, set, codegen_rb_tree_set_taking_ref_t_argument(op), - codegen_rb_tree_set_key_ref_EQUAL_element(op), - codegen_rb_tree_set_key_ref_LESS_element(op) - )); - - - VecU8_append_vec(&res, VecU8_fmt( - "U64 %s_find_min_grtr_or_eq(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */ - SPACE "U64 last_grtr = 0;\n" - SPACE "U64 cur = self->root;\n" - SPACE "while (cur != 0) {\n" - SPACE SPACE "if (%v) {\n" /* key_ref_EQUAL_element */ - SPACE SPACE SPACE "return cur;\n" - SPACE SPACE "} else if (%v) {\n" /* key_ref_LESS_element */ - SPACE SPACE SPACE "last_grtr = cur;\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" - SPACE SPACE "} else {\n" - SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" - SPACE SPACE "}\n" - SPACE "}\n" - SPACE "return 
last_grtr;\n"
-        "}\n\n",
-        set, set, codegen_rb_tree_set_taking_ref_t_argument(op),
-        codegen_rb_tree_set_key_ref_EQUAL_element(op),
-        codegen_rb_tree_set_key_ref_LESS_element(op)
-    ));
+    if (!op.unconditional_equality)
+        codegen_append_rb_tree_map__method_at(&res, map_op, set, false);
 
     VecU8_drop(g_set);
     return res;
@@ -608,4 +671,72 @@ void generate_rb_tree_Set_templ_inst_guarded_header(
     finish_header(head);
 }
 
+NODISCARD VecU8 get_name_of_rb_tree_map_structure(map_instantiation_op op){
+    if (op.alternative_comp_map_name_embed.len)
+        return VecU8_fmt("BuffRBTreeBy%s_Map%sTo%s", op.alternative_comp_map_name_embed, op.K, op.V);
+    return VecU8_fmt("BuffRBTree_Map%sTo%s", op.K, op.V);
+}
+
+NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op op){
+    map_instantiation_op_fix(&op);
+    VecU8 res = VecU8_new();
+    VecU8 map_g = get_name_of_rb_tree_map_structure(op);
+    SpanU8 map = VecU8_to_span(&map_g);
+    /* NOTE(review): kvp (the key-value element type name) reuses the MAP structure name —
+     * this looks like a copy-paste slip; confirm whether a dedicated KVP-name helper should be used */
+    VecU8 kvp_g = get_name_of_rb_tree_map_structure(op);
+    SpanU8 kvp = VecU8_to_span(&kvp_g);
+
+    codegen_append_rb_tree_map__structure_and_simplest_methods(&res, op, map, kvp);
+
+    /* FIX: removed stray VecU8_append_span(&res, cstr("return false\n")) — debug residue that
+     * injected a bare "return false" into the generated header */
+    /* FIX: %s_drop format args below took op.k_primitive/op.v_primitive (bools) instead of the
+     * type names op.K/op.V — same convention as the erase method further down */
+    codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("insert"), vcstr("bool"),
+        vcstr("return true;\n"),
+        VecU8_fmt("%v%v" "return false;\n",
+            op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(key);\n", op.K),
+            op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(value);\n", op.V)));
+
+    /* FIX: value slot was assigned `key`; the drop of the old value was gated on k_primitive
+     * and formatted with a bool — now gated on v_primitive and formatted with op.V */
+    codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("erase_substitute"), vcstr("bool"),
+        vcstr("return true;\n"),
+        VecU8_fmt("%v" "self->el.buf[cur - 1].key = key;\n" "%v" "self->el.buf[cur - 1].value = value;\n"
+            "return false;\n",
+            op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
+            op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)
+        ));
+
+    codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("pop_substitute"),
+        VecU8_fmt("Option%s", op.V),
+        VecU8_fmt("return None_%s();\n", op.V), /* FIX: None_%s was missing "()" */
+        VecU8_fmt(
+            "%v" "self->el.buf[cur - 1].key = key;\n" /**/
+            "%s saved = self->el.buf[cur - 1].value;\n" /* op.V */
+            "self->el.buf[cur - 1].value = value;\n"
+            "return Some_%s(saved);\n", /* op.V */
+            op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
+            op.V, op.V));
+
+    /* Erasing time!!!! */
+    codegen_append_rb_tree_map__method_empty_index_erase(&res, map);
+
+    codegen_append_rb_tree_map__erase_kind_method(&res, op, map, cstr("erase"), vcstr("bool"),
+        vcstr("return false;\n"),
+        VecU8_fmt("%v%v",
+            op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
+            op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)),
+        vcstr("return true;\n"));
+
+    codegen_append_rb_tree_map__erase_kind_method(&res, op, map, cstr("pop"),
+        VecU8_fmt("Option%s", op.V),
+        VecU8_fmt("return None_%s();\n", op.V),
+        VecU8_fmt("%v" "%s saved = self->el.buf[cur - 1].value;\n",
+            op.k_primitive ?
vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K), + op.V), + VecU8_fmt("return Some_%s(saved);\n", op.V)); + /* We erased enough */ + + codegen_append_rb_tree_map__method_at(&res, op, map, false); + codegen_append_rb_tree_map__method_at(&res, op, map, true); + + return res; +} + #endif \ No newline at end of file diff --git a/src/l2/margaret/vulkan_memory_claire.h b/src/l2/margaret/vulkan_memory_claire.h index 6fec63a..36554ef 100644 --- a/src/l2/margaret/vulkan_memory_claire.h +++ b/src/l2/margaret/vulkan_memory_claire.h @@ -254,19 +254,31 @@ typedef struct { typedef struct MargaretMemAllocator MargaretMemAllocator; MargaretMemAllocator MargaretMemAllocator_new( - VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types, double alpha); + VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types); -/* Vibe check */ -bool MargaretMemAllocator_request_needs_silence(MargaretMemAllocator* self, MargaretMemAllocatorRequest req); + +/* Demands + Warnings */ +typedef struct { + /* If for some memory types we will do defragmentation, MargaretMemAllocator warns us that for + * these memory types position-structures, that it filled, will be updated. If these values + * (buffer/image handlers + sub-buffer positions) are dependencies of other + * objects, these objects need to be updated (or rebuilt) */ + U32 defragmented_mem_types; + /* If for some set of requests MargaretMemAllocator needs to execute some Vulkan copying commands, + * it will demand you to actually execute the command buffer that you gave it. If this is `true` it does + * not necessarily mean that defragmentation is happening right now, no, defragmentation is indicated by + * `defragmented_mem_types` warning field, but if you are doing a DIRECT BUFFER (sub-buffer in terms of + * vulkan) RESIZE, this sub-buffer may be copied. 
+ * It won't affect other data structures in your memory, + * of course, (still, notice that position of your sub-buffer will be updated). + */ + bool need_command_buffer; +} MargaretMemAllocatorDemands; /* Appends copying commands into cmd_buff. It may append none. Defragmentation, device memory relocation -* need copying commands, but buffer resize may also require copying. -* If silence is needed, silence flag should be set, otherwise method aborts. You can use -* _request_needs_silence method to check if silence is needed, but if you know for sure that you already have -* silence anyway, you can pass `silence=true`. -* Returned value: true if some_commands were appended to cmd_buff and need to be executed before any further -* actions with memory managed by Self would make any sense */ -bool MargaretMemAllocator_carry_out_request(MargaretMemAllocator* self, VkCommandBuffer cmd_buff, bool silence); +* need copying commands, but buffer resize may also require copying */ +MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request( + MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest request); void MargaretMemAllocator_wipe_old(MargaretMemAllocator* self); @@ -371,6 +383,11 @@ typedef struct { void* mapped_memory; } MargaretMemAllocatorOneBlock; +void MargaretMemAllocatorOneBlock_drop(MargaretMemAllocatorOneBlock self){ + BuffRBTree_SetMargaretMemoryOccupation_drop(self.occupied_memory); + BuffRBTree_SetMargaretBufferOccupationSubBuffer_drop(self.occupied_buffers); +} + #include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorOneBlock.h" /* Used to enumerate both free memory segments in VkDeviceMemory @@ -432,8 +449,30 @@ typedef const MargaretFreeMemSegment* RefMargaretFreeMemSegment; #include "../../../gen/l1_5/eve/margaret/BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment.h" #include "../../../gen/l1/eve/margaret/OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment.h" +typedef struct{ + U32 old_mem_block_id; + U64 
old_start; + U64 old_len; + U32 new_mem_block_id; + U64 new_start; + U64 new_len; +} MargaretOldBufferResizeRecord; + +#include "../../../gen/l1/eve/margaret/VecMargaretOldBufferResizeRecord.h" + typedef struct { VecMargaretMemAllocatorOneBlock blocks; + /* old_blocks is usually empty. BUT! When you generated a defragmentation command buffer with + * MargaretMemAllocator_carry_out_request, this vector will be filled with old blocks, while + * `blocks` vector will be filled with newly created blocks. + * All references in `answer-structures` will be immediately modified, so that they would point + * to the right block. After you execute the command buffer, + * that MargaretMemAllocator_carry_out_request generates, you can (and should) wipe out old blocks + */ + VecMargaretMemAllocatorOneBlock old_blocks; + /* If your previous set of requests did not cause defragmentation, it could cause relocation of some data + * in a subbuffer that you wanted to resize */ + VecMargaretOldBufferResizeRecord old_buff_resize_record; OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment free_space_in_memory[MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP]; VkMemoryPropertyFlags mem_properties; } MargaretMemAllocatorOneMemType; @@ -459,23 +498,23 @@ typedef struct { #include "../../../gen/l1/eve/margaret/VecMargaretBufferKindInfo.h" +#define MARGARET_ALLOC_MAX_ALLOWED_BUFFER_JUTTING 255 + /* VkDevice and VkPhysicalDevice stay remembered here. 
Don't forget that, please */
 struct MargaretMemAllocator {
     VecMargaretMemAllocatorOneMemType mem_types;
     VecMargaretBufferKindInfo buffer_types;
-    double alpha;
     VkDevice device;
     VkPhysicalDevice physical_device;
 };
 
 MargaretMemAllocator MargaretMemAllocator_new(
-    VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types, double alpha
-    ){
+    VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types){
     VkPhysicalDeviceMemoryProperties phd_props;
     vkGetPhysicalDeviceMemoryProperties(physical_device, &phd_props);
     assert(phd_props.memoryTypeCount < VK_MAX_MEMORY_TYPES);
     MargaretMemAllocator self = {.buffer_types = VecMargaretBufferKindInfo_new_zeroinit(buffer_types.len),
-        .alpha = alpha, .device = device, .physical_device = physical_device,
+        .device = device, .physical_device = physical_device,
         .mem_types = VecMargaretMemAllocatorOneMemType_new_zeroinit(phd_props.memoryTypeCount)};
 
     for (size_t i = 0; i < buffer_types.len; i++) {
@@ -525,25 +564,60 @@ MargaretMemAllocator MargaretMemAllocator_new(
     return self;
 }
 
-bool MargaretMemAllocator_request_needs_silence(MargaretMemAllocator* self, MargaretMemAllocatorRequest req){
-    return false;
-    // todo
-}
-
-bool MargaretMemAllocator_carry_out_request(MargaretMemAllocator* self, VkCommandBuffer cmd_buff, bool silence){
-    return false;
-    // todo: add OLD flag to mem allocator and like uh, fuck this shit man bruh lol deng
-    // todo:
+MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
+    MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest request
+    ){
+    MargaretMemAllocator_wipe_old(self);
+    for (U8 mi = 0; mi < (U8)self->mem_types.len; mi++) {
+        MargaretMemAllocatorOneMemType* x = &self->mem_types.buf[mi];
+        (void)x; /* silence unused-variable warning until request processing is implemented */
+        // for (U64 i = 0; i < request->)
+    }
+    return (MargaretMemAllocatorDemands){.defragmented_mem_types = 0, .need_command_buffer = false};
 }
 
 void MargaretMemAllocator_wipe_old(MargaretMemAllocator*
self){
-    // todo
+    for (U64 mi = 0; mi < self->mem_types.len; mi++) {
+        MargaretMemAllocatorOneMemType* m = &self->mem_types.buf[mi];
+        assert(!m->old_blocks.len || !m->old_buff_resize_record.len);
+        while (m->old_blocks.len > 0) {
+            MargaretMemAllocatorOneBlock block = VecMargaretMemAllocatorOneBlock_pop(&m->old_blocks);
+            assert(((m->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) ==
+                (block.mapped_memory != NULL));
+            if (block.mapped_memory)
+                vkUnmapMemory(self->device, block.mem_hand); /* FIX: takes VkDeviceMemory, not the mapped void* */
+            { /* destroying images and buffers from this block. Binary tree detour takes O(n) time */
+                U64 set_it = BuffRBTree_SetMargaretMemoryOccupation_find_min(&block.occupied_memory);
+                while (set_it > 0) {
+                    assert(set_it < block.occupied_memory.tree.len && set_it > 0);
+                    const MargaretMemoryOccupation* occ = &block.occupied_memory.el.buf[set_it - 1];
+                    if (occ->variant == MargaretMemoryOccupation_Buffer) {
+                        const MargaretMemoryOccupationBuffer* wb = &occ->buf;
+                        vkDestroyBuffer(self->device, wb->buffer, NULL);
+                    } else if (occ->variant == MargaretMemoryOccupation_Image) {
+                        const MargaretMemoryOccupationImage* wi = &occ->img;
+                        vkDestroyImage(self->device, wi->image, NULL);
+                    }
+                    set_it = BuffRBTree_SetMargaretMemoryOccupation_find_next(&block.occupied_memory, set_it);
+                }
+            }
+            vkFreeMemory(self->device, block.mem_hand, NULL);
+            /* FIX: the popped block's occupation sets were leaked — release them */
+            MargaretMemAllocatorOneBlock_drop(block);
+        }
+        /* MargaretOldBufferResizeRecord is a primitive datatype */
+        for (U64 ri = 0; ri < m->old_buff_resize_record.len; ri++) {
+            const MargaretOldBufferResizeRecord* resize = &m->old_buff_resize_record.buf[ri];
+            if (resize->old_mem_block_id != resize->new_mem_block_id || resize->old_start != resize->new_start) {
+                // OptionMargaretBufferOccupationSubBuffer delete_me = BuffRBTree_SetMargaretBufferOccupationSubBuffer_pop(&)
+                // todo: AAAAAAAAAAAAA rewrite it all using maps
+            }
+        }
+        /* NOTE(review): old_buff_resize_record is iterated but never emptied here — confirm
+         * whether "wipe" should also clear these records */
+    }
+}
 
 char* MargaretMemAllocator_get_host_visible_buffer_ptr(
     const MargaretMemAllocator* self, const MargaretMemAllocatorBufferPosition* pos){
check(pos->memory_type_id < VK_MAX_MEMORY_TYPES); const MargaretMemAllocatorOneMemType* memtype = &self->mem_types.buf[pos->memory_type_id]; + assert(memtype->old_blocks.len == 0); check((memtype->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)); check(pos->memory_allocation_id < memtype->blocks.len); const MargaretMemAllocatorOneBlock* bl = &memtype->blocks.buf[pos->memory_allocation_id]; diff --git a/src/l2/tests/data_structures/t2.c b/src/l2/tests/data_structures/t2.c index 4e1c894..3c23214 100644 --- a/src/l2/tests/data_structures/t2.c +++ b/src/l2/tests/data_structures/t2.c @@ -169,4 +169,23 @@ void vkDestroyImage( VkImage image, const VkAllocationCallbacks* pAllocator); +typedef int VkMemoryMapFlags; + +VkResult vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +void vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +void vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + #include "../../margaret/vulkan_memory_claire.h" \ No newline at end of file