Compare commits

...

2 Commits

20 changed files with 1776 additions and 571 deletions

View File

@ -54,6 +54,7 @@ add_executable(codegen_l1_5 src/l1_5/anne/codegen.c)
#target_link_libraries(3_render_test -lwayland-client -lm -lvulkan -lxkbcommon)
#add_executable(l2t0_2 src/l2/tests/data_structures/t0_2.c) // todo: I will get back
add_executable(l2t0_3 src/l2/tests/data_structures/t0_3.c)
add_executable(l2t2 src/l2/tests/data_structures/t2.c)
#add_executable(l2t0 src/l2/tests/data_structures/t0.c)

View File

@ -60,7 +60,6 @@ out/l2/t0: src/l2/tests/data_structures/t0.c $(HEADERS_gen_l1_5)
.PHONY: run_l2_t0
run_l2_t0: out/l2/t0
mkdir -p src/l2/tests/data_structures/GRAPHS
cd src/l2/tests/data_structures && ../../../../out/l2/t0
out/l2/r0: src/l2/tests/r0/r0.c $(HEADERS_src_l2) $(l_wl_protocols)

View File

@ -10,7 +10,7 @@
#include "margaret/margaret_misc.h"
#include "marie/graphics_geom.h"
#include "liza.h"
#include "codegen_from_l1_5.h"
#include "embassy_l1_5.h"
#include "margaret/png_pixel_masses.h"
int main() {

View File

@ -10,7 +10,7 @@ void generate_l1_headers_for_l1_5() {
SpanU8 ns = cstr("embassy_l1_5");
generate_eve_span_company_for_primitive(l, ns, cstr("NamedVariableRecordRef"), false, true);
generate_eve_span_company_for_primitive(l, ns, cstr("NamedMethodSignatureRecordRef"), false, true);
generate_eve_span_company_for_primitive(l, ns, cstr("RBTreeNode"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("BufRBTreeNode"), true, false);
}
#endif

View File

@ -30,6 +30,9 @@ void generate_margaret_eve_for_vulkan_utils() {
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferKindDescription"), false, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretOldBufferResizeRecord"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretResizeToNascentRecord"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretSubBufRelocationRequest"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns, cstr("VecMargaretSubBufRelocationRequest"), true, false);
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
/* We won't need to clone this type, like, at all... It is actually clonable, but we just made
* it non-clonable */
@ -62,7 +65,8 @@ void generate_margaret_eve_for_vulkan_utils() {
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeImage"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestResizeSubBuffer"), true, false);
generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
.T = cstr("MargaretMemAllocatorRequestResizeSubBuffer"), .t_primitive = true, .vec_extended = true});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocImage"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns,

View File

@ -38,6 +38,10 @@ void generate_headers_for_r0_r1_r2_r3() {
generate_eve_span_company_for_primitive(l, ns, cstr("I_FishNode"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("J_AlphaVertex"), true, false);
}
// mkdir_nofail("l1/eve/ds_test");
// { /* This structure is needed for testing purposes only */
// generate_eve_span_company_for_primitive(l, cstr("ds_test"), cstr("RefRBTreeNode_S64"), true, false);
// }
}
#endif

View File

@ -85,4 +85,35 @@ NODISCARD VecU8 prepend_spaces_to_SpanU8_lines(SpanU8 lines, int tabulation){
return res;
}
void generate_SOME_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, VecU8 body, VecU8 name){
VecU8 text = VecU8_fmt("/* Automatically generated file. Don't edit it.\n"
"* Don't include it in more than one place */\n\n%v", body);
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, name, 0);
write_whole_file_or_abort((const char*)nt_path.buf, VecU8_to_span(&text));
VecU8_drop(nt_path);
VecU8_drop(text);
}
void generate_SOME_templ_inst_guarded_header(SpanU8 layer, SpanU8 bonus_ns, VecU8 all_dependencies, VecU8 body, VecU8 name){
assert(layer.len > 1);
VecU8 path = VecU8_fmt("%s/%s%s%v.h", layer, bonus_ns, bonus_ns.len ? cstr("/") : cstr(""), name);
GeneratedHeader head = begin_header(VecU8_to_span(&path));
VecU8_drop(path);
VecU8_append_vec(&head.result, all_dependencies);
VecU8_append_span(&head.result, cstr("\n"));
VecU8_append_vec(&head.result, body);
finish_header(head);
}
/* Assumed we are at some_layer/bonus_ns/header.h header */
NODISCARD VecU8 codegen_include_relative_to_root(SpanU8 bonus_ns, SpanU8 abs_path){
VecU8 res = vcstr("#include \"../../");
int to_my_layer = get_number_of_parts_in_header_namespace(bonus_ns);
for (int i = 0; i < to_my_layer; i++)
VecU8_append_span(&res, cstr("../"));
VecU8_append_span(&res, abs_path);
VecU8_append_span(&res, cstr("\"\n"));
return res;
}
#endif

View File

@ -70,7 +70,7 @@ float pow2f(float x) {
}
bool U64_is_2pow(U64 n){
return n > 0 && (n & (n - 1) == 0);
return n > 0 && ((n & (n - 1)) == 0);
}
U8 U64_2pow_log(U64 n){

View File

@ -1,14 +1,21 @@
#ifndef prototype1_src_l1_5_anne_l1_5_templ_very_base_h
#define prototype1_src_l1_5_anne_l1_5_templ_very_base_h
#include "../codegen/rb_tree_set_map_template_inst.h"
#include "../codegen/buff_rbtree_set_map_template_inst.h"
#include "../codegen/rbtree_set_map_template_inst.h"
void generate_l1_5_template_instantiation_for_base_types(){
SpanU8 l = cstr("l1_5"), ns = cstr("");
generate_rb_tree_Set_templ_inst_guarded_header(l, ns,cstr("#include \"../l1/VecAndSpan_U64.h\""),
generate_buf_rbtree_Set_templ_inst_guarded_header(l, ns, cstr("#include \"../l1/VecAndSpan_U64.h\""),
(set_instantiation_op){.T = cstr("U64"), .t_integer = true});
generate_rb_tree_Set_templ_inst_guarded_header(l, ns, cstr("#include \"../l1/VecAndSpan_S64.h\""),
generate_buf_rbtree_Set_templ_inst_guarded_header(l, ns, cstr("#include \"../l1/VecAndSpan_S64.h\""),
(set_instantiation_op){.T = cstr("S64"), .t_integer = true});
// l1/core/int_primitives is included in l1_5/core/rb_tree_node.h, hence no additional dependencies needed
generate_rbtree_Set_templ_inst_guarded_header(l, ns, cstr(""), (set_instantiation_op){
.T = cstr("U64"), .t_integer = true }, true);
generate_rbtree_Set_templ_inst_guarded_header(l, ns, cstr(""), (set_instantiation_op){
.T = cstr("S64"), .t_integer = true }, true);
}
#endif

View File

@ -1,7 +1,7 @@
#ifndef prototype1_src_l1_5_anne_margaret_h
#define prototype1_src_l1_5_anne_margaret_h
#include "../codegen/rb_tree_set_map_template_inst.h"
#include "../codegen/buff_rbtree_set_map_template_inst.h"
void generate_l1_5_template_instantiations_for_margaret(){
SpanU8 l = cstr("l1_5"), ns = cstr("margaret");
@ -9,15 +9,14 @@ void generate_l1_5_template_instantiations_for_margaret(){
mkdir_nofail("l1_5/eve/margaret");
/* For MargaretMemAllocator */
generate_rb_tree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
generate_buf_rbtree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
.T = cstr("MargaretFreeMemSegment"),
.t_primitive = true,
.alternative_less = cstr("MargaretFreeMemSegment_less"),
.alternative_equal = cstr("MargaretFreeMemSegment_equal"),
.alternative_comp_set_name_embed = cstr("Len"),
.unconditional_equality = true,
});
generate_rb_tree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
generate_buf_rbtree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
.T = cstr("MargaretFreeMemSegment"),
.t_primitive = true,
/* comparison takes additional U8 parameter */
@ -25,16 +24,13 @@ void generate_l1_5_template_instantiations_for_margaret(){
.alternative_equal = cstr("MargaretFreeMemSegment_equal_resp_align"),
.alternative_comp_set_name_embed = cstr("LenRespAlign"),
.guest_data_T = cstr("U8"),
.unconditional_equality = true,
});
generate_rb_tree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
.K = cstr("U64"), .k_integer = true, .V = cstr("MargaretMemoryOccupation"), /* MargaretMemoryOccupation is not primitive */
.unconditional_equality = true
});
generate_rb_tree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
.K = cstr("U64"), .k_integer = true, .V = cstr("MargaretBufferOccupationSubBuffer"), .v_primitive = true,
.unconditional_equality = true
});
// generate_buf_rbtree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
// .K = cstr("U64"), .k_integer = true, .V = cstr("MargaretMemoryOccupation"), /* MargaretMemoryOccupation is not primitive */
// });
// generate_buf_rbtree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
// .K = cstr("U64"), .k_integer = true, .V = cstr("MargaretBufferOccupationSubBuffer"), .v_primitive = true,
// });
}
#endif

View File

@ -17,12 +17,6 @@ typedef struct {
* Leave empty if you don't need guest data (GT = void)
* GT must be primitive, or, even better, be integer */
SpanU8 guest_data_T;
/* If `unconditional_equality` is set, methods, that are needed to return value T
* or reference to T are not generated.
* I, alas, wrote support for `unconditional_equality = false` but it should be noted, that
* `unconditional_equality = false` it is absolutely useless */
bool unconditional_equality;
} set_instantiation_op;
void set_instantiation_op_fix(set_instantiation_op* self){
@ -40,8 +34,6 @@ void set_instantiation_op_fix(set_instantiation_op* self){
assert(self->alternative_comp_set_name_embed.len > 0);
assert(self->alternative_equal.len > 0 && self->alternative_less.len > 0);
}
if (self->t_integer && self->alternative_comp_set_name_embed.len == 0)
self->unconditional_equality = true;
}
/* We assume K and V are trivially movable */
@ -60,20 +52,21 @@ typedef struct {
SpanU8 alternative_comp_map_name_embed;
SpanU8 guest_data_T;
/* If `unconditional_equality` is set, methods, that are needed to return value (K, V)
* or reference to K and V are generated such that they return only V part. I don't plan to support
* `unconditional_equality=false`
*/
bool unconditional_equality;
} map_instantiation_op;
void map_instantiation_op_fix(map_instantiation_op* self){
assert(self->K.len > 0);
if (self->k_integer)
self->k_primitive = true;
if (self->k_primitive)
self->k_clonable = true;
assert(self->K.len > 0 && self->V.len > 0);
if (self->V.len == 0)
self->v_primitive = true;
if (self->v_integer)
self->v_primitive = true;
if (self->v_primitive)
self->v_clonable = true;
assert((self->alternative_less.len == 0 && self->alternative_equal.len == 0
&& self->alternative_comp_map_name_embed.len == 0
)||(
@ -83,10 +76,6 @@ void map_instantiation_op_fix(map_instantiation_op* self){
assert(self->alternative_comp_map_name_embed.len > 0);
assert(self->alternative_equal.len > 0 && self->alternative_less.len > 0);
}
if (self->k_integer && self->alternative_comp_map_name_embed.len == 0)
self->unconditional_equality = true;
if (!self->unconditional_equality)
abortf("map_instantiation_op_fix::unconditional_equality = false isn't supported\n");
}
#endif

View File

@ -3,99 +3,99 @@
#include "all_set_map_templ_util_inst.h"
SpanU8 codegen_rb_tree_map__key_of_cur_el(map_instantiation_op op){
SpanU8 codegen_buff_rbtree_map__key_of_cur_el(map_instantiation_op op){
return op.V.len > 0 ? cstr("self->el.buf[cur - 1].key") : cstr("self->el.buf[cur - 1]");
}
/* When key is given by value into some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_value_NOT_EQUAL_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_value_NOT_EQUAL_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_equal.len > 0);
if (op.k_integer)
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_equal.len > 0) {
if (op.k_integer)
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(&key, &%s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key != %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(&key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key != %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(&key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When key is given by value into some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_value_LESS_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_value_LESS_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_less.len > 0);
if (op.k_integer)
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_less.len > 0) {
if (op.k_integer)
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(&key, &%s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key < %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(&key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key < %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(&key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When key is given by ref into some method of Buff_RBTreeSet
* Ofk when op.T is integer, argument is still taken by a value */
NODISCARD VecU8 codegen_rb_tree_map__key_ref_NOT_EQUAL_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_ref_NOT_EQUAL_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_equal.len > 0);
if (op.k_integer)
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_equal.len > 0) {
if (op.k_integer)
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, %s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s(key, &%s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key != %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key != %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("!%s_equal_%s(key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When key is given by a pointer into some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_ref_EQUAL_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_ref_EQUAL_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_equal.len > 0);
if (op.k_integer)
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_equal.len > 0) {
if (op.k_integer)
return VecU8_fmt("%s(key, %s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_equal, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_equal, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key == %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s_equal_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key == %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s_equal_%s(key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
/* When key is given by a pointer into some method of Buff_RBTreeSet */
NODISCARD VecU8 codegen_rb_tree_map__key_ref_LESS_element(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__key_ref_LESS_element(map_instantiation_op op){
if (op.guest_data_T.len > 0) {
assert(op.alternative_less.len > 0);
if (op.k_integer)
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s, self->guest)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.alternative_less.len > 0) {
if (op.k_integer)
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_less, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, %s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s(key, &%s)", op.alternative_less, codegen_buff_rbtree_map__key_of_cur_el(op));
}
if (op.k_integer)
return VecU8_fmt("key < %s", codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(key, &%s)", op.K, op.K, codegen_rb_tree_map__key_of_cur_el(op));
return VecU8_fmt("key < %s", codegen_buff_rbtree_map__key_of_cur_el(op));
return VecU8_fmt("%s_less_%s(key, &%s)", op.K, op.K, codegen_buff_rbtree_map__key_of_cur_el(op));
}
@ -104,11 +104,11 @@ NODISCARD VecU8 codegen_rb_tree_map__key_ref_LESS_element(map_instantiation_op o
* integer is better than pointer to integer. (Though, notice that _pop family of methods don't exist for
* sets of integers
*/
NODISCARD VecU8 codegen_rb_tree_map__taking_ref_k_argument(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__taking_ref_k_argument(map_instantiation_op op){
return op.k_integer ? VecU8_from_span(op.K) : VecU8_fmt("const %s*", op.K);
}
NODISCARD VecU8 codegen_rb_tree_map__taking_t_argument(map_instantiation_op op){
NODISCARD VecU8 codegen_buff_rbtree_map__taking_t_argument(map_instantiation_op op){
return op.V.len > 0 ? VecU8_fmt("%s key, %s value", op.K, op.V) : VecU8_fmt("%s key", op.K);
}
@ -116,12 +116,12 @@ NODISCARD VecU8 codegen_rb_tree_map__taking_t_argument(map_instantiation_op op){
* set is either a set name or a map name. If we are instantiating set, TT is op.T from set options, if we are
* instantiating a map, TT is KVP{op.K}To{op.V} from map options
* */
void codegen_append_rb_tree_map__structure_and_simplest_methods(
void codegen_append_buff_rbtree_map__structure_and_simplest_methods(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 TT
){
VecU8_append_vec(res, VecU8_fmt(
"typedef struct {\n"
SPACE "VecRBTreeNode tree;\n"
SPACE "VecBufRBTreeNode tree;\n"
SPACE "U64 root;\n"
SPACE "Vec%s el;\n"
"%v"
@ -131,14 +131,14 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
if (op.guest_data_T.len > 0) {
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new(%s guest) {\n" /* set, set, op.guest_data_T */
SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new(), .guest = guest};\n" /* set, TT */
SPACE "return (%s){.tree = VecBufRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new(), .guest = guest};\n" /* set, TT */
"}\n\n",
set, set, op.guest_data_T,
set, TT));
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new_reserved(%s guest, size_t size) {\n" /* set, set, op.guest_data_T */
SPACE "return (%s){.tree = (VecRBTreeNode){\n" /* set */
SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE "return (%s){.tree = (VecBufRBTreeNode){\n" /* set */
SPACE SPACE ".buf = (BufRBTreeNode*)safe_calloc(size + 1, sizeof(BufRBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size), .guest = guest};\n" /* TT */
"}\n\n",
set, set, op.guest_data_T,
@ -146,14 +146,14 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
} else {
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new() {\n" /* set, set */
SPACE "return (%s){.tree = VecRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new()};\n" /* set, TT */
SPACE "return (%s){.tree = VecBufRBTreeNode_new_zeroinit(1), .root = 0, .el = Vec%s_new()};\n" /* set, TT */
"}\n\n",
set, set,
set, TT));
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_new_reserved(size_t size) {\n" /* set, set */
SPACE "return (%s){.tree = (VecRBTreeNode){\n"
SPACE SPACE ".buf = (RBTreeNode*)safe_calloc(size + 1, sizeof(RBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE "return (%s){.tree = (VecBufRBTreeNode){\n"
SPACE SPACE ".buf = (BufRBTreeNode*)safe_calloc(size + 1, sizeof(BufRBTreeNode)), .len = 1, .capacity = size + 1},\n"
SPACE SPACE ".root = 0, .el = Vec%s_new_reserved(size)};\n" /* set, TT */
"}\n\n",
set, set,
@ -162,12 +162,19 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
VecU8_append_vec(res, VecU8_fmt(
"void %s_drop(%s self) {\n" /* set, set */
SPACE "VecRBTreeNode_drop(self.tree);\n"
SPACE "VecBufRBTreeNode_drop(self.tree);\n"
SPACE "Vec%s_drop(self.el);\n" /* TT */
"}\n\n", set, set, TT));
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find(const %s* self, %v key) {\n" /* set, set, taking_ref_t_argument */
"void %s_sink(%s* self) {\n" /* set, set */
SPACE "self->tree.len = 1;\n"
SPACE "self->tree.buf[0] = (BufRBTreeNode){0};\n"
SPACE "Vec%s_sink(&self->el, 0);\n" /* TT */
"}\n\n", set, set, TT));
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find(const %s* self, %v key) {\n" /* set, set, taking_ref_k_argument */
SPACE "U64 cur = self->root;\n"
SPACE "while (cur != 0 && %v) {\n" /* key reference not equal cur element */
SPACE SPACE "if (%v) {\n" /* key reference less than cur element */
@ -178,16 +185,16 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return cur;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_NOT_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_NOT_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
if (op.k_clonable && op.v_clonable) {
VecU8_append_vec(res, VecU8_fmt(
"NODISCARD %s %s_clone(const %s* self){\n" /* set, set, set */
SPACE "return (%s){.tree = VecRBTreeNode_clone(&self->tree), .root = self->root,\n" /* set */
SPACE "return (%s){.tree = VecBufRBTreeNode_clone(&self->tree), .root = self->root,\n" /* set */
SPACE SPACE ".el = Vec%s_clone(&self->el)%s};\n" /* TT, whether to clone guest or no */
"}\n\n",
set, set, set,
@ -195,11 +202,12 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
TT, op.guest_data_T.len > 0 ? cstr(", .guest = self->guest") : cstr("")));
}
// todo: move to common code
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find_next(const %s* self, U64 x){\n"
SPACE "assert(x != 0 && x < self->tree.len);\n"
SPACE "if (self->tree.buf[x].right != 0)\n"
SPACE SPACE "return RBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[x].right);\n"
SPACE SPACE "return BufRBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[x].right);\n"
SPACE "while (true) {\n"
SPACE SPACE "U64 p = self->tree.buf[x].parent;\n"
SPACE SPACE "if (p == 0)\n"
@ -210,11 +218,12 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
"}\n\n", set, set));
// todo: move to comon code
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find_prev(const %s* self, U64 x){\n"
SPACE "assert(x != 0 && x < self->tree.len);\n"
SPACE "if (self->tree.buf[x].left != 0)\n"
SPACE SPACE "return RBTree_maximum_in_subtree(self->tree.buf, self->tree.buf[x].left);\n"
SPACE SPACE "return BufRBTree_maximum_in_subtree(self->tree.buf, self->tree.buf[x].left);\n"
SPACE "while (true) {\n"
SPACE SPACE "U64 p = self->tree.buf[x].parent;\n"
SPACE SPACE "if (p == 0)\n"
@ -227,12 +236,12 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find_min(const %s* self) {\n"
SPACE "return self->root != 0 ? RBTree_minimum_in_subtree(self->tree.buf, self->root) : 0;\n"
SPACE "return self->root != 0 ? BufRBTree_minimum_in_subtree(self->tree.buf, self->root) : 0;\n"
"}\n\n", set, set));
VecU8_append_vec(res, VecU8_fmt(
"U64 %s_find_max(const %s* self) {\n"
SPACE "return self->root != 0 ? RBTree_maximum_in_subtree(self->tree.buf, self->root) : 0;\n"
SPACE "return self->root != 0 ? BufRBTree_maximum_in_subtree(self->tree.buf, self->root) : 0;\n"
"}\n\n", set, set));
VecU8_append_vec(res, VecU8_fmt(
@ -256,9 +265,9 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_less;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
VecU8_append_vec(res, VecU8_fmt(
@ -277,9 +286,9 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_less;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
VecU8_append_vec(res, VecU8_fmt(
@ -303,9 +312,9 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_grtr;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
@ -325,15 +334,15 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "}\n"
SPACE "return last_grtr;\n"
"}\n\n",
set, set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op)
set, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op)
));
}
/* Generates methods _insert() _pop_substitute() _erase_substitute() for SetT
* Takes ownership of strings Tc, Fc */
void codegen_append_rb_tree_map__insert_kind_method(
// todo: no need for a separate method. Just write _try_insert() and derive all the shit from it
void codegen_append_buf_rbtree_map__insert_kind_method(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT, VecU8 Tc, VecU8 Fc
){
VecU8 Tc_root = prepend_spaces_to_SpanU8_lines(VecU8_to_span(&Tc), 2);
@ -343,6 +352,8 @@ void codegen_append_rb_tree_map__insert_kind_method(
VecU8_drop(Tc);
VecU8_drop(Fc);
// todo: fix it. No buffered rbttrees here. Only my pure poijter basa[ks tree
VecU8 line_that_appends_new_el_to_el_vec = op.V.len > 0 ?
VecU8_fmt("VecKVP%sTo%s_append(&self->el, (KVP%sTo%s){.key = key, .value = value});", op.K, op.V, op.K, op.V) :
VecU8_fmt("Vec%s_append(&self->el, key);", op.K);
@ -351,7 +362,7 @@ void codegen_append_rb_tree_map__insert_kind_method(
"%v %s_%s(%s* self, %v) {\n" /* RT, set, method_name, set, taking_t_argument */
SPACE "if (self->root == 0) {\n"
SPACE SPACE "assert(self->tree.len == 1);\n"
SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.color = RBTree_black});\n"
SPACE SPACE "VecBufRBTreeNode_append(&self->tree, (BufRBTreeNode){.color = RBTree_black});\n"
SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */
SPACE SPACE "self->root = 1;\n"
"%v" /* Tc_root */
@ -365,9 +376,9 @@ void codegen_append_rb_tree_map__insert_kind_method(
SPACE SPACE SPACE "} else { \n"
/* We are inserting to the left of cur */
SPACE SPACE SPACE SPACE "U64 n = self->tree.len;\n"
SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "VecBufRBTreeNode_append(&self->tree, (BufRBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "self->tree.buf[cur].left = n;\n"
SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "BufRBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */
"%v" /* Tc_on_left */
/* Should have returned by now in Tc*/
@ -378,9 +389,9 @@ void codegen_append_rb_tree_map__insert_kind_method(
SPACE SPACE SPACE "} else {\n"
/* We are inserting to the right of cur */
SPACE SPACE SPACE SPACE "U64 n = self->tree.len;\n"
SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "VecBufRBTreeNode_append(&self->tree, (BufRBTreeNode){.parent = cur, .color = RBTree_red});\n"
SPACE SPACE SPACE SPACE "self->tree.buf[cur].right = n;\n"
SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "BufRBTree_fix_after_insert(self->tree.buf, &self->root, n);\n"
SPACE SPACE SPACE SPACE "%s\n" /* line_that_appends_new_el_to_el_vec */
"%v" /* Tc_on_right */
/* Should have returned by now in Tc*/
@ -390,11 +401,11 @@ void codegen_append_rb_tree_map__insert_kind_method(
"%v" /* Fc_exists */
/* Should have returned by now in Tc*/
"}\n\n",
RT, set, method_name, set, codegen_rb_tree_map__taking_t_argument(op),
RT, set, method_name, set, codegen_buff_rbtree_map__taking_t_argument(op),
VecU8_to_span(&line_that_appends_new_el_to_el_vec), // !!
Tc_root,
codegen_rb_tree_map__key_value_NOT_EQUAL_element(op),
codegen_rb_tree_map__key_value_LESS_element(op),
codegen_buff_rbtree_map__key_value_NOT_EQUAL_element(op),
codegen_buff_rbtree_map__key_value_LESS_element(op),
VecU8_to_span(&line_that_appends_new_el_to_el_vec),
Tc_on_left,
VecU8_to_span(&line_that_appends_new_el_to_el_vec),
@ -405,12 +416,13 @@ void codegen_append_rb_tree_map__insert_kind_method(
VecU8_drop(line_that_appends_new_el_to_el_vec);
}
void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set){
// todo: no need for a separate function. Do just like in normal RBTree
void codegen_append_buff_rbtree_map__method_empty_index_erase(VecU8* res, SpanU8 set){
VecU8_append_vec(res, VecU8_fmt(
"/* UNSAFE. Use when you dropped the symbol that is about to be deleted */\n"
"void %s_empty_index_erase(%s* self, U64 z) {\n" /* set, set */
SPACE "assert(z != 0 && z < self->tree.len);\n"
SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : RBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n"
SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : BufRBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n"
SPACE "U64 x = self->tree.buf[y].left != 0 ? self->tree.buf[y].left : self->tree.buf[y].right;\n"
SPACE "assert(x != y && x != z);\n"
SPACE "U64 x_adopter = self->tree.buf[y].parent;\n"
@ -423,13 +435,13 @@ void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set
SPACE SPACE "self->tree.buf[x_adopter].right = x;\n"
SPACE "RBTreeColor y_org_clr = self->tree.buf[y].color;\n"
SPACE "if (z != y) {\n"
SPACE SPACE "RBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n"
SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n"
SPACE SPACE "if (x_adopter == z)\n"
SPACE SPACE SPACE "x_adopter = y;\n"
SPACE "}\n"
SPACE "U64 L = self->el.len;\n"
SPACE "if (L != z) {\n"
SPACE SPACE "RBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n"
SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n"
SPACE SPACE "self->el.buf[z-1] = self->el.buf[L-1];\n"
SPACE SPACE "if (L == x)\n"
SPACE SPACE SPACE "x = z;\n"
@ -440,12 +452,13 @@ void codegen_append_rb_tree_map__method_empty_index_erase(VecU8* res, SpanU8 set
SPACE "self->tree.len--;\n"
SPACE "self->el.len--;\n"
SPACE "if (y_org_clr == RBTree_black)\n"
SPACE SPACE "RBTree_fix_after_delete(self->tree.buf, &self->root, x);\n"
SPACE SPACE "BufRBTree_fix_after_delete(self->tree.buf, &self->root, x);\n"
"}\n\n",
set, set));
}
void codegen_append_rb_tree_map__erase_kind_method(
// todo: no need for a separate method.
void codegen_append_buff_rbtree_map__erase_kind_method(
VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 method_name, VecU8 RT,
VecU8 Fc, VecU8 Tc_cur_available, VecU8 Tc_returning
){
@ -474,68 +487,39 @@ void codegen_append_rb_tree_map__erase_kind_method(
SPACE "%s_empty_index_erase(self, cur);\n" /* set */
"%v" /* ret_found_case */
"}\n\n",
RT, set, method_name, set, codegen_rb_tree_map__taking_ref_k_argument(op),
RT, set, method_name, set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
not_found_case,
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__key_ref_LESS_element(op),
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__key_ref_LESS_element(op),
saving_prev,
set,
ret_found_case));
}
/* When method returns constant pointer to found key (wrapped in Option) we will use this type
* Of course, it can turn out that it is not generated. So be careful and generate it by yourself
*/
NODISCARD VecU8 codegen_rb_tree_map__option_returned_ref_t(map_instantiation_op op, bool mut){
/* Constant pointer to an integer is an integer */
// Because we don't generate such methods if V is void and we have unconditional_equality
assert(!op.unconditional_equality || op.V.len > 0);
// Because we don't support it
assert(!(op.V.len > 0) || op.unconditional_equality);
// Key can't be mutable
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("Option%s", op.V);
return mut ? VecU8_fmt("OptionRefMut%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
}
return op.k_integer ? VecU8_fmt("Option%s", op.K) : VecU8_fmt("OptionRef%s", op.K);
NODISCARD VecU8 codegen_buff_rbtree_map__option_returned_ref_v(map_instantiation_op op, bool mut){
assert(op.V.len > 0);
if (op.v_integer)
return VecU8_fmt("Option%s", op.V);
return mut ? VecU8_fmt("OptionRefMut%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
}
NODISCARD VecU8 codegen_rb_tree_map__some_ref_t(map_instantiation_op op, bool mut){
assert(!op.unconditional_equality || op.V.len > 0);
assert(!(op.V.len > 0) || op.unconditional_equality);
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1].value)", op.V);
if (mut)
return VecU8_fmt("Some_RefMut%s(&self->el.buf[cur - 1].value)", op.V);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1].value)", op.V);
}
if (op.k_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1])", op.K);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1])", op.K);
NODISCARD VecU8 codegen_buff_rbtree_map__some_ref_v(map_instantiation_op op, bool mut){
assert(op.V.len > 0);
if (op.v_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1].value)", op.V);
if (mut)
return VecU8_fmt("Some_RefMut%s(&self->el.buf[cur - 1].value)", op.V);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1].value)", op.V);
}
NODISCARD VecU8 codegen_rb_tree_map__none_ref_t(map_instantiation_op op, bool mut){
assert(!op.unconditional_equality || op.V.len > 0);
assert(!(op.V.len > 0) || op.unconditional_equality);
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("None_%s()", op.V);
return mut ? VecU8_fmt("None_RefMut%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V) ;
}
return op.k_integer ? VecU8_fmt("None_%s()", op.K) : VecU8_fmt("None_Ref%s()", op.K);
NODISCARD VecU8 codegen_buff_rbtree_map__none_ref_v(map_instantiation_op op, bool mut){
assert(op.V.len > 0);
if (op.v_integer)
return VecU8_fmt("None_%s()", op.V);
return mut ? VecU8_fmt("None_RefMut%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V) ;
}
/* Implementing it for a set was the biggest mistake of my day */
void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
void codegen_append_buff_rbtree_map__method_at(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
VecU8_append_vec(res, VecU8_fmt(
"%v %s_%s(%s%s* self, %v key) {\n" /* option_returned_ref_t, set, mat/at, e/const, set, taking_ref_t_argument */
SPACE "U64 cur = self->root;\n"
@ -550,17 +534,17 @@ void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op,
SPACE "}\n"
SPACE "return %v;\n" /* none_ref_t */
"}\n\n",
codegen_rb_tree_map__option_returned_ref_t(op, mut), set, mut ? cstr("mat") : cstr("at"),
mut ? cstr("") : cstr("const "), set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_buff_rbtree_map__option_returned_ref_v(op, mut), set, mut ? cstr("mat") : cstr("at"),
mut ? cstr("") : cstr("const "), set, codegen_buff_rbtree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__some_ref_t(op, mut),
codegen_rb_tree_map__key_ref_LESS_element(op),
codegen_rb_tree_map__none_ref_t(op, mut)
codegen_buff_rbtree_map__key_ref_EQUAL_element(op),
codegen_buff_rbtree_map__some_ref_v(op, mut),
codegen_buff_rbtree_map__key_ref_LESS_element(op),
codegen_buff_rbtree_map__none_ref_v(op, mut)
));
}
NODISCARD VecU8 get_name_of_rb_tree_set_structure(set_instantiation_op op){
NODISCARD VecU8 get_name_of_buf_rbtree_set_structure(set_instantiation_op op){
if (op.alternative_comp_set_name_embed.len)
return VecU8_fmt("BuffRBTreeBy%s_Set%s", op.alternative_comp_set_name_embed, op.T);
return VecU8_fmt("BuffRBTree_Set%s", op.T);
@ -569,10 +553,10 @@ NODISCARD VecU8 get_name_of_rb_tree_set_structure(set_instantiation_op op){
/* src/l1_5/core/rb_tree_node.h is a dependency of all instantiations of rb_tree_set template
* Don't forget to include them
* */
NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op op){
NODISCARD VecU8 generate_buf_rbtree_Set_template_instantiation(set_instantiation_op op){
set_instantiation_op_fix(&op);
VecU8 res = VecU8_new();
VecU8 g_set = get_name_of_rb_tree_set_structure(op);
VecU8 g_set = get_name_of_buf_rbtree_set_structure(op);
SpanU8 set = VecU8_to_span(&g_set);
map_instantiation_op map_op = {.K = op.T,
@ -580,14 +564,13 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
.V = cstr(""), .v_primitive = true, .v_clonable = true,
.alternative_equal = op.alternative_equal, .alternative_less = op.alternative_less,
.alternative_comp_map_name_embed = op.alternative_comp_set_name_embed, .guest_data_T = op.guest_data_T,
.unconditional_equality = op.unconditional_equality
};
codegen_append_rb_tree_map__structure_and_simplest_methods(&res, map_op, set, op.T);
codegen_append_buff_rbtree_map__structure_and_simplest_methods(&res, map_op, set, op.T);
/* Method _insert() does not try to replace the existing element with equal key,
* it returns true if insertion was done, false if collision happened and key was not inserted */
codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("insert"), vcstr("bool"),
codegen_append_buf_rbtree_map__insert_kind_method(&res, map_op, set, cstr("insert"), vcstr("bool"),
vcstr("return true;\n"),
op.t_primitive ?
vcstr("return false;\n") :
@ -596,48 +579,13 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
"return false;\n",
op.T));
if (!op.unconditional_equality) {
/* Method _erase_substitute() is a more bald version of _insert() method. It will substitute
* previous element with equal key if it was found. It still returns true if no conflict has happened, though */
codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("erase_substitute"), vcstr("bool"),
vcstr("return true;\n"),
op.t_primitive ?
vcstr("return false;\n") :
VecU8_fmt(
"%s_drop(self->el.buf[cur - 1]);\n"
"self->el.buf[cur - 1] = key;\n"
"return false;\n",
op.T));
codegen_append_buff_rbtree_map__method_empty_index_erase(&res, set);
/* Method _pop_substitute() is just like _erase_substitute(), but it returns a previous key
* that was overthrown after collision. Wrapped in option, ofcourse */
codegen_append_rb_tree_map__insert_kind_method(&res, map_op, set, cstr("pop_substitute"),
VecU8_fmt("Option%s", op.T),
VecU8_fmt("return None_%s();\n", op.T),
VecU8_fmt(
"%s saved = self->el.buf[cur - 1];\n" /* op.T */
"self->el.buf[cur - 1] = key;\n"
"return Some_%s(saved);", /* op.V */
op.T, op.T));
}
codegen_append_rb_tree_map__method_empty_index_erase(&res, set);
codegen_append_rb_tree_map__erase_kind_method(&res, map_op, set, cstr("erase"), vcstr("bool"),
codegen_append_buff_rbtree_map__erase_kind_method(&res, map_op, set, cstr("erase"), vcstr("bool"),
vcstr("return false;\n"),
op.t_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1]);\n", op.T),
vcstr("return true;\n"));
if (!op.unconditional_equality) {
codegen_append_rb_tree_map__erase_kind_method(&res, map_op, set, cstr("pop"),
VecU8_fmt("Option%s", op.T),
VecU8_fmt("return None_%s();\n", op.T),
VecU8_fmt("%s saved = self->el.buf[cur - 1];\n", op.T),
VecU8_fmt("return Some_%s(saved);\n", op.T));
codegen_append_rb_tree_map__method_at(&res, map_op, set, false);
}
VecU8_append_vec(&res, VecU8_fmt(
"const %s* %s_at_iter(const %s* self, U64 it) {\n" /* op.T, set, set */
SPACE "assert(0 < it && it < self->tree.len);\n"
@ -650,38 +598,23 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
}
void generate_rb_tree_Set_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, set_instantiation_op op) {
VecU8 text = VecU8_from_cstr("/* Automatically generated file. Do not edit it.\n"
" * Do not include it in more than one place */\n\n");
VecU8_append_vec(&text, generate_rb_tree_Set_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_rb_tree_set_structure(op), 0);
write_whole_file_or_abort((const char*)nt_path.buf, VecU8_to_span(&text));
VecU8_drop(nt_path);
VecU8_drop(text);
void generate_buf_rbtree_Set_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, set_instantiation_op op) {
generate_SOME_templ_inst_eve_header(layer, bonus_ns,
generate_buf_rbtree_Set_template_instantiation(op), get_name_of_buf_rbtree_set_structure(op));
}
void generate_rb_tree_Set_templ_inst_guarded_header(
void generate_buf_rbtree_Set_templ_inst_guarded_header(
SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, set_instantiation_op op
){
assert(layer.len > 1);
VecU8 path = VecU8_fmt("%s/%s%s%v.h", layer, bonus_ns, bonus_ns.len ? cstr("/") : cstr(""),
get_name_of_rb_tree_set_structure(op));
GeneratedHeader head = begin_header(VecU8_to_span(&path));
VecU8_drop(path);
VecU8_append_span(&head.result, cstr("#include \"../../"));
int to_my_layer = get_number_of_parts_in_header_namespace(bonus_ns);
for (int i = 0; i < to_my_layer; i++)
VecU8_append_span(&head.result, cstr("../"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, dependencies);
VecU8_append_span(&head.result, cstr("\n\n"));
VecU8_append_vec(&head.result, generate_rb_tree_Set_template_instantiation(op));
finish_header(head);
VecU8 all_dependencies = VecU8_fmt("%v%s",
codegen_include_relative_to_root(bonus_ns, cstr("src/l1_5/core/buff_rb_tree_node.h")), dependencies);
generate_SOME_templ_inst_guarded_header(layer, bonus_ns, all_dependencies,
generate_buf_rbtree_Set_template_instantiation(op), get_name_of_buf_rbtree_set_structure(op));
}
/* ========= Now we add Map<K, V> into the picture ======== */
void codegen_append_rb_tree_map__method_at_iter(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
void codegen_append_buff_rbtree_map__method_at_iter(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
assert(op.V.len > 0);
VecU8_append_vec(res, VecU8_fmt(
"void %s_%s(%s%s* self, U64 it, %v* ret_key, %v* ret_value) {\n" /* set, method name, self access modifier, set, key ret ptr, value ret ptr */
@ -696,30 +629,31 @@ void codegen_append_rb_tree_map__method_at_iter(VecU8* res, map_instantiation_op
op.k_integer ? cstr("") : cstr("&"), op.v_integer ? cstr("") : cstr("&")));
}
NODISCARD VecU8 get_name_of_rb_tree_map_structure(map_instantiation_op op){
NODISCARD VecU8 get_name_of_buf_rbtree_map_structure(map_instantiation_op op){
if (op.alternative_comp_map_name_embed.len)
return VecU8_fmt("BuffRBTreeBy%s_Map%sTo%s", op.alternative_comp_map_name_embed, op.K, op.V);
return VecU8_fmt("BuffRBTree_Map%sTo%s", op.K, op.V);
}
NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op op){
NODISCARD VecU8 generate_buf_rbtree_Map_template_instantiation(map_instantiation_op op){
assert(op.V.len > 0);
map_instantiation_op_fix(&op);
VecU8 res = VecU8_new();
VecU8 map_g = get_name_of_rb_tree_map_structure(op);
VecU8 map_g = get_name_of_buf_rbtree_map_structure(op);
SpanU8 map = VecU8_to_span(&map_g);
VecU8 kvp_g = VecU8_fmt("KVP%sTo%s", op.K, op.V);
codegen_append_rb_tree_map__structure_and_simplest_methods(&res, op, map, VecU8_to_span(&kvp_g));
codegen_append_buff_rbtree_map__structure_and_simplest_methods(&res, op, map, VecU8_to_span(&kvp_g));
VecU8_drop(kvp_g);
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("insert"), vcstr("bool"),
codegen_append_buf_rbtree_map__insert_kind_method(&res, op, map, cstr("insert"), vcstr("bool"),
vcstr("return true;\n"),
VecU8_fmt("%v%v" "return false;\n",
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(key);\n", op.K),
op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(value);\n", op.V)));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("erase_substitute"), vcstr("bool"),
codegen_append_buf_rbtree_map__insert_kind_method(&res, op, map, cstr("erase_substitute"), vcstr("bool"),
vcstr("return true;\n"),
VecU8_fmt("%v%v"
"self->el.buf[cur - 1].key = key;\n"
@ -729,7 +663,7 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)
));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("pop_substitute"),
codegen_append_buf_rbtree_map__insert_kind_method(&res, op, map, cstr("pop_substitute"),
VecU8_fmt("Option%s", op.V),
VecU8_fmt("return None_%s();\n", op.V),
VecU8_fmt(
@ -741,16 +675,16 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
op.V, op.V));
/* Erasing time!!!! */
codegen_append_rb_tree_map__method_empty_index_erase(&res, map);
codegen_append_buff_rbtree_map__method_empty_index_erase(&res, map);
codegen_append_rb_tree_map__erase_kind_method(&res, op, map, cstr("erase"), vcstr("bool"),
codegen_append_buff_rbtree_map__erase_kind_method(&res, op, map, cstr("erase"), vcstr("bool"),
vcstr("return false;\n"),
VecU8_fmt("%v%v",
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)),
vcstr("return true;\n"));
codegen_append_rb_tree_map__erase_kind_method(&res, op, map, cstr("pop"),
codegen_append_buff_rbtree_map__erase_kind_method(&res, op, map, cstr("pop"),
VecU8_fmt("Option%s", op.V),
VecU8_fmt("return None_%s();\n", op.V),
VecU8_fmt("%v" "%s saved = self->el.buf[cur - 1].value;\n",
@ -759,43 +693,28 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
VecU8_fmt("return Some_%s(saved);\n", op.V));
/* We erased enough */
codegen_append_rb_tree_map__method_at(&res, op, map, false);
codegen_append_rb_tree_map__method_at(&res, op, map, true);
codegen_append_buff_rbtree_map__method_at(&res, op, map, false);
codegen_append_buff_rbtree_map__method_at(&res, op, map, true);
/* These functions breaks by design and returns answer through pointers given in arguments. For greater good ofk */
codegen_append_rb_tree_map__method_at_iter(&res, op, map, false);
codegen_append_rb_tree_map__method_at_iter(&res, op, map, true);
/* These functions break my design and return answer through pointers given in arguments. For greater good ofk */
codegen_append_buff_rbtree_map__method_at_iter(&res, op, map, false);
codegen_append_buff_rbtree_map__method_at_iter(&res, op, map, true);
return res;
}
void generate_rb_tree_Map_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, map_instantiation_op op) {
VecU8 text = VecU8_from_cstr("/* Automatically generated file. Do not edit it.\n"
" * Do not include it in more than one place */\n\n");
VecU8_append_vec(&text, generate_rb_tree_Map_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_rb_tree_map_structure(op), 0);
write_whole_file_or_abort((const char*)nt_path.buf, VecU8_to_span(&text));
VecU8_drop(nt_path);
VecU8_drop(text);
void generate_buf_rbtree_Map_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, map_instantiation_op op) {
generate_SOME_templ_inst_eve_header(layer, bonus_ns,
generate_buf_rbtree_Map_template_instantiation(op), get_name_of_buf_rbtree_map_structure(op));
}
void generate_rb_tree_Map_templ_inst_guarded_header(
void generate_buf_rbtree_Map_templ_inst_guarded_header(
SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, map_instantiation_op op
){
assert(layer.len > 1);
VecU8 path = VecU8_fmt("%s/%s%s%v.h", layer, bonus_ns, bonus_ns.len ? cstr("/") : cstr(""),
get_name_of_rb_tree_map_structure(op));
GeneratedHeader head = begin_header(VecU8_to_span(&path));
VecU8_drop(path);
VecU8_append_span(&head.result, cstr("#include \"../../"));
int to_my_layer = get_number_of_parts_in_header_namespace(bonus_ns);
for (int i = 0; i < to_my_layer; i++)
VecU8_append_span(&head.result, cstr("../"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, dependencies);
VecU8_append_span(&head.result, cstr("\n\n"));
VecU8_append_vec(&head.result, generate_rb_tree_Map_template_instantiation(op));
finish_header(head);
VecU8 all_dependencies = VecU8_fmt("%v%s",
codegen_include_relative_to_root(bonus_ns, cstr("src/l1_5/core/buff_rb_tree_node.h")), dependencies);
generate_SOME_templ_inst_guarded_header(layer, bonus_ns, all_dependencies,
generate_buf_rbtree_Map_template_instantiation(op), get_name_of_buf_rbtree_map_structure(op));
}
#endif
#endif

View File

@ -0,0 +1,443 @@
#ifndef prototype1_src_l1_5_codegen_rbtree_set_map_template_inst_h
#define prototype1_src_l1_5_codegen_rbtree_set_map_template_inst_h
#include "all_set_map_templ_util_inst.h"
/* Name of the generated node struct for one instantiation:
 * maps get RBTreeNode_KVP<K>To<V>, sets get RBTreeNode_<K>. */
NODISCARD VecU8 codegen_rbtree__node_struct_name(map_instantiation_op op){
    if (op.V.len > 0)
        return VecU8_fmt("RBTreeNode_KVP%sTo%s", op.K, op.V);
    return VecU8_fmt("RBTreeNode_%s", op.K);
}
/* Emit the typedef of the per-instantiation node struct: an embedded RBTreeNode `base`
 * header followed by the key (and, for maps, the value) payload. */
NODISCARD VecU8 codegen_rbtree__node_structure(map_instantiation_op op){
    map_instantiation_op_fix(&op);
    VecU8 node_g = codegen_rbtree__node_struct_name(op);
    SpanU8 node = VecU8_to_span(&node_g);
    VecU8 res = VecU8_fmt(
        "typedef struct {\n"
        SPACE "RBTreeNode base;\n"
        SPACE "%s key;\n" /* op.K*/
        /* BUG FIX: this slot was the bare literal "" with no %v, so the value-field
         * argument below was never consumed — maps lost their `value` member and the
         * trailing args were misaligned against the format string. */
        "%v" /* "" / op.V value; */
        "} %s;\n\n", /* node */
        op.K, op.V.len > 0 ? VecU8_fmt(SPACE "%s value;\n", op.V) : vcstr(""),
        node);
    VecU8_drop(node_g);
    return res;
}
/* Expression reading the key stored in the node currently pointed to by `cur`. */
NODISCARD VecU8 codegen_rbtree_map__key_of_cur_el(map_instantiation_op op){
    VecU8 node_name = codegen_rbtree__node_struct_name(op);
    return VecU8_fmt("((%v *)cur)->key", node_name);
}
/* Assuming A and B are passed as intended (ownership of A and B is consumed by %v).
 * Emits the C expression evaluating "A < B" under this instantiation's comparator. */
NODISCARD VecU8 codegen_rbtree_map__less(map_instantiation_op op, VecU8 A, VecU8 B){
    if (op.guest_data_T.len > 0) {
        /* BUG FIX: asserted alternative_equal, but it is alternative_less that is used
         * here — and set instantiations only ever fill alternative_less. */
        assert(op.alternative_less.len > 0);
        return VecU8_fmt("%s(%v, %v, self->guest)", op.alternative_less, A, B);
    }
    /* BUG FIX: was gated on alternative_equal, which set instantiations leave empty,
     * so a supplied alternative_less was silently ignored. */
    if (op.alternative_less.len > 0)
        return VecU8_fmt("%s(%v, %v)", op.alternative_less, A, B);
    if (op.k_integer)
        return VecU8_fmt("%v < %v", A, B);
    /* BUG FIX: was "%s_less_%s(%v %v)" — the missing comma made the generated call
     * `K_less_K(a b)`, which cannot compile. */
    return VecU8_fmt("%s_less_%s(%v, %v)", op.K, op.K, A, B);
}
/* Expression passing the local `key` to a comparator: integers go by value,
 * everything else by address. */
NODISCARD VecU8 codegen_rbtree_map__exp_passing_key_ref(map_instantiation_op op){
    if (op.k_integer)
        return vcstr("key");
    return vcstr("&key");
}
/* Expression passing the key of the node `cur` to a comparator (by value for
 * integer keys, by address otherwise). */
NODISCARD VecU8 codegen_rbtree_map__exp_passing_cur_key(map_instantiation_op op){
    SpanU8 maybe_amp = op.k_integer ? cstr("") : cstr("&");
    return VecU8_fmt("%s" "((%v*)cur)->key", maybe_amp, codegen_rbtree__node_struct_name(op));
}
/* Parameter type used when a method receives a key for lookup only:
 * plain value for integers, const pointer otherwise. */
NODISCARD VecU8 codegen_rbtree_map__taking_ref_k_argument(map_instantiation_op op){
    if (op.k_integer)
        return VecU8_from_span(op.K);
    return VecU8_fmt("const %s*", op.K);
}
/* Parameter list used when a method takes ownership of a new element:
 * "K key, V value" for maps, "K key" for sets. */
NODISCARD VecU8 codegen_rbtree_map__taking_t_argument(map_instantiation_op op){
    if (op.V.len > 0)
        return VecU8_fmt("%s key, %s value", op.K, op.V);
    return VecU8_fmt("%s key", op.K);
}
/* Append to `res` the container struct plus the basic method set (new/drop,
 * find family, try_insert/insert, erase) of one pointer-based red-black tree
 * instantiation.
 *   set : name of the generated container type
 *   TT  : payload type-name suffix (K for sets, KVP<K>To<V> for maps)
 * The generated tree uses a heap-allocated NIL sentinel node instead of NULL. */
void codegen_append_rbtree_map__structure_and_simplest_methods(
    VecU8* res, map_instantiation_op op, SpanU8 set, SpanU8 TT
){
    /* Container: root pointer, sentinel, optional comparator guest data */
    VecU8_append_vec(res, VecU8_fmt(
        "typedef struct {\n"
        SPACE "RBTreeNode* root;\n"
        SPACE "RBTreeNode* NIL;\n"
        "%v" /* "" / guest field */
        "} %s;\n\n",
        /* fix: guest member is now indented like the other members (was at column 0) */
        op.guest_data_T.len == 0 ? vcstr("") : VecU8_fmt(SPACE "%s guest;\n", op.guest_data_T),
        set));
    VecU8_append_vec(res, VecU8_fmt(
        "NODISCARD %s %s_new(" "%v" ") {\n" /* set, set, "" / GT guest */
        /* Only color field initialization is important (should be 0) */
        SPACE "RBTreeNode* NIL = (RBTreeNode*)safe_calloc(1, sizeof(RBTreeNode));\n"
        SPACE "return (%s){.root = NIL, .NIL = NIL" "%s" "};\n" /* set, "" / , .guest = guest */
        "}\n\n",
        set, set, op.guest_data_T.len == 0 ? vcstr("") : VecU8_fmt("%s guest", op.guest_data_T),
        set, op.guest_data_T.len == 0 ? cstr("") : cstr(", .guest = guest")));
    // todo: revisit mutability: the find_* methods take const self but hand back mutable nodes
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_min(const %s* self) {\n" /* TT, set, set */
        SPACE "if (self->root == self->NIL)\n"
        SPACE SPACE "return NULL;\n"
        SPACE "return (RBTreeNode_%s*)RBTreeNode_minimum_in_subtree(self->root, self->NIL);\n" /* TT */
        "}\n\n", TT, set, set, TT));
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_max(const %s* self) {\n" /* TT, set, set */
        SPACE "if (self->root == self->NIL)\n"
        SPACE SPACE "return NULL;\n"
        SPACE "return (RBTreeNode_%s*)RBTreeNode_maximum_in_subtree(self->root, self->NIL);\n" /* TT */
        "}\n\n", TT, set, set, TT));
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_next(const %s* self, RBTreeNode_%s* x){\n" /* TT, set, set, TT */
        SPACE "return (RBTreeNode_%s *)RBTreeNode_find_next((RBTreeNode*)x, self->NIL);\n" /* TT */
        "}\n\n", TT, set, set, TT, TT));
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_prev(const %s* self, RBTreeNode_%s* x){\n" /* TT, set, set, TT */
        SPACE "return (RBTreeNode_%s *)RBTreeNode_find_prev((RBTreeNode*)x, self->NIL);\n" /* TT */
        "}\n\n", TT, set, set, TT, TT));
    /* Here we read and write something to ->left, ->right field of NIL sentinel node.
     * These fields are correct pointers (NULL), so everything is ok */
    VecU8_append_vec(res, VecU8_fmt(
        "void %s_drop(%s self){\n" /* set, set */
        SPACE "RBTreeNode* cur = self.root;\n"
        SPACE "while (cur != self.NIL){\n"
        SPACE SPACE "if (cur->left != self.NIL) {\n"
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (cur->right != self.NIL) {\n"
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        /* This is actually safe, although cur->parent may be self.NIL */
        SPACE SPACE SPACE "if (cur->parent->left == cur)\n"
        SPACE SPACE SPACE SPACE "cur->parent->left = self.NIL;\n"
        /* fix: "else" was missing its newline, fusing the else keyword and its
         * statement onto one generated line */
        SPACE SPACE SPACE "else\n"
        SPACE SPACE SPACE SPACE "cur->parent->right = self.NIL;\n"
        SPACE SPACE SPACE "RBTreeNode* parent = cur->parent;\n"
        "%v" /* "" / tabulation op.K_drop(((RBTreeNode_TT*)cur)->key) */
        "%v" /* "" / tabulation op.V_drop(((RBTreeNode_TT*)cur)->value) */
        SPACE SPACE SPACE "free((void*)cur);\n"
        SPACE SPACE SPACE "cur = parent;\n"
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "free(self.NIL);\n"
        "}\n\n", set, set,
        /* BUG FIX: the cast type was the literal "RBTreeNode_TT" — the TT argument was
         * passed but the format had no %s for it, so generated drop() referenced an
         * undefined type whenever K or V needed dropping. */
        op.k_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE SPACE "%s_drop(((RBTreeNode_%s*)cur)->key);\n", op.K, TT),
        op.v_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE SPACE "%s_drop(((RBTreeNode_%s*)cur)->value);\n", op.V, TT)
    ));
    /* NOTE(review): for non-integer K the `key` parameter below is already const K*,
     * yet exp_passing_key_ref yields "&key" — confirm the K_less_K calling convention
     * for the default-comparator/non-integer path (it was previously uncompilable, so
     * this path is untested). */
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find(const %s* self, %v key) {\n" /* TT, set, set, taking_ref_k_argument */
        SPACE "RBTreeNode* cur = self->root;\n"
        SPACE "while (cur != self->NIL) {\n"
        SPACE SPACE "if (%v) {\n" /* key < cur->key */
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (%v) {\n" /* cur->key < key */
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        SPACE SPACE SPACE "return (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "return NULL;\n"
        "}\n\n",
        TT, set, set, codegen_rbtree_map__taking_ref_k_argument(op),
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_key_ref(op), codegen_rbtree_map__exp_passing_cur_key(op)),
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_cur_key(op), codegen_rbtree_map__exp_passing_key_ref(op)),
        TT));
    /* Largest element strictly less than key (predecessor query). */
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_max_less(const %s* self, %v key) {\n" /* TT, set, set, taking_ref_t_argument */
        SPACE "RBTreeNode_%s* last_less = NULL;\n" /* TT */
        SPACE "RBTreeNode* cur = self->root;\n"
        SPACE "while (cur != self->NIL) {\n"
        SPACE SPACE "if (%v) {\n" /* key < cur->key */
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (%v) {\n" /* cur->key < key */
        SPACE SPACE SPACE "last_less = (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE SPACE "if (cur == self->NIL)\n"
        SPACE SPACE SPACE SPACE "return last_less;\n"
        SPACE SPACE SPACE "while (cur->right != self->NIL)\n"
        SPACE SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE SPACE "return (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "return last_less;\n"
        "}\n\n",
        TT, set, set, codegen_rbtree_map__taking_ref_k_argument(op), TT,
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_key_ref(op), codegen_rbtree_map__exp_passing_cur_key(op)),
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_cur_key(op), codegen_rbtree_map__exp_passing_key_ref(op)),
        TT, TT));
    /* Largest element <= key. */
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_max_less_or_eq(const %s* self, %v key) {\n" /* TT, set, set, taking_ref_t_argument */
        SPACE "RBTreeNode_%s* last_less = NULL;\n" /* TT */
        SPACE "RBTreeNode* cur = self->root;\n"
        SPACE "while (cur != self->NIL) {\n"
        SPACE SPACE "if (%v) {\n" /* key < cur->key */
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (%v) {\n" /* cur->key < key */
        SPACE SPACE SPACE "last_less = (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        SPACE SPACE SPACE "return (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "return last_less;\n"
        "}\n\n",
        TT, set, set, codegen_rbtree_map__taking_ref_k_argument(op), TT,
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_key_ref(op), codegen_rbtree_map__exp_passing_cur_key(op)),
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_cur_key(op), codegen_rbtree_map__exp_passing_key_ref(op)),
        TT, TT));
    /* Smallest element strictly greater than key (successor query). */
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_min_grtr(const %s* self, %v key) {\n" /* TT, set, set, taking_ref_t_argument */
        SPACE "RBTreeNode_%s* last_grtr = NULL;\n" /* TT */
        SPACE "RBTreeNode* cur = self->root;\n"
        SPACE "while (cur != self->NIL) {\n"
        SPACE SPACE "if (%v) {\n" /* key < cur->key */
        SPACE SPACE SPACE "last_grtr = (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (%v) {\n" /* cur->key < key */
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE SPACE "if (cur == self->NIL)\n"
        SPACE SPACE SPACE SPACE "return last_grtr;\n"
        SPACE SPACE SPACE "while (cur->left != self->NIL)\n"
        SPACE SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE SPACE "return (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "return last_grtr;\n"
        "}\n\n",
        TT, set, set, codegen_rbtree_map__taking_ref_k_argument(op), TT,
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_key_ref(op), codegen_rbtree_map__exp_passing_cur_key(op)),
        TT,
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_cur_key(op), codegen_rbtree_map__exp_passing_key_ref(op)),
        TT));
    /* Smallest element >= key. */
    VecU8_append_vec(res, VecU8_fmt(
        "RBTreeNode_%s* %s_find_min_grtr_or_eq(const %s* self, %v key) {\n" /* TT, set, set, taking_ref_t_argument */
        SPACE "RBTreeNode_%s* last_grtr = NULL;\n" /* TT */
        SPACE "RBTreeNode* cur = self->root;\n"
        SPACE "while (cur != self->NIL) {\n"
        SPACE SPACE "if (%v) {\n" /* key < cur->key */
        SPACE SPACE SPACE "last_grtr = (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (%v) {\n" /* cur->key < key */
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        SPACE SPACE SPACE "return (RBTreeNode_%s*)cur;\n" /* TT */
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "return last_grtr;\n"
        "}\n\n",
        TT, set, set, codegen_rbtree_map__taking_ref_k_argument(op), TT,
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_key_ref(op), codegen_rbtree_map__exp_passing_cur_key(op)),
        TT,
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_cur_key(op), codegen_rbtree_map__exp_passing_key_ref(op)),
        TT));
    VecU8_append_vec(res, VecU8_fmt(
        /* This method is unsafe. Arguments key, value will be taken if 0 is returned,
         * or left on their place if not-0 is returned */
        "/* UNSAFE */\n"
        "NODISCARD RBTreeNode_%s* %s_try_insert(%s* self, %v) {\n" /* TT, set, set, taking_t_argument */
        SPACE "RBTreeNode** surprising = &self->root;\n"
        SPACE "RBTreeNode* prev = self->NIL;\n"
        SPACE "RBTreeNode* cur = self->root;\n"
        SPACE "while (cur != self->NIL){\n"
        SPACE SPACE "prev = cur;\n"
        SPACE SPACE "if (%v) {\n" /* key < cur->key */
        SPACE SPACE SPACE "surprising = &cur->left;\n"
        SPACE SPACE SPACE "cur = cur->left;\n"
        SPACE SPACE "} else if (%v) {\n" /* cur->key < key */
        SPACE SPACE SPACE "surprising = &cur->right;\n"
        SPACE SPACE SPACE "cur = cur->right;\n"
        SPACE SPACE "} else {\n"
        SPACE SPACE SPACE "return (RBTreeNode_%s *)cur;\n" /* TT */
        SPACE SPACE "}\n"
        SPACE "}\n"
        SPACE "RBTreeNode_%s* new_node = (RBTreeNode_%s *)safe_malloc(sizeof(RBTreeNode_%s));\n" /* TT, TT, TT */
        SPACE "*new_node = (RBTreeNode_%s){ .base.parent = prev,\n" /* TT */
        /* NOTE(review): other tree code in this project spells the red constant
         * RBTree_red — confirm RBTREE_RED matches rb_tree_node.h */
        SPACE SPACE ".base.left = self->NIL, .base.right = self->NIL, .base.color = RBTREE_RED,\n"
        SPACE SPACE ".key = key" "%s" "};\n" /* "" / ,.value = value */
        SPACE "*surprising = (RBTreeNode*)new_node;\n"
        SPACE "RBTree_fix_after_insert(&self->root, self->NIL, (RBTreeNode*)new_node);\n"
        SPACE "return NULL;\n"
        "}\n\n",
        TT, set, set, codegen_rbtree_map__taking_t_argument(op),
        codegen_rbtree_map__less(op, vcstr("key"), codegen_rbtree_map__exp_passing_cur_key(op)),
        codegen_rbtree_map__less(op, codegen_rbtree_map__exp_passing_cur_key(op), vcstr("key")),
        TT,
        TT, TT, TT,
        TT, op.v_primitive ? cstr("") : cstr(", .value = value")));
    /* Owning insert wrapper: drops key/value on collision instead of leaking them. */
    VecU8_append_vec(res, VecU8_fmt(
        "bool %s_insert(%s* self, %v){\n" /* set, set, taking_t_argument */
        SPACE "RBTreeNode_%s* col = %s_try_insert(self, key" "%s" ");\n" /* TT, set, "" /, value */
        SPACE "if (col == NULL)\n"
        SPACE SPACE "return true;\n"
        "%v" "%v" /* "" / dropping key, "" / dropping value */
        SPACE "return false;\n"
        "}\n\n",
        set, set, codegen_rbtree_map__taking_t_argument(op),
        TT, set, op.V.len > 0 ? cstr(", value") : cstr(""),
        op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(key);\n", op.K),
        op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(value);\n", op.V)));
    /* Erase by key: find, drop payload, then unlink via the generic routine. */
    VecU8_append_vec(res, VecU8_fmt(
        "bool %s_erase(%s* self, %v key) {\n" /* set, set, taking_ref_k_argument */
        SPACE "RBTreeNode_%s* v = %s_find(self, key);\n" /* TT, set */
        SPACE "if (v == NULL)\n"
        SPACE SPACE "return false;\n"
        "%v" /* "" / op.K_drop(v->key) */
        "%v" /* "" / op.V_drop(v->value) */
        SPACE "RBTree_erase_empty_by_iter(&self->root, self->NIL, (RBTreeNode*)v);\n"
        SPACE "return true;\n"
        "}\n\n", set, set, codegen_rbtree_map__taking_ref_k_argument(op),
        TT, set,
        op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(v->key);\n", op.K),
        op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(v->value);\n", op.V)));
}
/* Generated container-type name for a set instantiation; an alternative-comparator
 * embed string becomes part of the name so instantiations don't collide. */
NODISCARD VecU8 get_name_of_rbtree_set_structure(set_instantiation_op op){
    bool has_embed = op.alternative_comp_set_name_embed.len > 0;
    return has_embed
        ? VecU8_fmt("RBTreeBy%s_Set%s", op.alternative_comp_set_name_embed, op.T)
        : VecU8_fmt("RBTree_Set%s", op.T);
}
/* Build the full source text of one RBTree set instantiation. A set is generated as a
 * map with an empty value type. `generate_node_struct` lets callers skip the node
 * typedef when it is emitted elsewhere. */
NODISCARD VecU8 generate_rbtree_Set_template_instantiation(set_instantiation_op op, bool generate_node_struct){
    set_instantiation_op_fix(&op);
    /* Reuse the map codegen with V = "" (value-less entries, primitive placeholder). */
    map_instantiation_op as_map = {
        .K = op.T, .k_integer = op.t_integer, .k_primitive = op.t_primitive, .k_clonable = op.t_clonable,
        .V = cstr(""), .v_primitive = true, .v_clonable = true,
        .alternative_comp_map_name_embed = op.alternative_comp_set_name_embed, .alternative_less = op.alternative_less,
        .guest_data_T = op.guest_data_T
    };
    VecU8 out = VecU8_new();
    if (generate_node_struct)
        VecU8_append_vec(&out, codegen_rbtree__node_structure(as_map));
    VecU8 set_name = get_name_of_rbtree_set_structure(op);
    codegen_append_rbtree_map__structure_and_simplest_methods(&out, as_map, VecU8_to_span(&set_name), op.T);
    VecU8_drop(set_name);
    return out;
}
/* Emits one generated Set instantiation as an "eve" header for the given layer/namespace. */
void generate_rbtree_Set_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, set_instantiation_op op, bool generate_node_struct) {
    VecU8 header_body = generate_rbtree_Set_template_instantiation(op, generate_node_struct);
    VecU8 header_name = get_name_of_rbtree_set_structure(op);
    generate_SOME_templ_inst_eve_header(layer, bonus_ns, header_body, header_name);
}
/* Emits one generated Set instantiation as a guarded header, prepending the
 * shared red-black tree node header to the caller-supplied dependency list. */
void generate_rbtree_Set_templ_inst_guarded_header(
    SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, set_instantiation_op op, bool generate_node_struct
){
    VecU8 node_include = codegen_include_relative_to_root(bonus_ns, cstr("src/l1_5/core/rb_tree_node.h"));
    VecU8 all_dependencies = VecU8_fmt("%v%s", node_include, dependencies);
    generate_SOME_templ_inst_guarded_header(layer, bonus_ns, all_dependencies,
        generate_rbtree_Set_template_instantiation(op, generate_node_struct),
        get_name_of_rbtree_set_structure(op));
}
/* Map counterpart of the Set name builder above: "RBTree_Map<K>To<V>" by default,
 * or "RBTreeBy<Cmp>_Map<K>To<V>" with an embedded comparator name.
 * Caller owns (and must drop) the returned VecU8. */
NODISCARD VecU8 get_name_of_rbtree_map_structure(map_instantiation_op op){
    bool custom_comparator = op.alternative_comp_map_name_embed.len > 0;
    if (!custom_comparator)
        return VecU8_fmt("RBTree_Map%sTo%s", op.K, op.V);
    return VecU8_fmt("RBTreeBy%s_Map%sTo%s", op.alternative_comp_map_name_embed, op.K, op.V);
}
/* Generates the full source of one RBTree map instantiation: the optional node
 * struct, the core structure + simplest methods, and the substitute/pop family:
 *   _pop_substitute: insert (key, value); on key collision the old value is
 *                    returned (ownership to caller) and the old key is dropped.
 *   _erase_substitute (non-primitive V only): like _pop_substitute, but the
 *                    displaced value is dropped instead of returned.
 *   _pop: find by key, drop the stored key, detach the node and hand the
 *         stored value to the caller.
 * Caller owns the returned buffer. */
NODISCARD VecU8 generate_rbtree_Map_template_instantiation(map_instantiation_op op, bool generate_node_struct){
    map_instantiation_op_fix(&op);
    VecU8 res = VecU8_new();
    VecU8 map_g = get_name_of_rbtree_map_structure(op);
    SpanU8 map = VecU8_to_span(&map_g);
    VecU8 kvp_g = VecU8_fmt("KVP%sTo%s", op.K, op.V);
    SpanU8 kvp = VecU8_to_span(&kvp_g);
    if (generate_node_struct)
        VecU8_append_vec(&res, codegen_rbtree__node_structure(op));
    codegen_append_rbtree_map__structure_and_simplest_methods(&res, op, map, kvp);
    VecU8_append_vec(&res, VecU8_fmt(
        "%s" "Option%s %s_pop_substitute(%s* self, %s key, %s value) {\n" /* "" / NODISCARD , op.V, map, map, op.K, op.V */
        /* Using unsafe method with conditional ownership transfer */
        SPACE "RBTreeNode_%s* col = %s_try_insert(self, key, value);\n" /* kvp, map */
        SPACE "if (col == NULL) {\n"
        SPACE SPACE "return None_%s();\n" /* op.V */
        SPACE "} else {\n"
        "%v" /* "" / dropping col->key */
        SPACE SPACE "%s saved = col->value;\n" /* op.V */
        SPACE SPACE "col->key = key;\n"
        SPACE SPACE "col->value = value;\n"
        SPACE SPACE "return Some_%s(saved);\n" /* op.V */
        SPACE "}\n"
        "}\n\n",
        op.v_primitive ? cstr("") : cstr("NODISCARD "), op.V, map, map, op.K, op.V,
        kvp, map, op.V,
        /* BUGFIX: the old-key drop must be gated on K being primitive, not V
         * (consistent with _erase_substitute below and with the Set generator). */
        op.k_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE"%s_drop(col->key);\n", op.K),
        op.V, op.V));
    if (!op.v_primitive) {
        VecU8_append_vec(&res, VecU8_fmt(
            "bool %s_erase_substitute(%s* self, %s key, %s value) {\n" /* map, map, op.K, op.V */
            SPACE "RBTreeNode_%s* col = %s_try_insert(self, key, value);\n" /* kvp, map */
            SPACE "if (col == NULL)\n"
            SPACE SPACE "return true;\n"
            "%v" "%v" /* "" / op.K_drop(col->key), "" / op.V_drop(col->value) */
            SPACE "col->key = key;\n"
            SPACE "col->value = value;\n"
            SPACE "return false;\n"
            "}\n\n",
            map, map, op.K, op.V, kvp, map,
            op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(col->key);\n", op.K),
            op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(col->value);\n", op.V)));
    }
    VecU8_append_vec(&res, VecU8_fmt(
        "Option%s %s_pop(%s* self, %v key) {\n" /* op.V, map, map, taking_ref_k_argument */
        SPACE "RBTreeNode_%s* v = %s_find(self, key);\n" /* kvp, map */
        SPACE "if (v == NULL)\n"
        SPACE SPACE "return None_%s();\n" /* op.V */
        "%v" /* "" / op.K_drop(v->key) */
        /* BUGFIX: this line was missing its SPACE indent */
        SPACE "%s saved = v->value;\n" /* op.V */
        /* BUGFIX: cast node pointer like the Set generator does, the generated
         * call would otherwise pass RBTreeNode_<KVP>* where RBTreeNode* is expected */
        SPACE "RBTree_erase_empty_by_iter(&self->root, self->NIL, (RBTreeNode*)v);\n"
        /* BUGFIX: was "return Some_%s();" — the saved value was never passed out */
        SPACE "return Some_%s(saved);\n" /* op.V */
        "}\n\n",
        op.V, map, map, codegen_rbtree_map__taking_ref_k_argument(op),
        kvp, map, op.V,
        op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(v->key);\n", op.K),
        op.V, op.V));
    /* BUGFIX: the name buffers were leaked; the Set generator drops its own. */
    VecU8_drop(map_g);
    VecU8_drop(kvp_g);
    return res;
}
/* Emits one generated Map instantiation as an "eve" header for the given layer/namespace. */
void generate_rbtree_Map_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, map_instantiation_op op, bool generate_node_struct) {
    VecU8 header_body = generate_rbtree_Map_template_instantiation(op, generate_node_struct);
    VecU8 header_name = get_name_of_rbtree_map_structure(op);
    generate_SOME_templ_inst_eve_header(layer, bonus_ns, header_body, header_name);
}
/* Emits one generated Map instantiation as a guarded header, prepending the
 * shared red-black tree node header to the caller-supplied dependency list. */
void generate_rbtree_Map_templ_inst_guarded_header(
    SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, map_instantiation_op op, bool generate_node_struct
){
    VecU8 node_include = codegen_include_relative_to_root(bonus_ns, cstr("src/l1_5/core/rb_tree_node.h"));
    VecU8 all_dependencies = VecU8_fmt("%v%s", node_include, dependencies);
    generate_SOME_templ_inst_guarded_header(layer, bonus_ns, all_dependencies,
        generate_rbtree_Map_template_instantiation(op, generate_node_struct),
        get_name_of_rbtree_map_structure(op));
}
#endif

View File

@ -0,0 +1,225 @@
#ifndef PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#define PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#include "../../l1/core/util.h"
// todo: jam that one bit into parent field of BufRBTreeNode
/* Color bit of a red-black tree node. */
typedef enum {
RBTree_black = 0,
RBTree_red = 1,
} RBTreeColor;
/* Node of a buffer-backed (index-based) red-black tree: left/right/parent are
 * indices into a contiguous node array rather than pointers.  The Buf helpers
 * below treat index 0 as the NIL sentinel, so slot 0 must never hold a real node.
 * NOTE(review): fields are size_t but the helper functions take U64 indices —
 * presumably identical widths on target platforms; confirm. */
typedef struct {
size_t left;
size_t right;
size_t parent;
/* 0 is black, 1 is red */
RBTreeColor color;
} BufRBTreeNode;
#include "../../../gen/l1/eve/embassy_l1_5/VecBufRBTreeNode.h"
/* Rotates the subtree at index x to the left: x's right child y moves up,
 * x becomes y's left child, and y's former left subtree is re-hung under x.
 * Both x and its right child must be real nodes (non-zero indices).
 * *root is updated when x was the root. */
void BufRBTree_left_rotate(BufRBTreeNode* tree, U64* root, U64 x){
    assert(x != 0);
    U64 y = tree[x].right;
    assert(y != 0);
    /* y's left subtree becomes x's right subtree. */
    U64 beta = tree[y].left;
    tree[x].right = beta;
    if (beta != 0)
        tree[beta].parent = x;
    /* Hang y where x used to be. */
    U64 mom = tree[x].parent;
    tree[y].parent = mom;
    if (mom == 0)
        *root = y;
    else if (tree[mom].left == x)
        tree[mom].left = y;
    else
        tree[mom].right = y;
    /* Finally x descends to y's left. */
    tree[x].parent = y;
    tree[y].left = x;
}
/* Rotates the subtree at index x to the right: x's left child y moves up,
 * x becomes y's right child, and y's former right subtree is re-hung under x.
 * Both x and its left child must be real nodes (non-zero indices).
 * *root is updated when x was the root. */
void BufRBTree_right_rotate(BufRBTreeNode* tree, U64* root, U64 x){
    assert(x != 0);
    U64 y = tree[x].left;
    assert(y != 0);
    /* y's right subtree becomes x's left subtree. */
    U64 beta = tree[y].right;
    tree[x].left = beta;
    if (beta != 0)
        tree[beta].parent = x;
    /* Hang y where x used to be. */
    U64 mom = tree[x].parent;
    tree[y].parent = mom;
    if (mom == 0)
        *root = y;
    else if (tree[mom].right == x)
        tree[mom].right = y;
    else
        tree[mom].left = y;
    /* Finally x descends to y's right. */
    tree[x].parent = y;
    tree[y].right = x;
}
/* Helper function. Called in automatically generated code */
/* Rebalances the buffer-backed red-black tree after inserting node `me`
 * (assumed freshly linked in and colored red).  Walks upward resolving
 * red-red violations: recolors when the aunt is red, rotates otherwise.
 * NOTE(review): when aunt == 0 this reads tree[0].color, i.e. it relies on the
 * slot-0 sentinel being kept black — confirm slot 0 is initialized that way. */
void BufRBTree_fix_after_insert(BufRBTreeNode* tree, U64* root, U64 me){
assert(me);
while (true) {
U64 mom = tree[me].parent;
if (mom == 0)
break;
/* Parent black: no red-red violation, nothing left to fix. */
if (tree[mom].color == RBTree_black)
return;
/* mom is red, hence not the root, hence grandma is a real node. */
U64 grandma = tree[mom].parent;
U64 aunt = tree[grandma].left == mom ? tree[grandma].right : tree[grandma].left;
assert(aunt != mom);
if (tree[aunt].color == RBTree_red) {
/* Easy case */
/* Red aunt: push grandma's blackness down and continue fixing from her. */
tree[mom].color = RBTree_black;
tree[aunt].color = RBTree_black;
tree[grandma].color = RBTree_red;
me = grandma;
} else if (tree[grandma].left == mom) {
/* Hard case: firstborn orientation */
/* Inner child is rotated up first (classic double rotation, with the
 * recoloring folded into the two branches); terminates the loop. */
if (tree[mom].right == me) {
BufRBTree_left_rotate(tree, root, mom);
tree[me].color = RBTree_black;
} else {
tree[mom].color = RBTree_black;
}
BufRBTree_right_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
return;
} else {
/* Hard case: benjamin orientation */
if (tree[mom].left == me) {
BufRBTree_right_rotate(tree, root, mom);
tree[me].color = RBTree_black;
} else {
tree[mom].color = RBTree_black;
}
BufRBTree_left_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
return;
}
}
/* The loop only breaks once `me` has climbed to the root: keep it black. */
assert(*root == me);
tree[me].color = RBTree_black;
}
/* Makes node `to` take over node `fr`'s place in the tree: parent link, both
 * child links, and the full node contents (tree[to] receives a copy of
 * tree[fr]).  Index `fr` is forgotten afterwards; if the old contents of slot
 * `to` are still needed, save them on the stack before calling. */
void BufRBTree_steal_neighbours(BufRBTreeNode* tree, U64* root, U64 fr, U64 to){
    U64 mom = tree[fr].parent;
    /* Whoever referenced fr (its parent, or *root) now references to. */
    if (mom == 0)
        *root = to;
    else if (tree[mom].left == fr)
        tree[mom].left = to;
    else
        tree[mom].right = to;
    /* fr's children (possibly the slot-0 sentinel) now report to `to`. */
    tree[tree[fr].left].parent = to;
    tree[tree[fr].right].parent = to;
    tree[to] = tree[fr];
}
/* Helper used by the generated _erase / _find_min methods.
 * Returns the index of the leftmost (minimum) node in the subtree rooted at s.
 * s must be a real node (non-zero); the result is guaranteed non-zero. */
U64 BufRBTree_minimum_in_subtree(const BufRBTreeNode* tree, U64 s){
    assert(s != 0);
    for (U64 child = tree[s].left; child != 0; child = tree[s].left)
        s = child;
    return s;
}
/* Helper used by the generated _find_max / _find_prev methods.
 * Returns the index of the rightmost (maximum) node in the subtree rooted at s.
 * s must be a real node (non-zero); the result is guaranteed non-zero. */
U64 BufRBTree_maximum_in_subtree(const BufRBTreeNode* tree, U64 s){
    assert(s != 0);
    for (U64 child = tree[s].right; child != 0; child = tree[s].right)
        s = child;
    return s;
}
/* Restores the red-black invariants after removing a black node; `me` is the
 * child that took the spliced-out node's place (it carries a virtual extra
 * black).  CLRS-style deletion fixup, written out for both mirror orientations.
 * NOTE(review): `me` may be the slot-0 sentinel here, in which case
 * tree[0].parent must have been set meaningfully by the caller — the
 * pointer-based twin of this function documents exactly that; confirm. */
void BufRBTree_fix_after_delete(BufRBTreeNode* tree, U64* root, U64 me){
assert(tree[*root].parent == 0);
while (me != *root && tree[me].color == RBTree_black) {
U64 mom = tree[me].parent;
if (me == tree[mom].left) { /* We are on the left */
U64 sister = tree[mom].right;
if (tree[sister].color == RBTree_red) { /* Case 1 */
/* Red sibling: rotate it up so the remaining cases see a black sibling. */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
BufRBTree_left_rotate(tree, root, mom);
/* Reassignation required */
sister = tree[mom].right;
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Case 2 */
/* Both nephews black: recolor sibling and push the extra black upward. */
tree[sister].color = RBTree_red;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_benjamin].color == RBTree_black) {
/* Case 3 */
/* Near nephew red, far nephew black: rotate to convert into case 4. */
tree[nephew_firstborn].color = RBTree_black;
tree[sister].color = RBTree_red;
BufRBTree_right_rotate(tree, root, sister);
/* Reassignation required */
nephew_benjamin = sister;
sister = nephew_firstborn;
nephew_firstborn = tree[sister].left;
}
/* Case 4 */
/* Far nephew red: one rotation absorbs the extra black; loop terminates. */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_benjamin].color = RBTree_black;
BufRBTree_left_rotate(tree, root, mom);
me = *root;
/* NOTE(review): if `me` were somehow neither child of mom, this `else if`
 * would make the loop spin forever; the pointer-based twin uses a plain
 * else + assert instead.  Unreachable while tree links are consistent. */
} else if (me == tree[mom].right) { /* We are on the right */
U64 sister = tree[mom].left;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
BufRBTree_right_rotate(tree, root, mom);
/* Reassignation required */
sister = tree[mom].left;
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Case 2 */
tree[sister].color = RBTree_red;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_firstborn].color == RBTree_black) {
/* Case 3 */
tree[nephew_benjamin].color = RBTree_black;
tree[sister].color = RBTree_red;
BufRBTree_left_rotate(tree, root, sister);
/* Reassignation required */
nephew_firstborn = sister;
sister = nephew_benjamin;
nephew_benjamin = tree[sister].right;
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_firstborn].color = RBTree_black;
BufRBTree_right_rotate(tree, root, mom);
me = *root;
}
}
/* Absorb the remaining extra black (or re-blacken a red node we stopped on). */
tree[me].color = RBTree_black;
}
// todo: move here erase_empty method (renmaed to _erase_empty_by_index)
#endif

View File

@ -1,222 +1,279 @@
#ifndef PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#define PROTOTYPE1_SRC_L1_5_CORE_RB_TREE_NODE_H
#ifndef prototype1_src_l1_5_core_rb_tree_node_h
#define prototype1_src_l1_5_core_rb_tree_node_h
#include "../../l1/core/util.h"
typedef enum {
RBTree_black = 0,
RBTree_red = 1,
} RBTreeColor;
RBTREE_BLACK = 0,
RBTREE_RED = 1,
} RBTreeClr;
typedef struct {
size_t left;
size_t right;
size_t parent;
/* 0 is black, 1 is red */
RBTreeColor color;
} RBTreeNode;
typedef struct RBTreeNode RBTreeNode;
struct RBTreeNode{
RBTreeNode* parent;
RBTreeNode* left;
RBTreeNode* right;
RBTreeClr color;
};
#include "../../../gen/l1/eve/embassy_l1_5/VecRBTreeNode.h"
void RBTree_left_rotate(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* x){
assert(x != NIL);
RBTreeNode* y = x->right;
assert(y != NIL);
x->right = y->left;
if (y->left != NIL)
y->left->parent = x;
void RBTree_left_rotate(RBTreeNode* tree, U64* root, U64 x){
assert(x != 0);
U64 y = tree[x].right;
assert(y != 0);
tree[x].right = tree[y].left;
if (tree[x].right != 0)
tree[tree[x].right].parent = x;
tree[y].parent = tree[x].parent;
if (tree[y].parent == 0) {
y->parent = x->parent;
if (x->parent == NIL)
*root = y;
} else if (x == tree[tree[x].parent].left) {
tree[tree[x].parent].left = y;
} else {
tree[tree[x].parent].right = y;
}
tree[x].parent = y;
tree[y].left = x;
else if (x == x->parent->left)
x->parent->left = y;
else
x->parent->right = y;
x->parent = y;
y->left = x;
}
void RBTree_right_rotate(RBTreeNode* tree, U64* root, U64 x){
assert(x != 0);
U64 y = tree[x].left;
assert(y != 0);
tree[x].left = tree[y].right;
if (tree[x].left != 0)
tree[tree[x].left].parent = x;
void RBTree_right_rotate(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* x){
assert(x != NIL);
RBTreeNode* y = x->left;
assert(y != NIL);
x->left = y->right;
if (y->right != NIL)
y->right->parent = x;
tree[y].parent = tree[x].parent;
if (tree[y].parent == 0) {
y->parent = x->parent;
if (x->parent == NIL)
*root = y;
} else if (x == tree[tree[x].parent].right) {
tree[tree[x].parent].right = y;
} else {
tree[tree[x].parent].left = y;
}
tree[x].parent = y;
tree[y].right = x;
else if (x->parent->right == x)
x->parent->right = y;
else
x->parent->left = y;
x->parent = y;
y->right = x;
}
/* Helper function. Called in automatically generated code */
void RBTree_fix_after_insert(RBTreeNode* tree, U64* root, U64 me){
assert(me);
void RBTree_fix_after_insert(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* me){
assert(me != NIL);
while (true) {
U64 mom = tree[me].parent;
if (mom == 0)
RBTreeNode* mom = me->parent;
if (mom == NIL)
break;
if (tree[mom].color == RBTree_black)
if (mom->color == RBTREE_BLACK)
return;
U64 grandma = tree[mom].parent;
U64 aunt = tree[grandma].left == mom ? tree[grandma].right : tree[grandma].left;
RBTreeNode* grandma = mom->parent;
assert(grandma != NIL);
assert(grandma->color == RBTREE_BLACK);
RBTreeNode* aunt = grandma->left == mom ? grandma->right : grandma->left;
assert(aunt != mom);
if (tree[aunt].color == RBTree_red) {
if (aunt->color == RBTREE_RED) {
/* Easy case */
tree[mom].color = RBTree_black;
tree[aunt].color = RBTree_black;
tree[grandma].color = RBTree_red;
mom->color = RBTREE_BLACK;
aunt->color = RBTREE_BLACK;
grandma->color = RBTREE_RED;
me = grandma;
} else if (tree[grandma].left == mom) {
} else if (grandma->left == mom) {
/* Hard case: firstborn orientation */
if (tree[mom].right == me) {
RBTree_left_rotate(tree, root, mom);
tree[me].color = RBTree_black;
if (mom->right == me) {
RBTree_left_rotate(root, NIL, mom);
me->color = RBTREE_BLACK;
} else {
tree[mom].color = RBTree_black;
mom->color = RBTREE_BLACK;
}
RBTree_right_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
RBTree_right_rotate(root, NIL, grandma);
grandma->color = RBTREE_RED;
return;
} else {
/* Hard case: benjamin orientation */
if (tree[mom].left == me) {
RBTree_right_rotate(tree, root, mom);
tree[me].color = RBTree_black;
if (mom->left == me) {
RBTree_right_rotate(root, NIL, mom);
me->color = RBTREE_BLACK;
} else {
tree[mom].color = RBTree_black;
mom->color = RBTREE_BLACK;
}
RBTree_left_rotate(tree, root, grandma);
tree[grandma].color = RBTree_red;
RBTree_left_rotate(root, NIL, grandma);
grandma->color = RBTREE_RED;
return;
}
}
assert(*root == me);
tree[me].color = RBTree_black;
me->color = RBTREE_BLACK;
}
/* fr index will be forgotten. to fields will be overwritten (all fields replaced by fr's values)
* If you need the old values of `to` position, you better save them on stack */
void RBTree_steal_neighbours(RBTreeNode* tree, U64* root, U64 fr, U64 to){
if (tree[fr].parent == 0)
void RBTree_steal_neighbours(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* fr, RBTreeNode* to){
if (fr->parent == NIL)
*root = to;
else if (tree[tree[fr].parent].left == fr)
tree[tree[fr].parent].left = to;
else if (fr->parent->left == fr)
fr->parent->left = to;
else
tree[tree[fr].parent].right = to;
tree[tree[fr].left].parent = to;
tree[tree[fr].right].parent = to;
tree[to] = tree[fr];
fr->parent->right = to;
fr->left->parent = to;
fr->right->parent = to;
*to = *fr;
}
/* helper function (used in _erase, _find_min methods). It is assumed that s is not null.
* Guaranteed to return no-null
*/
U64 RBTree_minimum_in_subtree(const RBTreeNode* tree, U64 s){
assert(s != 0);
while (tree[s].left != 0)
s = tree[s].left;
return s;
RBTreeNode* RBTreeNode_minimum_in_subtree(RBTreeNode* x, RBTreeNode* NIL){
assert(x != NIL);
while (x->left != NIL)
x = x->left;
return x;
}
/* helper function (used in _find_max, _find_prev methods). It is assumed that s is not null.
* Guaranteed to return no-null
*/
U64 RBTree_maximum_in_subtree(const RBTreeNode* tree, U64 s){
assert(s != 0);
while (tree[s].right != 0)
s = tree[s].right;
return s;
RBTreeNode* RBTreeNode_maximum_in_subtree(RBTreeNode* x, RBTreeNode* NIL){
assert(x != NIL);
while (x->right != NIL)
x = x->right;
return x;
}
void RBTree_fix_after_delete(RBTreeNode* tree, U64* root, U64 me){
assert(tree[*root].parent == 0);
while (me != *root && tree[me].color == RBTree_black) {
U64 mom = tree[me].parent;
if (me == tree[mom].left) { /* We are on the left */
U64 sister = tree[mom].right;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
RBTree_left_rotate(tree, root, mom);
/* Returns NULL instead of NIL */
RBTreeNode* RBTreeNode_find_next(RBTreeNode* x, RBTreeNode* NIL){
assert(x != NIL);
if (x->right != NIL)
return RBTreeNode_minimum_in_subtree(x->right, NIL);
while (true) {
RBTreeNode* p = x->parent;
if (p == NIL)
return NULL;
if (p->left == x)
return p;
x = p;
}
}
/* Returns NULL instead of NIL */
RBTreeNode* RBTreeNode_find_prev(RBTreeNode* x, RBTreeNode* NIL){
assert(x != NIL);
if (x->left != NIL)
return RBTreeNode_maximum_in_subtree(x->left, NIL);
while (true) {
RBTreeNode* p = x->parent;
if (p == NIL)
return NULL;
if (p->right == x)
return p;
x = p;
}
}
/* me may be NIL at the beginning. ->parent field of NIL is meaningful */
void RBTree_fix_after_delete(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* me){
assert((*root)->parent == NIL);
while (me != *root && me->color == RBTREE_BLACK) {
RBTreeNode* mom = me->parent;
assert(mom != NIL);
if (me == mom->left) { /* We are on the left */
RBTreeNode* sister = mom->right;
assert(sister != NIL);
if (sister->color == RBTREE_RED) { /* Case 1 */
mom->color = RBTREE_RED;
sister->color = RBTREE_BLACK;
RBTree_left_rotate(root, NIL, mom);
/* Reassignation required */
sister = tree[mom].right;
sister = mom->right;
assert(sister != NIL);
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
/* Cases 2,3,4 */
RBTreeNode* nephew_firstborn = sister->left;
RBTreeNode* nephew_benjamin = sister->right;
if (nephew_firstborn->color == RBTREE_BLACK && nephew_benjamin->color == RBTREE_BLACK) {
/* Case 2 */
tree[sister].color = RBTree_red;
sister->color = RBTREE_RED;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_benjamin].color == RBTree_black) {
if (nephew_benjamin->color == RBTREE_BLACK) {
/* Case 3 */
tree[nephew_firstborn].color = RBTree_black;
tree[sister].color = RBTree_red;
RBTree_right_rotate(tree, root, sister);
nephew_firstborn->color = RBTREE_BLACK;
sister->color = RBTREE_RED;
RBTree_right_rotate(root, NIL, sister);
/* Reassignation required */
nephew_benjamin = sister;
sister = nephew_firstborn;
nephew_firstborn = tree[sister].left;
assert(sister != NIL);
/*nephew_firstborn = sister->left;*/
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_benjamin].color = RBTree_black;
RBTree_left_rotate(tree, root, mom);
sister->color = mom->color;
mom->color = RBTREE_BLACK;
nephew_benjamin->color = RBTREE_BLACK;
RBTree_left_rotate(root, NIL, mom);
me = *root;
} else if (me == tree[mom].right) { /* We are on the right */
U64 sister = tree[mom].left;
if (tree[sister].color == RBTree_red) { /* Case 1 */
tree[mom].color = RBTree_red;
tree[sister].color = RBTree_black;
RBTree_right_rotate(tree, root, mom);
} else { /* We are on the right */
assert(me == mom->right);
RBTreeNode* sister = mom->left;
assert(sister != NULL);
if (sister->color == RBTREE_RED) { /* Case 1 */
mom->color = RBTREE_RED;
sister->color = RBTREE_BLACK;
RBTree_right_rotate(root, NIL, mom);
/* Reassignation required */
sister = tree[mom].left;
sister = mom->left;
assert(sister != NIL);
}
/* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */
assert(sister != 0);
U64 nephew_firstborn = tree[sister].left;
U64 nephew_benjamin = tree[sister].right;
if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) {
RBTreeNode* nephew_firstborn = sister->left;
RBTreeNode* nephew_benjamin = sister->right;
if (nephew_firstborn->color == RBTREE_BLACK && nephew_benjamin->color == RBTREE_BLACK) {
/* Case 2 */
tree[sister].color = RBTree_red;
sister->color = RBTREE_RED;
me = mom;
continue;
}
/* Cases 3,4 */
if (tree[nephew_firstborn].color == RBTree_black) {
if (nephew_firstborn->color == RBTREE_BLACK) {
/* Case 3 */
tree[nephew_benjamin].color = RBTree_black;
tree[sister].color = RBTree_red;
RBTree_left_rotate(tree, root, sister);
nephew_benjamin->color = RBTREE_BLACK;
sister->color = RBTREE_RED;
RBTree_left_rotate(root, NIL, sister);
/* Reassignation required */
nephew_firstborn = sister;
sister = nephew_benjamin;
nephew_benjamin = tree[sister].right;
assert(sister != NIL);
/*nephew_benjamin = sister->right;*/
}
/* Case 4 */
tree[sister].color = tree[mom].color;
tree[mom].color = RBTree_black;
tree[nephew_firstborn].color = RBTree_black;
RBTree_right_rotate(tree, root, mom);
sister->color = mom->color;
mom->color = RBTREE_BLACK;
nephew_firstborn->color = RBTREE_BLACK;
RBTree_right_rotate(root, NIL, mom);
me = *root;
}
}
tree[me].color = RBTree_black;
me->color = RBTREE_BLACK;
}
#endif
/* Assumes that z->key and z->value were already dropped properly. Frees z node */
void RBTree_erase_empty_by_iter(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* z) {
assert(z != NULL);
assert(z != NIL);
RBTreeNode* y = (z->left == NULL || z->right == 0) ? z : RBTreeNode_minimum_in_subtree(z, NIL);
RBTreeNode* x = y->left == NIL ? y->right : y->left;
assert(x != y && x != z);
RBTreeNode* x_adopter = y->parent;
x->parent = x_adopter;
if (x_adopter == NIL)
*root = x;
else if (x_adopter->left == y)
x_adopter->left = x;
else
x_adopter->right = x;
RBTreeClr y_org_clr = y->color;
if (z != y) {
RBTree_steal_neighbours(root, NIL, z, y);
if (x_adopter == z)
x_adopter = y;
}
x->parent = x_adopter;
if (y_org_clr == RBTREE_BLACK)
RBTree_fix_after_delete(root, NIL, x);
free((void*)z);
}
#endif

View File

@ -243,7 +243,7 @@ typedef struct {
/* It is users job to put resize and alloca requests for sub-buffers of type T to the corresponding request
* vectors for this exact type T */
typedef struct {
VecMargaretMemAllocatorRequestResizeSubBuffer resize;
VecMargaretMemAllocatorRequestResizeSubBuffer expand;
VecMargaretMemAllocatorRequestAllocSubBuffer alloc;
} MargaretMemAllocatorRequestsForCertainBufferKindAllocation;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation.h"
@ -251,7 +251,8 @@ typedef struct {
typedef struct {
VecMargaretMemAllocatorRequestFreeSubBuffer free_subbuffer;
VecMargaretMemAllocatorRequestFreeImage free_image;
VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation resize_alloc_buffer;
VecMargaretMemAllocatorRequestResizeSubBuffer shrink_subbuffer;
VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation expand_alloc_buffer;
VecMargaretMemAllocatorRequestAllocImage alloc_image;
} MargaretMemAllocatorRequest;
@ -296,7 +297,7 @@ char* MargaretMemAllocator_get_host_visible_buffer_ptr(
#include "../../l1/core/uint_segments.h"
#include "../../l1/core/util.h"
#include "../../l1_5/core/rb_tree_node.h"
#include "../../l1_5/core/buff_rb_tree_node.h"
typedef struct {
U64 width;
@ -455,7 +456,6 @@ typedef struct{
U64 new_start;
U64 new_len;
} MargaretOldBufferResizeRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretOldBufferResizeRecord.h"
typedef struct {
@ -466,6 +466,15 @@ typedef struct {
} MargaretResizeToNascentRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretResizeToNascentRecord.h"
/* This is not a request from a user to MMA, this is a request from MMA to MMA-defragmentation subroutine */
typedef struct{
U64 old_size; /* in buffer in old VkDeviceMemory */
U64 new_size; /* in buffer in new VkDeviceMemory */
MargaretMemAllocatorSubBufferPosition* ans;
} MargaretSubBufRelocationRequest;
#include "../../../gen/l1/eve/margaret/VecMargaretSubBufRelocationRequest.h"
#include "../../../gen/l1/eve/margaret/VecVecMargaretSubBufRelocationRequest.h"
#include "../../../gen/l1/VecAndSpan_U8.h"
@ -476,9 +485,12 @@ typedef struct {
} MargaretMemFreeSpaceManager;
MargaretMemFreeSpaceManager MargaretMemFreeSpaceManager_new(){
MargaretMemFreeSpaceManager res = {.set_present = VecU8_new()};
MargaretMemFreeSpaceManager res = {.set_present = VecU8_new_zeroinit(1)};
res.set_present.buf[0] = 3;
for (U8 algn = 0; algn < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; algn++)
res.free_space_in_memory[algn] = None_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment();
res.free_space_in_memory[3] = Some_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment(
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_new_reserved(3, 1));
return res;
}
@ -488,6 +500,62 @@ void MargaretMemFreeSpaceManager_drop(MargaretMemFreeSpaceManager self){
VecU8_drop(self.set_present);
}
/* Forwards a _sink pass to every per-alignment free-segment set that is
 * currently materialized (Option_Some).
 * NOTE(review): the semantics of _sink are defined by the generated set type
 * (not visible here) — presumably capacity compaction; confirm. */
void MargaretMemFreeSpaceManager_sink(MargaretMemFreeSpaceManager* self){
for (U8 ae = 0; ae < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; ae++)
if (self->free_space_in_memory[ae].variant == Option_Some)
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_sink(&self->free_space_in_memory[ae].some);
}
/* Removes the free segment [start, start+len) of device-memory block
 * `dev_mem_block` from every per-alignment set listed in set_present.  The
 * sets are kept mirrored, so the segment is asserted present in each of them.
 * No-op for len == 0. */
void MargaretMemFreeSpaceManager_erase(MargaretMemFreeSpaceManager* man, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
assert(man->set_present.len > 0);
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
/* eret is only consumed by the assert; it is unused under NDEBUG. */
bool eret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_erase(&
man->free_space_in_memory[alignment].some, &(MargaretFreeMemSegment){start, len, dev_mem_block});
assert(eret);
}
}
/* Inserts the free segment [start, start+len) of device-memory block
 * `dev_mem_block` into every per-alignment set listed in set_present (the sets
 * are mirrors of each other).  len must be non-zero and the segment must not
 * already be present. */
void MargaretMemFreeSpaceManager_insert(MargaretMemFreeSpaceManager* man, U64 start, U64 len, U32 dev_mem_block){
assert(len > 0);
assert(man->set_present.len > 0); /* MargaretMemFreeSpaceManager will do that for us with 2^3 */
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
/* iret is only consumed by the assert; it is unused under NDEBUG. */
bool iret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_insert(&
man->free_space_in_memory[alignment].some, (MargaretFreeMemSegment){start, len, dev_mem_block});
assert(iret);
}
}
/* Searches for a free segment of at least `len` bytes under the ordering of
 * the given alignment class.  If the set for `alignment_exp` was never
 * materialized, it is built lazily by copying every segment from the first
 * present set.  Returns None when no segment is large enough.
 * NOTE(review): the lazily created set is not appended to set_present, so
 * subsequent _insert/_erase calls would not keep it in sync — verify against
 * the callers / set_present bookkeeping. */
OptionMargaretFreeMemSegment MargaretMemFreeSpaceManager_search(MargaretMemFreeSpaceManager* man, U64 len, U8 alignment_exp) {
check(alignment_exp < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
if (man->free_space_in_memory[alignment_exp].variant == Option_None) {
assert(man->set_present.len > 0);
assert(man->free_space_in_memory[man->set_present.buf[0]].variant == Option_Some);
/* `have` points at a different array slot than alignment_exp (that one is
 * still None), so it stays valid across the assignment below. */
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment* have = &man->free_space_in_memory[man->set_present.buf[0]].some;
man->free_space_in_memory[alignment_exp] = Some_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment(
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_new_reserved(alignment_exp, have->el.len));
for (size_t i = 0; i < have->el.len; i++) {
// MargaretFreeMemSegment
BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_insert(&man->free_space_in_memory[alignment_exp].some,
*VecMargaretFreeMemSegment_at(&have->el, i));
}
}
assert(man->free_space_in_memory[alignment_exp].variant == Option_Some);
/* Smallest segment with len >= requested (start/dev_mem_block zeroed in the probe). */
U64 sit = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_find_min_grtr_or_eq(&man->free_space_in_memory[alignment_exp].some,
&(MargaretFreeMemSegment){.start = 0, .len = len, .dev_mem_block = 0});
if (sit == 0)
return None_MargaretFreeMemSegment();
return Some_MargaretFreeMemSegment(*BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_at_iter(
&man->free_space_in_memory[alignment_exp].some, sit));
}
typedef struct {
VkBufferUsageFlags usage;
U8 inner_alignment_exp;
@ -501,6 +569,22 @@ void MargaretBufferKindInfo_drop(MargaretBufferKindInfo self){
BuffRBTreeByLen_SetMargaretFreeMemSegment_drop(self.free_space_inside_buffers);
}
/* Removes [start, start+len) in block dev_mem_block from this buffer kind's
 * intra-buffer free-space set; the segment must currently be present.
 * No-op for len == 0. */
void MargaretBufferKindInfo_erase_free_space(MargaretBufferKindInfo* self, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
/* eret is only consumed by the assert; it is unused under NDEBUG. */
bool eret = BuffRBTreeByLen_SetMargaretFreeMemSegment_erase(&self->free_space_inside_buffers,
&(MargaretFreeMemSegment){.start = start, .len = len, .dev_mem_block = dev_mem_block});
assert(eret);
}
/* Registers [start, start+len) in block dev_mem_block as free space inside
 * this buffer kind's buffers; the segment must not already be present.
 * No-op for len == 0. */
void MargaretBufferKindInfo_insert_free_space(MargaretBufferKindInfo* self, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
/* iret is only consumed by the assert; it is unused under NDEBUG. */
bool iret = BuffRBTreeByLen_SetMargaretFreeMemSegment_insert(&self->free_space_inside_buffers,
(MargaretFreeMemSegment){start, len, dev_mem_block});
assert(iret);
}
#include "../../../gen/l1/eve/margaret/VecMargaretBufferKindInfo.h"
/* VkDevice and VkPhysicalDevice stay remembered here. Don't forget that, please */
@ -557,111 +641,115 @@ MargaretMemAllocator MargaretMemAllocator_new(
return self;
}
void MargaretMemAllocator__erase_free_space_for_subbufers(
BuffRBTreeByLen_SetMargaretFreeMemSegment* free_space_inside_buffers, U64 start, U64 len, U32 dev_mem_block
){
if (len > 0) {
bool eret = BuffRBTreeByLen_SetMargaretFreeMemSegment_erase(free_space_inside_buffers, &(MargaretFreeMemSegment){
.start = start, .len = len, .dev_mem_block = dev_mem_block});
assert(eret);
}
/* Runs a _sink pass over every free-space structure the allocator owns:
 * each buffer kind's intra-buffer set, then the general memory manager. */
void MargaretMemAllocator__sink_memory(MargaretMemAllocator* self){
for (size_t i = 0; i < self->buffer_types.len; i++)
BuffRBTreeByLen_SetMargaretFreeMemSegment_sink(&self->buffer_types.buf[i].free_space_inside_buffers);
MargaretMemFreeSpaceManager_sink(&self->mem_free_space);
}
/* Not in a dedicated buffer, just in general memory */
void MargaretMemAllocator__erase_free_space_in_memory(
MargaretMemAllocator* self, U64 start, U64 len, U32 dev_mem_block){
if (len == 0)
return;
MargaretMemFreeSpaceManager* man = &self->mem_free_space;
assert(man->set_present.len > 0);
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
bool eret = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_erase(&
man->free_space_in_memory[alignment].some, &(MargaretFreeMemSegment){start, len, dev_mem_block});
assert(eret);
/* Returns the free gap immediately to the LEFT of the occupation at iterator
 * occ_it: from the end of the previous occupation (or from offset 0 when this
 * is the first entry) up to this occupation's start. */
U64Segment MargaretMemAllocatorOneBlock_get_left_free_space(const MargaretMemAllocatorOneBlock* self, U64 occ_it){
U64 occ_start;
/* occ itself is not needed here; at_iter is called only to obtain occ_start. */
const MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, occ_it, &occ_start, &occ);
U64 prev_occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_prev(&self->occupied_memory, occ_it);
if (prev_occ_it != 0) {
U64 prev_occ_start;
const MargaretMemoryOccupation* prev_occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, prev_occ_it, &prev_occ_start, &prev_occ);
assert(prev_occ_start + prev_occ->taken_size <= occ_start);
return (U64Segment){.start = prev_occ_start + prev_occ->taken_size, .len = occ_start - (prev_occ_start + prev_occ->taken_size)};
}
/* No previous occupation: everything before this one is free. */
return (U64Segment){.start = 0, .len = occ_start};
}
/* Registers the segment [start, start+len) of device-memory block
 * `dev_mem_block` in every per-alignment free-space set. len must be > 0. */
void MargaretMemAllocator__insert_free_space_in_memory(
        MargaretMemAllocator* self, U64 start, U64 len, U32 dev_mem_block){
    assert(len > 0);
    MargaretMemFreeSpaceManager* man = &self->mem_free_space;
    assert(man->set_present.len > 0); /* MargaretMemFreeSpaceManager will do that for us with 2^3 */
    for (size_t i = 0; i < man->set_present.len; i++) {
        U8 align_exp = man->set_present.buf[i];
        assert(align_exp < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
        assert(man->free_space_in_memory[align_exp].variant == Option_Some);
        bool inserted = BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_insert(
            &man->free_space_in_memory[align_exp].some,
            (MargaretFreeMemSegment){start, len, dev_mem_block});
        assert(inserted); /* the segment must not already be in any set */
    }
}
/* Returns the gap of free memory immediately to the right of the occupation
 * at iterator `occ_it`: from the end of this occupation up to the start of
 * the next occupation (or to the end of the block if there is none). */
U64Segment MargaretMemAllocatorOneBlock_get_right_free_space(const MargaretMemAllocatorOneBlock* self, U64 occ_it){
    U64 cur_start;
    const MargaretMemoryOccupation* cur;
    BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, occ_it, &cur_start, &cur);
    U64 gap_begin = cur_start + cur->taken_size;
    U64 gap_end = self->length; /* reaches to the block end unless a successor exists */
    U64 next_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_next(&self->occupied_memory, occ_it);
    if (next_it != 0) {
        U64 next_start;
        const MargaretMemoryOccupation* next;
        BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&self->occupied_memory, next_it, &next_start, &next);
        assert(gap_begin <= next_start); /* occupations must not overlap */
        gap_end = next_start;
    }
    return (U64Segment){.start = gap_begin, .len = gap_end - gap_begin};
}
/* If mem occupant in question is VkBuffer, it won't delete anything from the set of available free mem segments
 * for that buffer kindred. It is your job to remove free buffer subsegments from this set.
 *
 * NOTE(review): this span contained both the pre- and post-refactor bodies
 * interleaved by the diff dump (duplicate `occ_start`/`occ` declarations,
 * unbalanced braces). Reconstructed the post-refactor version, which uses the
 * get_left/right_free_space helpers and the pointer-based occupation view. */
void MargaretMemAllocator__get_rid_of_memory_occupant(
        MargaretMemAllocator* self, U32 mem_block_id, U64 occ_it){
    MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, mem_block_id);
    assert(0 < occ_it && occ_it < block->occupied_memory.tree.len);
    U64 occ_start;
    const MargaretMemoryOccupation* occ;
    BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
    /* Updating block usage counter */
    block->occupation_counter -= occ->taken_size;
    /* Compute both neighbouring gaps BEFORE the tree is mutated */
    U64Segment left_free_space = MargaretMemAllocatorOneBlock_get_left_free_space(block, occ_it);
    U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(block, occ_it);
    if (occ->variant == MargaretMemoryOccupation_Buffer) {
        vkDestroyBuffer(self->device, occ->buf.buffer, NULL);
    } else if (occ->variant == MargaretMemoryOccupation_Image) {
        vkDestroyImage(self->device, occ->img.image, NULL);
    }
    /* All these iterators and pointers will get invalidated */
    MargaretMemoryOccupation_drop(*occ); // yes, this is illegal, but it works. Don't do it again, please
    BuffRBTree_MapU64ToMargaretMemoryOccupation_empty_index_erase(&block->occupied_memory, occ_it);
    /* All these iterators and pointers just got invalidated.
     * Merge left gap + freed occupation + right gap into one free segment. */
    MargaretMemFreeSpaceManager_erase(&self->mem_free_space, left_free_space.start, left_free_space.len, mem_block_id);
    MargaretMemFreeSpaceManager_erase(&self->mem_free_space, right_free_space.start, right_free_space.len, mem_block_id);
    MargaretMemFreeSpaceManager_insert(&self->mem_free_space, left_free_space.start,
        right_free_space.start + right_free_space.len - left_free_space.start, mem_block_id);
}
/* Given a subbuffer inside given buffer, returns segment of free space on the left:
 * from the end of the previous subbuffer (or offset 0 if none) up to this
 * subbuffer's start.
 * NOTE(review): removed stray lines interleaved here by the diff dump
 * (leftovers of the old get_rid_of_memory_occupant body). */
U64Segment MargaretMemoryOccupationBuffer_get_left_free_space(
        MargaretMemoryOccupationBuffer* buf, U64 subbuf_it){
    U64 subbuf_start;
    const MargaretBufferOccupationSubBuffer* subbuf;
    BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
    assert(subbuf_start + subbuf->length <= buf->capacity);
    U64 prev_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_prev(&buf->subbuffers, subbuf_it);
    if (prev_subbuf_it != 0) {
        U64 prev_subbuf_start;
        const MargaretBufferOccupationSubBuffer* prev_subbuf;
        BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, prev_subbuf_it, &prev_subbuf_start, &prev_subbuf);
        assert(prev_subbuf_start + prev_subbuf->length <= subbuf_start);
        return (U64Segment){.start = prev_subbuf_start + prev_subbuf->length, .len = subbuf_start - (prev_subbuf_start + prev_subbuf->length)};
    }
    return (U64Segment){.start = 0, .len = subbuf_start};
}
/* Given a subbuffer inside this buffer, return segment of free space on the right:
 * from the end of this subbuffer up to the start of the next one (or to the
 * buffer's capacity if no subbuffer follows).
 * NOTE(review): removed stray lines interleaved here by the diff dump
 * (old LEN computation belonging to a different function). */
U64Segment MargaretMemoryOccupationBuffer_get_right_free_space(
        MargaretMemoryOccupationBuffer* buf, U64 subbuf_it){
    U64 subbuf_start;
    const MargaretBufferOccupationSubBuffer* subbuf;
    BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
    assert(subbuf_start + subbuf->length <= buf->capacity);
    U64 next_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_next(&buf->subbuffers, subbuf_it);
    if (next_subbuf_it != 0) {
        U64 next_subbuf_start;
        const MargaretBufferOccupationSubBuffer* next_subbuf;
        BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, next_subbuf_it, &next_subbuf_start, &next_subbuf);
        assert(subbuf_start + subbuf->length <= next_subbuf_start);
        return (U64Segment){.start = subbuf_start + subbuf->length, .len = next_subbuf_start - (subbuf_start + subbuf->length)};
    }
    return (U64Segment){.start = subbuf_start + subbuf->length, .len = buf->capacity - (subbuf_start + subbuf->length)};
}
/* Don't forget that all the iterators for MMA maps for buffers and sets for free space just got invalidated */
@ -684,57 +772,27 @@ void MargaretMemAllocator__get_rid_of_sub_buffer(MargaretMemAllocator* self, U32
assert(subbuf_start == start - occ_start);
assert(subbuf_start + subbuf->length <= buf->capacity);
assert(start + subbuf->length <= occ_start + buf->capacity);
kindred->total_occupation -= subbuf->length;
kindred->total_occupation -= subbuf->length; // todo: give a thought to memory counting
U64 left_free_space_start, left_free_space_length;
U64 prev_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_prev(&buf->subbuffers, subbuf_it);
if (prev_subbuf_it != 0) {
U64 prev_subbuf_start;
const MargaretBufferOccupationSubBuffer* prev_subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, prev_subbuf_it, &prev_subbuf_start, &prev_subbuf);
assert(prev_subbuf_start + prev_subbuf->length <= subbuf_start);
left_free_space_start = prev_subbuf_start + prev_subbuf->length;
left_free_space_length = subbuf_start - (prev_subbuf_start + prev_subbuf->length);
} else {
left_free_space_start = 0;
left_free_space_length = subbuf_start;
}
U64 right_free_space_start, right_free_space_length;
U64 next_subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find_next(&buf->subbuffers, subbuf_it);
if (next_subbuf_it != 0) {
U64 next_subbuf_start;
const MargaretBufferOccupationSubBuffer* next_subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_at_iter(&buf->subbuffers, next_subbuf_it, &next_subbuf_start, &next_subbuf);
assert(subbuf_start + subbuf->length <= next_subbuf_start);
right_free_space_start = subbuf_start + subbuf->length;
right_free_space_length = next_subbuf_start - (subbuf_start + subbuf->length);
} else {
right_free_space_start = subbuf_start + subbuf->length;
right_free_space_length = buf->capacity - (subbuf_start + subbuf->length);
}
U64Segment left_free_space = MargaretMemoryOccupationBuffer_get_left_free_space(buf, subbuf_it);
U64Segment right_free_space = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
/* all these iterators and pointers will get invalidated */
/* Because MargaretBufferOccupationSubBuffer is primitive, we don't need to drop it before erasing */
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_empty_index_erase(&buf->subbuffers, subbuf_it);
/* all these iterators and pointers just got invalidated */
MargaretMemAllocator__erase_free_space_for_subbufers(&kindred->free_space_inside_buffers,
left_free_space_start, left_free_space_length, mem_block_id);
MargaretMemAllocator__erase_free_space_for_subbufers(&kindred->free_space_inside_buffers,
right_free_space_start, right_free_space_length, mem_block_id);
MargaretBufferKindInfo_erase_free_space(kindred, left_free_space.start, left_free_space.len, mem_block_id);
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, mem_block_id);
if (buf->subbuffers.el.len == 0) {
/* We don't actually need to add the BIG free memory segment because we are already deleting the whole buffer */
MargaretMemAllocator__get_rid_of_memory_occupant(self, mem_block_id, occ_it);
} else {
U64 LEN = right_free_space_start + right_free_space_length - left_free_space_start;
U64 LEN = right_free_space.start + right_free_space.len - left_free_space.start;
assert(LEN > 0);
bool iret = BuffRBTreeByLen_SetMargaretFreeMemSegment_insert(&kindred->free_space_inside_buffers,
(MargaretFreeMemSegment){.start = left_free_space_start, .len = LEN, .dev_mem_block = mem_block_id});
assert(iret);
MargaretBufferKindInfo_insert_free_space(kindred, left_free_space.start, LEN, mem_block_id);
}
}
@ -780,6 +838,7 @@ MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest* request
){
MargaretMemAllocator_wipe_old(self);
assert(request->expand_alloc_buffer.len == self->buffer_types.len);
for (size_t i = 0; i < request->free_subbuffer.len; i++) {
MargaretMemAllocatorSubBufferPosition* req = request->free_subbuffer.buf[i];
MargaretMemAllocator__get_rid_of_sub_buffer(self, req->memory_allocation_id, req->offset_in_device_memory_nubble);
@ -790,11 +849,180 @@ MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find(&block->occupied_memory, req->offset_in_device_memory_nubble);
MargaretMemAllocator__get_rid_of_memory_occupant(self, req->memory_allocation_id, occ_it);
}
/* We iterate even over those buffer kinds, that we don't use. It is okay, there won't be alot of buffer kinds
* ,and we tend to use all of them */
for (U16 bk = 0; bk < (U16)self->buffer_types.len; bk++) {
MargaretBufferKindInfo* kindred = &self->buffer_types.buf[bk];
// todo: fix alignment issues here too
for (size_t shr = 0; shr < request->shrink_subbuffer.len; shr++) {
U64 new_size = request->shrink_subbuffer.buf[shr].new_size;
assert(new_size > 0);
MargaretMemAllocatorSubBufferPosition* ans = request->shrink_subbuffer.buf[shr].prev_ans;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->memory_allocation_id);
OptionRefMutMargaretMemoryOccupation Oocc = BuffRBTree_MapU64ToMargaretMemoryOccupation_mat(&block->occupied_memory,
ans->offset_in_device_memory_nubble - ans->offset);
MargaretMemoryOccupation* occ = OptionRefMutMargaretMemoryOccupation_expect(Oocc);
assert(occ->variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &occ->buf;
MargaretBufferKindInfo* kindred = VecMargaretBufferKindInfo_mat(&self->buffer_types, buf->kind);
U64 subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find(&buf->subbuffers, ans->offset);
U64 subbuf_start;
MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_mat_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(new_size <= subbuf->length);
if (new_size == subbuf->length)
continue;
U64Segment right_free_space = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, ans->memory_allocation_id);
MargaretBufferKindInfo_insert_free_space(kindred, right_free_space.start - (subbuf->length - new_size),
right_free_space.len + (subbuf->length - new_size), ans->memory_allocation_id);
subbuf->length = new_size;
}
VkPhysicalDeviceMaintenance4Properties maintenance4_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
};
VkPhysicalDeviceMaintenance3Properties maintenance3_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
.pNext = &maintenance4_properties
};
VkPhysicalDeviceProperties2 properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
.pNext = &maintenance3_properties,
};
vkGetPhysicalDeviceProperties2(self->physical_device, &properties);
// todo: resize requests for buffer kinds with preserve_at_quiet = false is a ridiculous thing, ban it
// todo: fix alignment on all buffer requests
VecMargaretOldBufferResizeRecord old_buf_resize_record = VecMargaretOldBufferResizeRecord_new();
/* We iterate even over those buffer kinds, that we don't use. It is okay, there won't be alot of buffer kinds
* + and we tend to use all of them */
for (U16 bk = 0; bk < (U16)self->buffer_types.len;) {
MargaretBufferKindInfo* kindred = &self->buffer_types.buf[bk];
MargaretMemAllocatorRequestsForCertainBufferKindAllocation* buf_requests = &request->expand_alloc_buffer.buf[bk];
/* We first try to do all the resize requests, that COULD be done using method 1 and 2. */
for (U64 rr = 0; rr < buf_requests->expand.len;) {
U64 new_size = buf_requests->expand.buf[rr].new_size;
MargaretMemAllocatorSubBufferPosition* ans = buf_requests->expand.buf[rr].prev_ans;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->memory_allocation_id);
U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find(&block->occupied_memory,
ans->offset_in_device_memory_nubble - ans->offset);
U64 occ_start;
MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_mat_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
assert(occ->variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &occ->buf;
assert(ans->offset_in_device_memory_nubble == occ_start + ans->offset);
U64 subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find(&buf->subbuffers, ans->offset);
U64 subbuf_start;
MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_mat_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(subbuf->length <= new_size);
/* Method 1 */
U64Segment right_free_space = MargaretMemoryOccupationBuffer_get_right_free_space(buf, subbuf_it);
if (new_size - subbuf->length <= right_free_space.len) {
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, ans->memory_allocation_id);
MargaretBufferKindInfo_insert_free_space(kindred, right_free_space.start + (new_size - subbuf->length),
right_free_space.len - (new_size - subbuf->length), ans->memory_allocation_id);
VecMargaretOldBufferResizeRecord_append(&old_buf_resize_record, (MargaretOldBufferResizeRecord){
.old_mem_block_id = ans->memory_allocation_id, .old_start = ans->offset, .old_len = subbuf->length,
.new_mem_block_id = ans->memory_allocation_id, .new_start = ans->offset, .new_len = new_size});
subbuf->length = new_size; /* Success */
VecMargaretMemAllocatorRequestResizeSubBuffer_unordered_pop(&buf_requests->expand, rr);
continue;
}
/* Method 2 */
U64Segment RIGHT_FREE_SPACE = MargaretMemAllocatorOneBlock_get_right_free_space(block, occ_it);
if (
(RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len < buf->capacity) ||
(ans->offset_in_device_memory_nubble + new_size > RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len) ||
(subbuf_start + new_size > maintenance4_properties.maxBufferSize)) {
rr++;
continue;
}
VkBuffer temp_buf_extension;
VkBufferCreateInfo temp_buf_extension_crinfo = {
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.size = subbuf_start + new_size,
.usage = kindred->usage,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
};
if (vkCreateBuffer(self->device, &temp_buf_extension_crinfo, NULL, &temp_buf_extension) != VK_SUCCESS)
abortf("vkCreateBuffer");
VkMemoryRequirements temp_buf_extension_req;
vkGetBufferMemoryRequirements(self->device, temp_buf_extension, &temp_buf_extension_req);
check(U64_is_2pow(temp_buf_extension_req.alignment));
check((temp_buf_extension_req.memoryTypeBits & (1ull << self->memory_type_id)) > 0)
if ((occ_start + temp_buf_extension_req.size > RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len) ||
((occ_start & (temp_buf_extension_req.alignment)) != 0)
){
vkDestroyBuffer(self->device, temp_buf_extension, NULL);
rr++;
continue;
}
MargaretBufferKindInfo_erase_free_space(kindred, right_free_space.start, right_free_space.len, ans->memory_allocation_id);
MargaretMemFreeSpaceManager_erase(&self->mem_free_space, RIGHT_FREE_SPACE.start, RIGHT_FREE_SPACE.len, ans->memory_allocation_id);
MargaretMemFreeSpaceManager_insert(&self->mem_free_space,
occ_start + temp_buf_extension_req.size,
RIGHT_FREE_SPACE.start + RIGHT_FREE_SPACE.len - (occ_start + temp_buf_extension_req.size),
ans->memory_allocation_id);
VecMargaretOldBufferResizeRecord_append(&old_buf_resize_record, (MargaretOldBufferResizeRecord){
.old_mem_block_id = ans->memory_allocation_id, .old_start = ans->offset, .old_len = subbuf->length,
.new_mem_block_id = ans->memory_allocation_id, .new_start = ans->offset, .new_len = new_size});
/* Success */
subbuf->length = new_size;
occ->taken_size = temp_buf_extension_req.size;
buf->capacity = subbuf_start + new_size;
vkDestroyBuffer(self->device, buf->buffer, NULL);
buf->buffer = temp_buf_extension;
/* TODO: write RBTree_Map, rewrite the whole MMA to use normal RBTree */
for (size_t si = 0; si < buf->subbuffers.el.len; buf++) {
buf->subbuffers.el.buf[si].value.ans->buffer = temp_buf_extension;
}
VecMargaretMemAllocatorRequestResizeSubBuffer_unordered_pop(&buf_requests->expand, rr);
}
}
VecMargaretResizeToNascentRecord to_nascent = VecMargaretResizeToNascentRecord_new();
VkBuffer wart_hand = VK_NULL_HANDLE;
U64 wart_capacity = 0;
VkMemoryRequirements wart_mem_req; /* undefined when wart_hand is 0 */
for (U16 bk = 0; bk < (U16)self->buffer_types.len;) {
MargaretBufferKindInfo* kindred = &self->buffer_types.buf[bk];
MargaretMemAllocatorRequestsForCertainBufferKindAllocation* buf_requests = &request->expand_alloc_buffer.buf[bk];
/* We tried methods 1, 2, now we start with method 3 and if it fails we do defragmentation */
for (U64 rr = 0; rr < buf_requests->expand.len;) {
U64 new_size = buf_requests->expand.buf[rr].new_size;
MargaretMemAllocatorSubBufferPosition* ans = buf_requests->expand.buf[rr].prev_ans;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->memory_allocation_id);
U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find(&block->occupied_memory,
ans->offset_in_device_memory_nubble - ans->offset);
U64 occ_start;
MargaretMemoryOccupation* occ;
BuffRBTree_MapU64ToMargaretMemoryOccupation_mat_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
assert(occ->variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &occ->buf;
assert(ans->offset_in_device_memory_nubble == occ_start + ans->offset);
U64 subbuf_it = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_find(&buf->subbuffers, ans->offset);
U64 subbuf_start;
MargaretBufferOccupationSubBuffer* subbuf;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_mat_iter(&buf->subbuffers, subbuf_it, &subbuf_start, &subbuf);
assert(subbuf->length <= new_size);
for (int attempt = 0; attempt < 2; attempt++) {
U64 temp_ext_wart_capacity = wart_capacity + new_size;
}
}
}
return 0;
}

View File

@ -287,15 +287,14 @@ NODISCARD VecU8 margaret_stringify_device_memory_properties_2(VkPhysicalDevice p
VkPhysicalDeviceMaintenance3Properties maintenance3_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
};
VkPhysicalDeviceMaintenance4PropertiesKHR maintenance4_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR,
VkPhysicalDeviceMaintenance4Properties maintenance4_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
.pNext = &maintenance3_properties,
};
VkPhysicalDeviceProperties2 properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
.pNext = &maintenance4_properties,
};
/* Needs VK_KHR_maintenance4 */
vkGetPhysicalDeviceProperties2(physical_device, &properties);
U64 maxBufferSize = maintenance4_properties.maxBufferSize;
U64 maxMemoryAllocationCount = properties.properties.limits.maxMemoryAllocationCount;

View File

@ -5,17 +5,17 @@
#include "../../../l1/system/fsmanip.h"
#include "../../../l1/system/creating_child_proc.h"
void check_structure_h_dfs(const VecRBTreeNode* tree, U64 x, VecU8* f){
void check_structure_h_dfs(const VecBufRBTreeNode* tree, U64 x, VecU8* f){
if (x == 0)
return;
if (*VecU8_at(f, x - 1) != 0)
check(false);
*VecU8_mat(f, x - 1) = 1;
check_structure_h_dfs(tree, VecRBTreeNode_at(tree, x)->left, f);
check_structure_h_dfs(tree, VecRBTreeNode_at(tree, x)->right, f);
check_structure_h_dfs(tree, VecBufRBTreeNode_at(tree, x)->left, f);
check_structure_h_dfs(tree, VecBufRBTreeNode_at(tree, x)->right, f);
}
U32 check_structure_h_dfs_2(RBTreeNode* tree, U64 x){
U32 check_structure_h_dfs_2(BufRBTreeNode* tree, U64 x){
if (x == 0) {
return 0;
}
@ -420,10 +420,10 @@ void chaos(){
void triple_black(){
BuffRBTree_SetS64 set;
{
VecRBTreeNode tree = VecRBTreeNode_new_zeroinit(4);
tree.buf[1] = (RBTreeNode){.parent = 3};
tree.buf[2] = (RBTreeNode){.parent = 3};
tree.buf[3] = (RBTreeNode){.left = 2, .right = 1};
VecBufRBTreeNode tree = VecBufRBTreeNode_new_zeroinit(4);
tree.buf[1] = (BufRBTreeNode){.parent = 3};
tree.buf[2] = (BufRBTreeNode){.parent = 3};
tree.buf[3] = (BufRBTreeNode){.left = 2, .right = 1};
VecS64 el = VecS64_new_reserved(3);
VecS64_append(&el, 300);
VecS64_append(&el, 100);

View File

@ -0,0 +1,262 @@
#include "../../../../gen/l1_5/RBTree_SetS64.h"
#include "../../../../gen/l1/VecAndSpan_U8.h"
#include "../../../../gen/l1/VecAndSpan_S64.h"
#include "../../../l1/core/VecU8_as_str.h"
#include "../../../l1/system/fileio.h"
#include "../../../l1/system/fsmanip.h"
#include "../../../l1/system/creating_child_proc.h"
typedef RBTreeNode_S64* RefRBTreeNode_S64;
#include "../../../../gen/l1/eve/ds_test/VecRefRBTreeNode_S64.h"
/* Returns the smallest key anywhere in the subtree rooted at x.
 * x must be a real node, not the NIL sentinel. */
S64 min_key_in_subtree(RBTreeNode_S64* x, RBTreeNode* NIL){
    check(x != NULL && &x->base != NIL);
    S64 best = x->key;
    if (x->base.left != NIL) {
        S64 left_min = min_key_in_subtree((RBTreeNode_S64*)x->base.left, NIL);
        best = MIN_S64(best, left_min);
    }
    if (x->base.right != NIL) {
        S64 right_min = min_key_in_subtree((RBTreeNode_S64*)x->base.right, NIL);
        best = MIN_S64(best, right_min);
    }
    return best;
}
/* Returns the largest key anywhere in the subtree rooted at x.
 * x must be a real node, not the NIL sentinel. */
S64 max_key_in_subtree(RBTreeNode_S64* x, RBTreeNode* NIL){
    check(x != NULL && &x->base != NIL);
    S64 best = x->key;
    if (x->base.left != NIL) {
        S64 left_max = max_key_in_subtree((RBTreeNode_S64*)x->base.left, NIL);
        best = MAX_S64(best, left_max);
    }
    if (x->base.right != NIL) {
        S64 right_max = max_key_in_subtree((RBTreeNode_S64*)x->base.right, NIL);
        best = MAX_S64(best, right_max);
    }
    return best;
}
/* Recursively validates red-black invariants for the subtree rooted at x:
 * equal black heights, parent links, BST key ordering, and no red-red edges.
 * Returns the black height of x's subtree. */
int check_structure_h_dfs(RBTree_SetS64* set, RBTreeNode_S64* x){
    check(x != NULL);
    if (&x->base == set->NIL)
        return 0;
    RBTreeNode* l = x->base.left;
    RBTreeNode* r = x->base.right;
    /* Both subtrees must contribute the same black height */
    int lbh = (l == set->NIL) ? 0 : check_structure_h_dfs(set, (RBTreeNode_S64*)l);
    int rbh = (r == set->NIL) ? 0 : check_structure_h_dfs(set, (RBTreeNode_S64*)r);
    check(lbh == rbh);
    /* Only the root may (and must) have the NIL sentinel as parent */
    if (&x->base == set->root) {
        check(x->base.parent == set->NIL);
    } else {
        check(x->base.parent != set->NIL);
    }
    /* Children must point back at this node */
    if (l != set->NIL)
        check(l->parent == &x->base);
    if (r != set->NIL)
        check(r->parent == &x->base);
    /* BST ordering of keys */
    if (l != set->NIL)
        check(max_key_in_subtree((RBTreeNode_S64*)l, set->NIL) < x->key);
    if (r != set->NIL)
        check(x->key < min_key_in_subtree((RBTreeNode_S64*)r, set->NIL));
    /* A red node must have two black children (NIL counts as black) */
    if (x->base.color == RBTREE_RED) {
        check(l->color == RBTREE_BLACK);
        check(r->color == RBTREE_BLACK);
    }
    return lbh + (x->base.color == RBTREE_BLACK ? 1 : 0);
}
/* Top-level red-black tree validity check: the root must be black, then every
 * per-node invariant is verified recursively by check_structure_h_dfs.
 * Aborts (via check) on the first violation. */
void check_correctness(RBTree_SetS64* set){
check(set->root->color == RBTREE_BLACK);
check_structure_h_dfs(set, (RBTreeNode_S64*)set->root);
}
/* Renders `set` as a Graphviz digraph, writes it to GRAPHS/GRAPH_<name>.gv,
 * and shells out to `dot` to render GRAPHS/GRAPH_<name>.png.
 * FIX: the two BFS work vectors were never dropped (leaked on every call). */
void save_tree_to_file(const RBTree_SetS64* set, SpanU8 name){
    // check_only_structure(set);
    VecU8 graph = vcstr(
        "digraph rbtree {\n"
        " fontsize = 20\n"
        " rankdir = TB\n"
        " bgcolor = \"lightgray\"\n"
        " node [fontname = \"Arial\", style=\"rounded,filled\", shape=circle];\n\n");
    VecRefRBTreeNode_S64 bfs_nxt = VecRefRBTreeNode_S64_new();
    VecRefRBTreeNode_S64 bfs = VecRefRBTreeNode_S64_new();
    if (set->root != set->NIL)
        VecRefRBTreeNode_S64_append(&bfs_nxt, (RBTreeNode_S64*)set->root);
    while (bfs_nxt.len > 0) {
        VecRefRBTreeNode_S64 t = bfs_nxt;
        bfs_nxt = bfs;
        bfs = t;
        for (size_t j = 0; j < bfs.len; j++) {
            /* NOTE: children are appended to `bfs` while it is being walked, so
             * bfs.len grows and this single pass visits the whole tree. */
            RBTreeNode_S64* cur = *VecRefRBTreeNode_S64_at(&bfs, j);
            assert(cur != NULL);
            if (cur->base.left != set->NIL)
                VecRefRBTreeNode_S64_append(&bfs, (RBTreeNode_S64*)cur->base.left);
            if (cur->base.right != set->NIL)
                VecRefRBTreeNode_S64_append(&bfs, (RBTreeNode_S64*)cur->base.right);
            /* Node: label is the key, fill color encodes the RB color */
            VecU8_append_vec(&graph, VecU8_fmt(
                " v%i [label=%i, color=%s, fontcolor=%s]\n",
                cur->key, cur->key,
                cur->base.color == RBTREE_BLACK ? cstr("black") : cstr("red"),
                cur->base.color == RBTREE_BLACK ? cstr("white") : cstr("black")));
            if (cur->base.left != set->NIL)
                VecU8_append_vec(&graph, VecU8_fmt(" v%i -> v%i [label=left]\n",
                    cur->key, ((RBTreeNode_S64*)cur->base.left)->key));
            if (cur->base.right != set->NIL)
                VecU8_append_vec(&graph, VecU8_fmt(" v%i -> v%i [label=right]\n",
                    cur->key, ((RBTreeNode_S64*)cur->base.right)->key));
        }
        bfs.len = 0;
    }
    VecRefRBTreeNode_S64_drop(bfs); /* FIX: previously leaked */
    VecRefRBTreeNode_S64_drop(bfs_nxt); /* FIX: previously leaked */
    VecU8_append_span(&graph, cstr("}\n"));
    mkdir_nofail("GRAPHS");
    VecU8 dot_filename_nt = VecU8_fmt("GRAPHS/GRAPH_%s.gv%c", name, 0);
    write_whole_file_or_abort((CSTR)dot_filename_nt.buf, VecU8_to_span(&graph));
    VecU8_drop(graph);
    VecU8_drop(dot_filename_nt);
    VecU8 command_nt = VecU8_fmt("dot -Tpng GRAPHS/GRAPH_%s.gv -o GRAPHS/GRAPH_%s.png%c", name, name, 0);
    calling_system_func_nofail((CSTR)command_nt.buf);
    VecU8_drop(command_nt);
}
/* Insert-only test suite for RBTree_SetS64:
 *  1. an empty tree already satisfies all invariants;
 *  2. 100 ascending even keys — finds must hit evens and miss odds;
 *  3. 100 descending non-positive even keys — same find checks;
 *  4. 1000 random seeds x 1000 insertions, with duplicate detection
 *     cross-checked against a linear-scan model vector.
 * FIX: the model vector `have` was leaked on every seed iteration. */
void insert_only(){
    {
        RBTree_SetS64 set = RBTree_SetS64_new();
        check_correctness(&set);
        RBTree_SetS64_drop(set);
    }
    {
        RBTree_SetS64 set = RBTree_SetS64_new();
        for (S64 p = 0; p < 100; p++) {
            bool ret = RBTree_SetS64_insert(&set, 2 * p);
            check(ret);
            check_correctness(&set);
            // save_tree_to_file(&set, cstr("last_correct"));
            for (S64 i = 0; i <= p; i++) {
                /* Every inserted even key must be found; odd keys must not */
                RBTreeNode_S64* ret1 = RBTree_SetS64_find(&set, 2 * i);
                check(ret1 != NULL);
                RBTreeNode_S64* ret2 = RBTree_SetS64_find(&set, 2 * i + 1);
                check(ret2 == NULL);
            }
        }
        RBTree_SetS64_drop(set);
    }
    {
        RBTree_SetS64 set = RBTree_SetS64_new();
        for (S64 p = -99; p <= 0; p++) {
            bool ret = RBTree_SetS64_insert(&set, 2 * p);
            check(ret);
            check_correctness(&set);
            // save_tree_to_file(&set, cstr("last_correct"));
            for (S64 i = -99; i <= p; i++) {
                RBTreeNode_S64* ret1 = RBTree_SetS64_find(&set, 2 * i);
                check(ret1 != NULL);
                RBTreeNode_S64* ret2 = RBTree_SetS64_find(&set, 2 * i + 1);
                check(ret2 == NULL);
            }
        }
        RBTree_SetS64_drop(set);
    }
    for (int s = 0; s < 1000; s++) {
        srand(s);
        VecS64 have = VecS64_new();
        RBTree_SetS64 set = RBTree_SetS64_new();
        for (int m = 0; m < 1000; m++) {
            S64 x = (S64)rand();
            /* Linear scan of the model to learn whether x is a duplicate */
            bool in = false;
            for (size_t j = 0; j < have.len; j++) {
                if (have.buf[j] == x)
                    in = true;
            }
            bool iret = RBTree_SetS64_insert(&set, x);
            if (in) {
                check(!iret);
            } else {
                check(iret);
                VecS64_append(&have, x);
            }
        }
        VecS64_drop(have); /* FIX: previously leaked once per seed */
        RBTree_SetS64_drop(set);
    }
}
/* Targeted regression repro: hand-builds a 4-node red-black tree
 *     20 (black root) — left: 0 (black) with red right child 10; right: 30 (black)
 * then erases the root key 20 and re-validates all invariants, saving a
 * Graphviz snapshot before and after the erase.
 * NOTE(review): v0/v10/v30 and NIL are never freed here, and RBTree_SetS64_drop
 * is not called — presumably acceptable for a one-shot repro, but confirm
 * whether erase() frees the removed node before adding cleanup. */
void s_4_t_42(){
RBTreeNode_S64* v0 = safe_malloc(sizeof(RBTreeNode_S64));
RBTreeNode_S64* v10 = safe_malloc(sizeof(RBTreeNode_S64));
RBTreeNode_S64* v20 = safe_malloc(sizeof(RBTreeNode_S64));
RBTreeNode_S64* v30 = safe_malloc(sizeof(RBTreeNode_S64));
/* NIL sentinel is zero-initialized: color 0 — presumably RBTREE_BLACK; confirm */
RBTreeNode* NIL = safe_calloc(1, sizeof(RBTreeNode));
*v0 = (RBTreeNode_S64){ .base.parent = &v20->base, .base.left = NIL, .base.right = &v10->base,
.base.color = RBTREE_BLACK, .key = 0};
*v10 = (RBTreeNode_S64){ .base.parent = &v0->base, .base.left = NIL, .base.right = NIL,
.base.color = RBTREE_RED, .key = 10};
*v20 = (RBTreeNode_S64){ .base.parent = NIL, .base.left = &v0->base, .base.right = &v30->base,
.base.color = RBTREE_BLACK, .key = 20};
*v30 = (RBTreeNode_S64){ .base.parent = &v20->base, .base.left = NIL, .base.right = NIL,
.base.color = RBTREE_BLACK, .key = 30};
RBTree_SetS64 set = {.root = &v20->base, .NIL = NIL};
check_correctness(&set);
save_tree_to_file(&set, cstr("SOROK_ONE"));
/* Erase the root key — the case under investigation */
check(RBTree_SetS64_erase(&set, 20));
save_tree_to_file(&set, cstr("I_AM_LOOSING_MY_FUCKING_MIND"));
check_correctness(&set);
}
void stress(){
printf("Are you ready for real stress?\n");
for (int s = 4; s < 1000; s++) {
srand(s);
RBTree_SetS64 set = RBTree_SetS64_new();
VecS64 real_set = VecS64_new();
U32 n = (U32)s;
VecS64 complementary_set = VecS64_new_reserved(n);
for (size_t i = 0; i < n; i++) {
VecS64_append(&complementary_set, (S64)i * 10);
}
for (int t = 0; t < 1000; t++) {
/* Do something */
int do_insertion = rand() % 2;
if (real_set.len == 0)
do_insertion = 1;
if (complementary_set.len == 0)
do_insertion = 0;
if (do_insertion) {
assert(complementary_set.len > 0);
size_t j = rand() % complementary_set.len;
S64 v = VecS64_unordered_pop(&complementary_set, j);
VecS64_append(&real_set, v);
check(RBTree_SetS64_insert(&set, v));
check_correctness(&set);
} else {
assert(real_set.len > 0);
size_t j = rand() % real_set.len;
S64 v = VecS64_unordered_pop(&real_set, j);
VecS64_append(&complementary_set, v);
check(RBTree_SetS64_erase(&set, v));
check_correctness(&set);
}
// VecU8 name = VecU8_fmt("t_%u", (U64)t);
// save_tree_to_file(&set, VecU8_to_span(&name));
/* We did something */
for (size_t j = 0; j < real_set.len; j++) {
check(RBTree_SetS64_find(&set, real_set.buf[j]) != NULL);
}
for (size_t j = 0; j < complementary_set.len; j++) {
check(RBTree_SetS64_find(&set, complementary_set.buf[j]) == NULL);
}
}
VecS64_drop(real_set);
VecS64_drop(complementary_set);
RBTree_SetS64_drop(set);
printf("Seed s=%d passed test\n", s);
}
}
/* Test entry point. The broader suites are kept but disabled; only the
 * targeted erase-regression repro (s_4_t_42) currently runs. */
int main(){
// insert_only();
// stress();
s_4_t_42();
return 0;
}

View File

@ -1,7 +1,7 @@
#include "../../../l1/core/util.h"
typedef U64 VkDeviceSize;
#define VK_NULL_HANDLE NULL
typedef int VkResult;
const VkResult VK_SUCCESS = 120;
@ -188,4 +188,45 @@ void vkFreeMemory(
VkDeviceMemory memory,
const VkAllocationCallbacks* pAllocator);
/* Minimal stand-ins for Vulkan's physical-device property-query types —
 * presumably a test mock (sType constants below are arbitrary, not the real
 * registry values; confirm this header is never mixed with real vulkan.h).
 * `/* ... * /` placeholders mark real-API fields intentionally omitted. */
typedef struct VkPhysicalDeviceLimits {
/* ... */
uint32_t maxMemoryAllocationCount;
/* ... */
} VkPhysicalDeviceLimits;
typedef struct VkPhysicalDeviceProperties {
/* ... */
VkPhysicalDeviceLimits limits;
/* ... */
} VkPhysicalDeviceProperties;
/* Maintenance4: exposes the maxBufferSize limit used by the allocator */
typedef struct VkPhysicalDeviceMaintenance4Properties {
VkStructureType sType;
void* pNext;
VkDeviceSize maxBufferSize;
} VkPhysicalDeviceMaintenance4Properties;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 14542;
/* Maintenance3: exposes the maxMemoryAllocationSize limit */
typedef struct VkPhysicalDeviceMaintenance3Properties {
VkStructureType sType;
void* pNext;
/* ... */
VkDeviceSize maxMemoryAllocationSize;
} VkPhysicalDeviceMaintenance3Properties;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 145;
/* Properties2: root of the pNext query chain */
typedef struct VkPhysicalDeviceProperties2 {
VkStructureType sType;
void* pNext;
VkPhysicalDeviceProperties properties;
} VkPhysicalDeviceProperties2;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 5324;
void vkGetPhysicalDeviceProperties2(
VkPhysicalDevice physicalDevice,
VkPhysicalDeviceProperties2* pProperties);
#include "../../margaret/vulkan_memory_claire.h"