I rethought everything again (how surprising) + fixed some bugs in the map template

This commit is contained in:
Андреев Григорий 2025-11-12 16:01:24 +03:00
parent e68a16d8fc
commit 6ba756fe1a
7 changed files with 340 additions and 272 deletions

View File

@ -36,10 +36,10 @@ add_compile_options(-fno-trapping-math)
#
#add_executable(l1_4_t2 src/l1_4/tests/t2.c)
#add_executable(codegen_l1_5 src/l1_5/anne/codegen.c)
add_executable(codegen_l1_5 src/l1_5/anne/codegen.c)
add_executable(0_render_test src/l2/tests/r0/r0.c gen/l_wl_protocols/xdg-shell-private.c)
target_link_libraries(0_render_test -lvulkan -lwayland-client -lm -lxkbcommon -lpng)
#add_executable(0_render_test src/l2/tests/r0/r0.c gen/l_wl_protocols/xdg-shell-private.c)
#target_link_libraries(0_render_test -lvulkan -lwayland-client -lm -lxkbcommon -lpng)
#add_executable(0r_tex_init_prep src/l2/tests/r0/r0_tex_init_prep.c)
#target_link_libraries(0r_tex_init_prep -lm -lpng)
@ -53,7 +53,7 @@ target_link_libraries(0_render_test -lvulkan -lwayland-client -lm -lxkbcommon -l
#add_executable(3_render_test src/l2/tests/r3/r3.c gen/l_wl_protocols/xdg-shell-private.c)
#target_link_libraries(3_render_test -lwayland-client -lm -lvulkan -lxkbcommon)
add_executable(l2t0_2 src/l2/tests/data_structures/t0_2.c)
#add_executable(l2t0_2 src/l2/tests/data_structures/t0_2.c) // todo: I will get back
add_executable(l2t2 src/l2/tests/data_structures/t2.c)
#add_executable(l2t0 src/l2/tests/data_structures/t0.c)

View File

@ -12,6 +12,7 @@ void generate_margaret_eve_for_vulkan_utils() {
.mut_span = true, .collab_vec_span = true, .span_sort = true
});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretCommandForImageCopying"), true, true);
/* Slated for removal */
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferInMemoryInfo"), true, false);
@ -34,35 +35,37 @@ void generate_margaret_eve_for_vulkan_utils() {
.T = cstr("BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment")
});
generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
.T = cstr("MargaretMemAllocatorOneBlock"), .vec = true, .vec_extended = true,
.T = cstr("MargaretMemAllocatorOneBlock"), .vec = true,
});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorOneMemType"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferKindInfo"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns, cstr("MargaretBufferKindInfo"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemoryOccupation"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns, cstr("KVPU64ToMargaretMemoryOccupation"), true, false);
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("MargaretMemoryOccupation"), .t_primitive = true});
.T = cstr("MargaretMemoryOccupation")});
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("RefMargaretMemoryOccupation"), .t_primitive = true});
.T = cstr("RefMargaretMemoryOccupation"), .t_ptr = true});
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("RefMutMargaretMemoryOccupation"), .t_ptr = true});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufferOccupationSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("KVPU64ToMargaretBufferOccupationSubBuffer"), true, false);
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("MargaretBufferOccupationSubBuffer"), .t_primitive = true});
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("RefMargaretBufferOccupationSubBuffer"), .t_primitive = true});
.T = cstr("RefMargaretBufferOccupationSubBuffer"), .t_ptr = true});
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("RefMutMargaretBufferOccupationSubBuffer"), .t_ptr = true});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretFreeMemSegment"), true, false);
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("MargaretFreeMemSegment"), .t_primitive = true});
generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
.T = cstr("RefMargaretFreeMemSegment"), .t_primitive = true});
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretCommandForImageCopying"), true, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocBuffer"), true, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestResizeBuffer"), true, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeBuffer"), true, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocImage"), true, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeImage"), true, true);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeImage"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestResizeSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocSubBuffer"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocImage"), true, false);
generate_eve_span_company_for_non_primitive_non_clonable(l, ns,
cstr("MargaretMemAllocatorRequestsForCertainBufferKindAllocation"), true, false);
}

View File

@ -57,6 +57,17 @@ NODISCARD VecU8 generate_VecT_struct_and_base_methods(SpanU8 T, bool primitive,
SPACE "return &self->buf[i];\n"
"}\n\n", T, VecT, VecT));
VecU8_append_vec(&res, VecU8_fmt(
"void %s_sink(%s* self, size_t new_len) {\n" /* VecT, VecT */
SPACE "assert(new_len <= self->len);\n"
"%v" /* dropping */
SPACE "self->len = new_len;\n"
"}\n\n", VecT, VecT,
primitive ? vcstr("") : VecU8_fmt(
SPACE "for (size_t i = new_len; i < self->len; i++)\n"
SPACE SPACE "%s_drop(self->buf[i]);\n", /* T */
T)));
if (clonable) {
VecU8_append_vec(&res, VecU8_fmt(
"NODISCARD %s %s_clone(const %s* self) {\n" /* VecT, VecT, VecT */

View File

@ -25,11 +25,13 @@ void generate_l1_5_template_instantiations_for_margaret(){
.alternative_comp_set_name_embed = cstr("LenRespAlign"),
.guest_data_T = cstr("U8")
});
generate_rb_tree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
.T = cstr("MargaretMemoryOccupation"), .t_primitive = true,
generate_rb_tree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
.K = cstr("U64"), .k_integer = true, .V = cstr("MargaretMemoryOccupation"), /* MargaretMemoryOccupation is not primitive */
.unconditional_equality = true
});
generate_rb_tree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
.T = cstr("MargaretBufferOccupationSubBuffer"), .t_primitive = true,
generate_rb_tree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
.K = cstr("U64"), .k_integer = true, .V = cstr("MargaretBufferOccupationSubBuffer"), .v_primitive = true,
.unconditional_equality = true
});
}

View File

@ -109,7 +109,7 @@ NODISCARD VecU8 codegen_rb_tree_map__taking_ref_k_argument(map_instantiation_op
}
NODISCARD VecU8 codegen_rb_tree_map__taking_t_argument(map_instantiation_op op){
return op.V.len > 0 ? VecU8_fmt("%s key, %s value") : VecU8_fmt("%s key");
return op.V.len > 0 ? VecU8_fmt("%s key, %s value", op.K, op.V) : VecU8_fmt("%s key", op.K);
}
/* Yes, both sets and maps use this function to instantiate themselves. No, user does not need to use it
@ -124,7 +124,7 @@ void codegen_append_rb_tree_map__structure_and_simplest_methods(
SPACE "VecRBTreeNode tree;\n"
SPACE "U64 root;\n"
SPACE "Vec%s el;\n"
"%s"
"%v"
"} %s;\n\n",
TT, op.guest_data_T.len > 0 ? VecU8_fmt(SPACE "%s guest;\n", op.guest_data_T) : vcstr(""), set));
@ -484,7 +484,7 @@ void codegen_append_rb_tree_map__erase_kind_method(
}
/* When method returns constant pointer to found key (wrapped in Option) we will use this type
* Ofcourse, it can turn out that it is not generated. So be careful and generate it by yourself
* Of course, it can turn out that it is not generated. So be careful and generate it by yourself
*/
NODISCARD VecU8 codegen_rb_tree_map__option_returned_ref_t(map_instantiation_op op, bool mut){
/* Constant pointer to an integer is an integer */
@ -499,38 +499,42 @@ NODISCARD VecU8 codegen_rb_tree_map__option_returned_ref_t(map_instantiation_op
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("Option%s", op.V);
return mut ? VecU8_fmt("OptionMutRef%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
return mut ? VecU8_fmt("OptionRefMut%s", op.V) : VecU8_fmt("OptionRef%s", op.V);
}
return op.k_integer ? VecU8_fmt("Option%s", op.K) : VecU8_fmt("OptionRef%s", op.K);
}
/* Suppose some method returns pointer to key (ofc wrapped in option). And we found what to return
* we return it from self->el array */
NODISCARD VecU8 codegen_rb_tree_map__some_ref_t(map_instantiation_op op){
NODISCARD VecU8 codegen_rb_tree_map__some_ref_t(map_instantiation_op op, bool mut){
assert(!op.unconditional_equality || op.V.len > 0);
assert(!(op.V.len > 0) || op.unconditional_equality);
assert(!mut || op.V.len > 0);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1].value)", op.V);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1]->value)", op.V);
if (mut)
return VecU8_fmt("Some_RefMut%s(&self->el.buf[cur - 1].value)", op.V);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1].value)", op.V);
}
if (op.k_integer)
return VecU8_fmt("Some_%s(self->el.buf[cur - 1])", op.K);
return VecU8_fmt("Some_Ref%s(&self->el.buf[cur - 1])", op.K);
}
/* Suppose some method returns pointer to key (ofc wrapped in option). But this time we found nothing */
NODISCARD VecU8 codegen_rb_tree_map__none_ref_t(map_instantiation_op op){
NODISCARD VecU8 codegen_rb_tree_map__none_ref_t(map_instantiation_op op, bool mut){
assert(!op.unconditional_equality || op.V.len > 0);
assert(!(op.V.len > 0) || op.unconditional_equality);
assert(!mut || op.V.len > 0);
if (op.V.len > 0)
return op.v_integer ? VecU8_fmt("None_%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V);
return op.k_integer ? VecU8_fmt("None_%s()", op.K) : VecU8_fmt("None_Ref%s", op.K);
if (op.V.len > 0) {
if (op.v_integer)
return VecU8_fmt("None_%s()", op.V);
return mut ? VecU8_fmt("None_RefMut%s()", op.V) : VecU8_fmt("None_Ref%s()", op.V) ;
}
return op.k_integer ? VecU8_fmt("None_%s()", op.K) : VecU8_fmt("None_Ref%s()", op.K);
}
/* Implementing it for a set was a biggest mistake of my day */
/* Implementing it for a set was the biggest mistake of my day */
void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
VecU8_append_vec(res, VecU8_fmt(
"%v %s_%s(%s%s* self, %v key) {\n" /* option_returned_ref_t, set, mat/at, e/const, set, taking_ref_t_argument */
@ -550,9 +554,9 @@ void codegen_append_rb_tree_map__method_at(VecU8* res, map_instantiation_op op,
mut ? cstr("") : cstr("const "), set, codegen_rb_tree_map__taking_ref_k_argument(op),
codegen_rb_tree_map__key_ref_EQUAL_element(op),
codegen_rb_tree_map__some_ref_t(op),
codegen_rb_tree_map__some_ref_t(op, mut),
codegen_rb_tree_map__key_ref_LESS_element(op),
codegen_rb_tree_map__none_ref_t(op)
codegen_rb_tree_map__none_ref_t(op, mut)
));
}
@ -637,6 +641,13 @@ NODISCARD VecU8 generate_rb_tree_Set_template_instantiation(set_instantiation_op
if (!op.unconditional_equality)
codegen_append_rb_tree_map__method_at(&res, map_op, set, false);
VecU8_append_vec(&res, VecU8_fmt(
"const %s* %s_at_iter(const %s* self, U64 it) {\n" /* op.T, set, set */
SPACE "assert(0 < it && it < self->tree.len);\n"
SPACE "return &self->el.buf[it - 1];\n"
"}\n\n",
op.T, set, set));
VecU8_drop(g_set);
return res;
}
@ -671,6 +682,23 @@ void generate_rb_tree_Set_templ_inst_guarded_header(
finish_header(head);
}
/* ========= Now we add Map<K, V> into the picture ======== */
void codegen_append_rb_tree_map__method_at_iter(VecU8* res, map_instantiation_op op, SpanU8 set, bool mut){
assert(op.V.len > 0);
VecU8_append_vec(res, VecU8_fmt(
"void %s_%s(%s%s* self, U64 it, %v* ret_key, %v* ret_value) {\n" /* set, method name, self access modifier, set, key ret ptr, value ret ptr */
SPACE "assert(0 < it && it < self->tree.len);\n"
SPACE "*ret_key = %s" "self->el.buf[it - 1].key;\n" /* epsilon / ampersand */
SPACE "*ret_value = %s" "self->el.buf[it - 1].value;\n" /* epsilon / ampersand */
"}\n\n",
set, mut ? cstr("mat_iter") : cstr("at_iter"), mut ? cstr("") : cstr("const "), set,
op.k_integer ? VecU8_from_span(op.K) : VecU8_fmt("const %s*", op.K),
mut ? VecU8_fmt("%s*", op.V) : (op.v_integer ? VecU8_from_span(op.V) : VecU8_fmt("const %s*", op.V)),
op.k_integer ? cstr("") : cstr("&"), op.v_integer ? cstr("") : cstr("&")));
}
NODISCARD VecU8 get_name_of_rb_tree_map_structure(map_instantiation_op op){
if (op.alternative_comp_map_name_embed.len)
return VecU8_fmt("BuffRBTreeBy%s_Map%sTo%s", op.alternative_comp_map_name_embed, op.K, op.V);
@ -682,36 +710,37 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
VecU8 res = VecU8_new();
VecU8 map_g = get_name_of_rb_tree_map_structure(op);
SpanU8 map = VecU8_to_span(&map_g);
VecU8 kvp_g = get_name_of_rb_tree_map_structure(op);
SpanU8 kvp = VecU8_to_span(&kvp_g);
codegen_append_rb_tree_map__structure_and_simplest_methods(&res, op, map, kvp);
VecU8 kvp_g = VecU8_fmt("KVP%sTo%s", op.K, op.V);
codegen_append_rb_tree_map__structure_and_simplest_methods(&res, op, map, VecU8_to_span(&kvp_g));
VecU8_drop(kvp_g);
VecU8_append_span(&res, cstr("return false\n"));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("insert"), vcstr("bool"),
vcstr("return true;\n"),
VecU8_fmt("%v%v" "return false;\n",
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(key);\n", op.k_primitive),
op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(value);\n", op.v_primitive)));
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(key);\n", op.K),
op.v_primitive ? vcstr("") : VecU8_fmt("%s_drop(value);\n", op.V)));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("erase_substitute"), vcstr("bool"),
vcstr("return true;\n"),
VecU8_fmt("%v" "self->el.buf[cur - 1].key = key;\n" "%v" "self->el.buf[cur - 1].value = key;\n"
VecU8_fmt("%v%v"
"self->el.buf[cur - 1].key = key;\n"
"self->el.buf[cur - 1].value = value;\n"
"return false;\n",
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.k_primitive),
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.v_primitive)
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].value);\n", op.V)
));
codegen_append_rb_tree_map__insert_kind_method(&res, op, map, cstr("pop_substitute"),
VecU8_fmt("Option%s", op.V),
VecU8_fmt("return None_%s;\n", op.V),
VecU8_fmt("return None_%s();\n", op.V),
VecU8_fmt(
"%v" "self->el.buf[cur - 1].key = key;\n" /**/
"%s saved = self->el.buf[cur - 1].value;\n" /* op.V */
"self->el.buf[cur - 1].value = value;\n"
"return Some_%s(saved);\n", /* op.V */
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.k_primitive),
op.k_primitive ? vcstr("") : VecU8_fmt("%s_drop(self->el.buf[cur - 1].key);\n", op.K),
op.V, op.V));
/* Erasing time!!!! */
@ -736,7 +765,40 @@ NODISCARD VecU8 generate_rb_tree_Map_template_instantiation(map_instantiation_op
codegen_append_rb_tree_map__method_at(&res, op, map, false);
codegen_append_rb_tree_map__method_at(&res, op, map, true);
/* These functions break by design and return their answers through pointers given in arguments. For greater good, of course */
codegen_append_rb_tree_map__method_at_iter(&res, op, map, false);
codegen_append_rb_tree_map__method_at_iter(&res, op, map, true);
return res;
}
void generate_rb_tree_Map_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, map_instantiation_op op) {
VecU8 text = VecU8_from_cstr("/* Automatically generated file. Do not edit it.\n"
" * Do not include it in more than one place */\n\n");
VecU8_append_vec(&text, generate_rb_tree_Map_template_instantiation(op));
VecU8 nt_path = VecU8_fmt("%s/eve/%s/%v.h%c", layer, bonus_ns, get_name_of_rb_tree_map_structure(op), 0);
write_whole_file_or_abort((const char*)nt_path.buf, VecU8_to_span(&text));
VecU8_drop(nt_path);
VecU8_drop(text);
}
void generate_rb_tree_Map_templ_inst_guarded_header(
SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, map_instantiation_op op
){
assert(layer.len > 1);
VecU8 path = VecU8_fmt("%s/%s%s%v.h", layer, bonus_ns, bonus_ns.len ? cstr("/") : cstr(""),
get_name_of_rb_tree_map_structure(op));
GeneratedHeader head = begin_header(VecU8_to_span(&path));
VecU8_drop(path);
VecU8_append_span(&head.result, cstr("#include \"../../"));
int to_my_layer = get_number_of_parts_in_header_namespace(bonus_ns);
for (int i = 0; i < to_my_layer; i++)
VecU8_append_span(&head.result, cstr("../"));
VecU8_append_span(&head.result, cstr("src/l1_5/core/rb_tree_node.h\"\n"));
VecU8_append_span(&head.result, dependencies);
VecU8_append_span(&head.result, cstr("\n\n"));
VecU8_append_vec(&head.result, generate_rb_tree_Map_template_instantiation(op));
finish_header(head);
}
#endif

View File

@ -196,31 +196,36 @@ typedef struct {
U64 offset_in_device_memory_nubble;
/* If your buffer kind requested VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, MargaretMemAllocator
* will map all nubbles with it. Use `offset_in_device_memory_nubble` to access this buffer from cpu */
U8 memory_type_id;
U32 memory_allocation_id;
} MargaretMemAllocatorBufferPosition;
} MargaretMemAllocatorSubBufferPosition;
typedef struct {
U64 size;
U32 kind;
/* If I were you, I would just store this in heap and did not care */
MargaretMemAllocatorBufferPosition* ans;
} MargaretMemAllocatorRequestAllocBuffer;
typedef struct {
MargaretMemAllocatorBufferPosition* prev_ans;
U64 new_size;
} MargaretMemAllocatorRequestResizeBuffer;
typedef MargaretMemAllocatorBufferPosition* MargaretMemAllocatorRequestFreeBuffer;
typedef struct{
VkImage image;
U64 offset_in_device_memory_nubble;
U8 memory_type_id;
U16 memory_allocation_id;
} MargaretMemAllocatorImagePosition;
typedef MargaretMemAllocatorSubBufferPosition* MargaretMemAllocatorRequestFreeSubBuffer;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestFreeSubBuffer.h"
typedef MargaretMemAllocatorImagePosition* MargaretMemAllocatorRequestFreeImage;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestFreeImage.h"
typedef struct {
MargaretMemAllocatorSubBufferPosition* prev_ans;
U64 new_size;
} MargaretMemAllocatorRequestResizeSubBuffer;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestResizeSubBuffer.h"
typedef struct {
/* We don't specify the buffer kind, because you should place this request in a vector,
* corresponding to needed buffer kind */
U64 size;
/* If I were you, I would just store this in the heap and not worry about it */
MargaretMemAllocatorSubBufferPosition* ans;
} MargaretMemAllocatorRequestAllocSubBuffer;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestAllocSubBuffer.h"
typedef struct {
U64 width;
U64 height;
@ -233,60 +238,59 @@ typedef struct {
/* If I were you, I would just store this in the heap and not worry about it */
MargaretMemAllocatorImagePosition* ans;
} MargaretMemAllocatorRequestAllocImage;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestAllocImage.h"
typedef MargaretMemAllocatorImagePosition* MargaretMemAllocatorRequestFreeImage;
#include "../../../gen/l1/eve/margaret/VecAndSpan_MargaretMemAllocatorRequestAllocBuffer.h"
#include "../../../gen/l1/eve/margaret/VecAndSpan_MargaretMemAllocatorRequestResizeBuffer.h"
#include "../../../gen/l1/eve/margaret/VecAndSpan_MargaretMemAllocatorRequestFreeBuffer.h"
#include "../../../gen/l1/eve/margaret/VecAndSpan_MargaretMemAllocatorRequestAllocImage.h"
#include "../../../gen/l1/eve/margaret/VecAndSpan_MargaretMemAllocatorRequestFreeImage.h"
/* It is users job to put resize and alloca requests for sub-buffers of type T to the corresponding request
* vectors for this exact type T */
typedef struct {
VecMargaretMemAllocatorRequestResizeSubBuffer resize;
VecMargaretMemAllocatorRequestAllocSubBuffer alloc;
} MargaretMemAllocatorRequestsForCertainBufferKindAllocation;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation.h"
typedef struct {
SpanMargaretMemAllocatorRequestAllocBuffer alloc_buffer;
SpanMargaretMemAllocatorRequestResizeBuffer realloc_buffer;
SpanMargaretMemAllocatorRequestFreeBuffer free_buffer;
SpanMargaretMemAllocatorRequestAllocImage alloc_image;
SpanMargaretMemAllocatorRequestFreeImage free_image;
VecMargaretMemAllocatorRequestFreeSubBuffer free_subbuffer;
VecMargaretMemAllocatorRequestFreeImage free_image;
VecMargaretMemAllocatorRequestsForCertainBufferKindAllocation resize_alloc_buffer;
VecMargaretMemAllocatorRequestAllocImage alloc_image;
} MargaretMemAllocatorRequest;
/* That is our guy! */
typedef struct MargaretMemAllocator MargaretMemAllocator;
MargaretMemAllocator MargaretMemAllocator_new(
VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types);
VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types,
VkMemoryPropertyFlags mem_properties, U8 memory_type_id);
/* Demands + Warnings */
typedef struct {
/* If for some memory types we will do defragmentation, MargaretMemAllocator warns us that for
* these memory types position-structures, that it filled, will be updated. If these values
* (buffer/image handlers + sub-buffer positions) are dependencies of other
* objects, these objects need to be updated (or rebuilt) */
U32 defragmented_mem_types;
/* If for some set of requests MargaretMemAllocator needs to execute some Vulkan copying commands,
* it will demand you to actually execute the command buffer that you gave it. If this is `true` it does
* not necessarily mean that defragmentation is happening right now, no, defragmentation is indicated by
* `defragmented_mem_types` warning field, but if you are doing a DIRECT BUFFER (sub-buffer in terms of
* vulkan) RESIZE, this sub-buffer may be copied.
* It won't affect other data structures in your memory,
* of course, (still, notice that position of your sub-buffer will be updated).
*/
bool need_command_buffer;
} MargaretMemAllocatorDemands;
typedef U8 MargaretMemAllocatorDemands;
/* If we do defragmentation, MargaretMemAllocator warns us that for
* position-structures, that it filled, will be updated. If these values
* (buffer/image handlers + sub-buffer positions) are dependencies of other
* objects, these objects need to be updated (or rebuilt) */
#define MARGARET_MEM_ALLOCATOR_DEMANDS_DEFRAGMENTATION_BITS 1
/* If for some set of requests MargaretMemAllocator needs to execute some Vulkan copying commands,
* it will demand you to actually execute the command buffer that you gave it. If this is `true` it does
* not necessarily mean that defragmentation is happening right now, no, defragmentation is indicated by
* `MARGARET_MEM_ALLOCATOR_DEMANDS_DEFRAGMENTATION_BITS` warning bit,
* but if you are doing a DIRECT BUFFER (sub-buffer in terms of
* vulkan) RESIZE, this sub-buffer may be copied.
* It won't affect other data structures in your memory,
* of course, (still, notice that position of your sub-buffer will be updated).
*/
#define MARGARET_MEM_ALLOCATOR_DEMANDS_CMD_BUFFER 2
/* Appends copying commands into cmd_buff. It may append none. Defragmentation, device memory relocation
* need copying commands, but buffer resize may also require copying */
MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest request);
MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest* request);
void MargaretMemAllocator_wipe_old(MargaretMemAllocator* self);
char* MargaretMemAllocator_get_host_visible_buffer_ptr(
const MargaretMemAllocator* self, const MargaretMemAllocatorBufferPosition* pos);
// todo: add same shit for images in host visible buffer
const MargaretMemAllocator* self, const MargaretMemAllocatorSubBufferPosition* pos);
#define MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP 28
#define MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP 21
/* ======= End of MargaretMemAllocator client interface ======== */
@ -306,11 +310,35 @@ typedef struct {
MargaretMemAllocatorImagePosition* ans;
} MargaretMemoryOccupationImage;
/* Stored in MargaretMemoryOccupation::buf */
typedef struct {
U64 length;
MargaretMemAllocatorSubBufferPosition* ans;
} MargaretBufferOccupationSubBuffer;
/* Needed for Map<U64, MargaretBufferOccupationSubBuffer> */
typedef const MargaretBufferOccupationSubBuffer* RefMargaretBufferOccupationSubBuffer;
typedef MargaretBufferOccupationSubBuffer* RefMutMargaretBufferOccupationSubBuffer;
typedef struct {
U64 key; /* start */
MargaretBufferOccupationSubBuffer value;
} KVPU64ToMargaretBufferOccupationSubBuffer;
#include "../../../gen/l1/eve/margaret/VecKVPU64ToMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1/eve/margaret/OptionMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1/eve/margaret/OptionRefMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1/eve/margaret/OptionRefMutMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1_5/eve/margaret/BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer.h"
/* Not primitive */
typedef struct {
U16 kind;
VkBuffer buffer;
U64 occupied_by_sub_buffers;
U64 capacity;
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer subbuffers;
} MargaretMemoryOccupationBuffer;
typedef enum {
@ -319,64 +347,37 @@ typedef enum {
} MargaretMemoryOccupation_variant;
typedef struct {
MargaretMemoryOccupation_variant variant;
U64 start;
U64 taken_size;
MargaretMemoryOccupation_variant variant;
union {
MargaretMemoryOccupationImage img;
MargaretMemoryOccupationBuffer buf;
};
} MargaretMemoryOccupation;
bool MargaretMemoryOccupation_equal_MargaretMemoryOccupation(
const MargaretMemoryOccupation* A, const MargaretMemoryOccupation* B
){
return A->start == B->start;
void MargaretMemoryOccupation_drop(MargaretMemoryOccupation self){
if (self.variant == MargaretMemoryOccupation_Buffer) {
BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_drop(self.buf.subbuffers);
}
}
bool MargaretMemoryOccupation_less_MargaretMemoryOccupation(
const MargaretMemoryOccupation* A, const MargaretMemoryOccupation* B
){
return A->start < B->start;
}
/* Needed for Set<MargaretMemoryOccupation> */
/* Needed for Map<U64, MargaretMemoryOccupation> */
typedef const MargaretMemoryOccupation* RefMargaretMemoryOccupation;
typedef MargaretMemoryOccupation* RefMutMargaretMemoryOccupation;
#include "../../../gen/l1/eve/margaret/VecMargaretMemoryOccupation.h"
typedef struct{
U64 key;
MargaretMemoryOccupation value;
} KVPU64ToMargaretMemoryOccupation;
#include "../../../gen/l1/eve/margaret/VecKVPU64ToMargaretMemoryOccupation.h"
#include "../../../gen/l1/eve/margaret/OptionMargaretMemoryOccupation.h"
#include "../../../gen/l1/eve/margaret/OptionRefMargaretMemoryOccupation.h"
#include "../../../gen/l1_5/eve/margaret/BuffRBTree_SetMargaretMemoryOccupation.h"
#include "../../../gen/l1/eve/margaret/OptionRefMutMargaretMemoryOccupation.h"
#include "../../../gen/l1_5/eve/margaret/BuffRBTree_MapU64ToMargaretMemoryOccupation.h"
typedef struct {
U64 start;
U64 length;
MargaretMemAllocatorBufferPosition* ans;
} MargaretBufferOccupationSubBuffer;
bool MargaretBufferOccupationSubBuffer_equal_MargaretBufferOccupationSubBuffer(
const MargaretBufferOccupationSubBuffer* A, const MargaretBufferOccupationSubBuffer* B
){
return A->start == B->start;
}
bool MargaretBufferOccupationSubBuffer_less_MargaretBufferOccupationSubBuffer(
const MargaretBufferOccupationSubBuffer* A, const MargaretBufferOccupationSubBuffer* B
){
return A->start < B->start;
}
/* Needed for Set<MargaretBufferOccupationSubBuffer> */
typedef const MargaretBufferOccupationSubBuffer* RefMargaretBufferOccupationSubBuffer;
#include "../../../gen/l1/eve/margaret/VecMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1/eve/margaret/OptionMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1/eve/margaret/OptionRefMargaretBufferOccupationSubBuffer.h"
#include "../../../gen/l1_5/eve/margaret/BuffRBTree_SetMargaretBufferOccupationSubBuffer.h"
typedef struct {
BuffRBTree_SetMargaretMemoryOccupation occupied_memory;
BuffRBTree_SetMargaretBufferOccupationSubBuffer occupied_buffers;
BuffRBTree_MapU64ToMargaretMemoryOccupation occupied_memory;
U64 length;
U64 occupation_counter;
VkDeviceMemory mem_hand;
@ -384,8 +385,7 @@ typedef struct {
} MargaretMemAllocatorOneBlock;
void MargaretMemAllocatorOneBlock_drop(MargaretMemAllocatorOneBlock self){
BuffRBTree_SetMargaretMemoryOccupation_drop(self.occupied_memory);
BuffRBTree_SetMargaretBufferOccupationSubBuffer_drop(self.occupied_buffers);
BuffRBTree_MapU64ToMargaretMemoryOccupation_drop(self.occupied_memory);
}
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorOneBlock.h"
@ -395,8 +395,8 @@ void MargaretMemAllocatorOneBlock_drop(MargaretMemAllocatorOneBlock self){
typedef struct {
U64 start;
U64 len;
/* If this value somehow got higher than zero, your life fucking sucks */
U8 dev_mem_block;
/* If this value somehow got higher than zero, your life f****ng sucks */
U32 dev_mem_block;
} MargaretFreeMemSegment;
bool MargaretFreeMemSegment_equal(
@ -440,11 +440,8 @@ bool MargaretFreeMemSegment_less_resp_align(
return A_len < B_len;
}
typedef const MargaretFreeMemSegment* RefMargaretFreeMemSegment;
#include "../../../gen/l1/eve/margaret/VecMargaretFreeMemSegment.h"
#include "../../../gen/l1/eve/margaret/OptionMargaretFreeMemSegment.h"
#include "../../../gen/l1/eve/margaret/OptionRefMargaretFreeMemSegment.h"
#include "../../../gen/l1_5/eve/margaret/BuffRBTreeByLen_SetMargaretFreeMemSegment.h"
#include "../../../gen/l1_5/eve/margaret/BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment.h"
#include "../../../gen/l1/eve/margaret/OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment.h"
@ -459,8 +456,44 @@ typedef struct{
} MargaretOldBufferResizeRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretOldBufferResizeRecord.h"
#include "../../../gen/l1/VecAndSpan_U8.h"
/* Superstructure for managing free segments of memory of some type in ALL BLOCKS */
typedef struct {
OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment free_space_in_memory[MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP];
VecU8 set_present;
} MargaretMemFreeSpaceManager;
MargaretMemFreeSpaceManager MargaretMemFreeSpaceManager_new(){
MargaretMemFreeSpaceManager res = {.set_present = VecU8_new()};
for (U8 algn = 0; algn < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; algn++)
res.free_space_in_memory[algn] = None_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment();
return res;
}
void MargaretMemFreeSpaceManager_drop(MargaretMemFreeSpaceManager self){
for (U8 alignment_exp = 0; alignment_exp < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; alignment_exp++)
OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_drop(self.free_space_in_memory[alignment_exp]);
VecU8_drop(self.set_present);
}
typedef struct {
VkBufferUsageFlags usage;
U8 inner_alignment_exp;
bool preserve_at_quiet;
U64 total_occupation;
BuffRBTreeByLen_SetMargaretFreeMemSegment free_space_inside_buffers;
} MargaretBufferKindInfo;
void MargaretBufferKindInfo_drop(MargaretBufferKindInfo self){
BuffRBTreeByLen_SetMargaretFreeMemSegment_drop(self.free_space_inside_buffers);
}
#include "../../../gen/l1/eve/margaret/VecMargaretBufferKindInfo.h"
/* VkDevice and VkPhysicalDevice stay remembered here. Don't forget that, please */
struct MargaretMemAllocator {
VecMargaretMemAllocatorOneBlock blocks;
/* old_blocks is usually empty. BUT! When you generated a defragmentation command buffer with
* MargaretMemAllocator_carry_out_request, this vector will be filled with old blocks, while
@ -473,154 +506,108 @@ typedef struct {
/* If your previous set of requests did not cause defragmentation, it could cause relocation of some data
* in a subbuffer that you wanted to resize */
VecMargaretOldBufferResizeRecord old_buff_resize_record;
OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment free_space_in_memory[MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP];
VkMemoryPropertyFlags mem_properties;
} MargaretMemAllocatorOneMemType;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorOneMemType.h"
/* Drop method for MargaretMemAllocatorOneMemType (the template instantiator
 * requires one). Releases the block vector and every per-alignment optional
 * free-space tree. */
void MargaretMemAllocatorOneMemType_drop(MargaretMemAllocatorOneMemType self){
    VecMargaretMemAllocatorOneBlock_drop(self.blocks);
    U8 exp = 0;
    while (exp < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP) {
        OptionBuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment_drop(self.free_space_in_memory[exp]);
        exp++;
    }
}
/* NOTE(review): this typedef duplicates the MargaretBufferKindInfo defined earlier
 * in this file (the variant carrying preserve_at_quiet). It looks like stale
 * pre-refactor residue — the file cannot compile with both; keep exactly one. */
typedef struct {
    VkMemoryPropertyFlags mem_properties;
    VkBufferUsageFlags usage;
    /* Memory type index chosen once (via a probe buffer) for this buffer kind. */
    U8 chosen_memory_type;
    U8 inner_alignment_exp;
    U64 total_occupation;
    BuffRBTreeByLen_SetMargaretFreeMemSegment free_space_inside_buffers;
} MargaretBufferKindInfo;
#include "../../../gen/l1/eve/margaret/VecMargaretBufferKindInfo.h"
#define MARGARET_ALLOC_MAX_ALLOWED_BUFFER_JUTTING 255
/* VkDevice and VkPhysicalDevice stay remembered here. Don't forget that, please */
struct MargaretMemAllocator {
    /* NOTE(review): this field list appears to mix two revisions (diff residue).
     * mem_types looks stale — MargaretMemAllocator_new and _wipe_old below address
     * self->blocks / self->old_blocks / self->old_buff_resize_record directly and
     * never go through mem_types. Confirm the intended field set and prune. */
    VecMargaretMemAllocatorOneMemType mem_types;
    /* Free-segment superstructure shared across all blocks. */
    MargaretMemFreeSpaceManager mem_free_space;
    /* One entry per buffer kind passed to MargaretMemAllocator_new. */
    VecMargaretBufferKindInfo buffer_types;
    /* Properties and index of the single memory type this allocator serves. */
    VkMemoryPropertyFlags mem_properties;
    U8 memory_type_id;
    VkDevice device;
    VkPhysicalDevice physical_device;
};
/* Tears the allocator down. NOTE(review): teardown is incomplete, as the todo below
 * admits — buffer_types (and any old_blocks / old_buff_resize_record state) are never
 * dropped here, and Vulkan objects living inside the blocks are not destroyed; this
 * leaks until the todo is resolved. Also note self.blocks does not appear in the
 * struct definition visible above — likely diff residue; confirm the field set. */
void MargaretMemAllocator_drop(MargaretMemAllocator self){
    // todo: first: drop absolutely everything
    VecMargaretMemAllocatorOneBlock_drop(self.blocks);
    MargaretMemFreeSpaceManager_drop(self.mem_free_space);
}
MargaretMemAllocator MargaretMemAllocator_new(
VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types, ){
VkPhysicalDeviceMemoryProperties phd_props;
vkGetPhysicalDeviceMemoryProperties(physical_device, &phd_props);
assert(phd_props.memoryTypeCount < VK_MAX_MEMORY_TYPES);
MargaretMemAllocator self = {.buffer_types = VecMargaretBufferKindInfo_new_zeroinit(buffer_types.len),
.device = device, .physical_device = physical_device,
.mem_types = VecMargaretMemAllocatorOneMemType_new_zeroinit(phd_props.memoryTypeCount)};
VkDevice device, VkPhysicalDevice physical_device, SpanMargaretBufferKindDescription buffer_types,
VkMemoryPropertyFlags mem_properties, U8 memory_type_id
){
MargaretMemAllocator self = {
.buffer_types = VecMargaretBufferKindInfo_new_reserved(buffer_types.len),
.blocks = VecMargaretMemAllocatorOneBlock_new(),
.old_blocks = VecMargaretMemAllocatorOneBlock_new(),
.old_buff_resize_record = VecMargaretOldBufferResizeRecord_new(),
.mem_free_space = MargaretMemFreeSpaceManager_new(),
.memory_type_id = memory_type_id,
.mem_properties = mem_properties,
.device = device,
.physical_device = physical_device
};
for (size_t i = 0; i < buffer_types.len; i++) {
const MargaretBufferKindDescription* desc = &buffer_types.data[i];
check(U64_is_2pow(desc->inner_alignment));
/* We create a test buffer to check which memory types will support it. We decide the link between
* buffer kind and memory type PERMANENTLY. Yes, I don't care what the specification says, if your
* buffer memory type support changes with size, EAT A DICK, fuck you, get the segfault you deserve
*/
VkBufferCreateInfo crinfo = {
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.size = desc->inner_alignment, /* Why not */
VecMargaretBufferKindInfo_append(&self.buffer_types, (MargaretBufferKindInfo){
.usage = desc->usage_flags,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
};
VkBuffer test_buffer;
if (vkCreateBuffer(device, &crinfo, NULL, &test_buffer) != VK_SUCCESS)
abortf("Kill yourself!\n");
VkMemoryRequirements mem_requirements;
vkGetBufferMemoryRequirements(device, test_buffer, &mem_requirements);
U8 first_good_memory_type;
for (U8 mt = 0; mt < (U8)phd_props.memoryTypeCount; mt++) {
if ((mem_requirements.memoryTypeBits & (1u << mt)) != 0 &&
(phd_props.memoryTypes[mt].propertyFlags & desc->memory_properties) == desc->memory_properties) {
first_good_memory_type = mt;
goto found_good_memory_type;
}
}
abortf("No good memory type for %" PRIu64 " buffer kind\n", i);
found_good_memory_type:
vkDestroyBuffer(device, test_buffer, NULL);
self.buffer_types.buf[i] = (MargaretBufferKindInfo){
.mem_properties = desc->memory_properties, .usage = desc->usage_flags,
.chosen_memory_type = first_good_memory_type, .inner_alignment_exp = U64_2pow_log(desc->inner_alignment),
.total_occupation = 0, .free_space_inside_buffers = BuffRBTreeByLen_SetMargaretFreeMemSegment_new()
};
}
assert(self.mem_types.len == phd_props.memoryTypeCount);
for (U32 i = 0; i < phd_props.memoryTypeCount; i++) {
self.mem_types.buf[i].blocks = VecMargaretMemAllocatorOneBlock_new();
for (U8 ae = 0; ae < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; ae++) {
self.mem_types.buf[i].free_space_in_memory[ae] = None_BuffRBTreeByLenRespAlign_SetMargaretFreeMemSegment();
}
self.mem_types.buf[i].mem_properties = phd_props.memoryTypes[i].propertyFlags;
.inner_alignment_exp = U64_2pow_log(desc->inner_alignment),
.preserve_at_quiet = desc->preserve_at_quiet,
.total_occupation = 0,
.free_space_inside_buffers = BuffRBTreeByLen_SetMargaretFreeMemSegment_new()
});
}
return self;
}
/* NOTE(review): this function carries merge/diff residue — two parameter lists sit
 * back to back (by-value request vs pointer request) and there are two return
 * statements, the second unreachable. Exactly one signature and one return must be
 * kept before this compiles; cannot tell from this chunk which return is intended
 * (`return 0;` only works if MargaretMemAllocatorDemands is now an integer typedef). */
MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
        MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest request
        MargaretMemAllocator* self, VkCommandBuffer cmd_buff, MargaretMemAllocatorRequest* request
){
    /* Flush blocks / resize records left over from the previous request cycle. */
    MargaretMemAllocator_wipe_old(self);
    /* NOTE(review): request processing is unimplemented — the loop body is empty. */
    for (U8 mi = 0; mi < (U8)self->mem_types.len; mi++) {
        MargaretMemAllocatorOneMemType* x = &self->mem_types.buf[mi];
        // for (U64 i = 0; i < request->)
    }
    return (MargaretMemAllocatorDemands){.defragmented_mem_types = 0, .need_command_buffer = false};
    return 0;
}
/* Releases everything the previous request cycle left behind:
 *   1. every block in old_blocks — unmap it if host-visible, destroy every
 *      VkBuffer / VkImage recorded in its occupied_memory map, then free the
 *      VkDeviceMemory itself;
 *   2. the old_buff_resize_record entries (relocation bookkeeping — still a
 *      todo in the original).
 * Both vectors are emptied (sunk to length 0) afterwards.
 *
 * NOTE(review): the original span contained two interleaved revisions of this
 * function (old Set-based tree traversal mixed with the new Map-based one).
 * This is the newer, map-based revision, reconstructed — verify against repo. */
void MargaretMemAllocator_wipe_old(MargaretMemAllocator* self){
    /* Defragmentation produces old blocks; resizes produce resize records — never both. */
    assert(!self->old_blocks.len || !self->old_buff_resize_record.len);
    for (size_t blind = 0; blind < self->old_blocks.len; blind++) {
        MargaretMemAllocatorOneBlock* block = &self->old_blocks.buf[blind];
        /* Host-visible memory is kept mapped, so the flag and the pointer must agree. */
        assert(((self->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) ==
               (block->mapped_memory != NULL));
        if (block->mapped_memory)
            vkUnmapMemory(self->device, block->mapped_memory);
        { /* destroying images and buffers from this block. Binary tree detour takes O(n) time */
            U64 set_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_min(&block->occupied_memory);
            while (set_it > 0) {
                assert(set_it < block->occupied_memory.tree.len && set_it > 0);
                U64 occ_start;
                const MargaretMemoryOccupation* occ;
                BuffRBTree_MapU64ToMargaretMemoryOccupation_at_iter(&block->occupied_memory, set_it, &occ_start, &occ);
                if (occ->variant == MargaretMemoryOccupation_Buffer) {
                    const MargaretMemoryOccupationBuffer* wb = &occ->buf;
                    vkDestroyBuffer(self->device, wb->buffer, NULL);
                } else if (occ->variant == MargaretMemoryOccupation_Image) {
                    const MargaretMemoryOccupationImage* wi = &occ->img;
                    vkDestroyImage(self->device, wi->image, NULL);
                }
                set_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_next(&block->occupied_memory, set_it);
            }
        }
        vkFreeMemory(self->device, block->mem_hand, NULL);
    }
    VecMargaretMemAllocatorOneBlock_sink(&self->old_blocks, 0);
    /* MargaretOldBufferResizeRecord is a primitive datatype */
    for (U64 ri = 0; ri < self->old_buff_resize_record.len; ri++) {
        const MargaretOldBufferResizeRecord* resize = &self->old_buff_resize_record.buf[ri];
        if (resize->old_mem_block_id != resize->new_mem_block_id || resize->old_start != resize->new_start) {
            MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, resize->old_mem_block_id);
            U64 occ_it = BuffRBTree_MapU64ToMargaretMemoryOccupation_find_max_less_or_eq(&block->occupied_memory, 2);
            U64 occ_start;
            MargaretMemoryOccupation* occ;
            BuffRBTree_MapU64ToMargaretMemoryOccupation_mat_iter(&block->occupied_memory, occ_it, &occ_start, &occ);
            // todo: this fucker wants to be deleted. Do it for him. Maybe it will also carry the whole VkBuffer with
            // him
            // OptionMargaretBufferOccupationSubBuffer delete_me = BuffRBTree_MapU64ToMargaretBufferOccupationSubBuffer_pop(&self->
        }
    }
    VecMargaretOldBufferResizeRecord_sink(&self->old_buff_resize_record, 0);
}
char* MargaretMemAllocator_get_host_visible_buffer_ptr(
const MargaretMemAllocator* self, const MargaretMemAllocatorBufferPosition* pos){
check(pos->memory_type_id < VK_MAX_MEMORY_TYPES);
const MargaretMemAllocatorOneMemType* memtype = &self->mem_types.buf[pos->memory_type_id];
assert(memtype->old_blocks.len == 0);
check((memtype->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
check(pos->memory_allocation_id < memtype->blocks.len);
const MargaretMemAllocatorOneBlock* bl = &memtype->blocks.buf[pos->memory_allocation_id];
const MargaretMemAllocator* self, const MargaretMemAllocatorSubBufferPosition* pos){
check((self->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
const MargaretMemAllocatorOneBlock* bl = VecMargaretMemAllocatorOneBlock_at(&self->blocks, pos->memory_allocation_id);
assert(bl->mapped_memory);
/* We could check correctness of this position, but who cares, lol */
return (char*)bl->mapped_memory + pos->offset_in_device_memory_nubble;

View File

@ -1,3 +1,6 @@
// todo: rewrite this test with structures from l1_5/eve/margaret
// todo: but before that: don't even bother running this test
#include "../../../../gen/l1_5/BuffRBTreeByLen_SetU64Segment.h"
#include "../../../../gen/l1_5/BuffRBTreeByStart_SetU64Segment.h"
#include "../../../../gen/l1_5/BuffRBTreeByLenRespAlign_SetU64Segment.h"