Improved rbtree, bufrbtree, wrote a basic linked list. Progress update on MMA: almost complete, but I have to blow some dust off my knowledge of pipeline barriers

This commit is contained in:
Андреев Григорий 2025-11-27 18:12:44 +03:00
parent 3032016155
commit d5854dd5a3
10 changed files with 752 additions and 279 deletions

View File

@ -27,7 +27,7 @@ add_compile_definitions(_POSIX_C_SOURCE=200112L)
add_compile_definitions(_GNU_SOURCE)
add_compile_options(-fno-trapping-math)
#add_executable(codegen_l1 src/l1/anne/codegen.c)1
#add_executable(codegen_l1 src/l1/anne/codegen.c)
#target_compile_definitions(codegen_l1
# PRIVATE PROTOTYPE1_L1_CODEGEN_BOOTSTRAP_USE_CHICKEN_VECU8)

View File

@ -2,6 +2,7 @@
#define PROTOTYPE1_SRC_L1_ANNE_MARGARET_MARGARET_MISC_H
#include "../../codegen/util_template_inst.h"
#include "../../codegen/list_template_inst.h"
void generate_margaret_eve_for_vulkan_utils() {
SpanU8 l = cstr("l1");
@ -27,9 +28,7 @@ void generate_margaret_eve_for_vulkan_utils() {
});
/* For l2/margaret/vulkan_memory_claire.h */
/* It is actually an integer (pointer) */
generate_eve_span_company_for_non_primitive_non_clonable(l, ns, cstr("MargaretMemAllocatorOneBlock"), true, false);
generate_List_templ_inst_eve_header(l, ns, (list_instantiation_op){.T = cstr("MargaretMemAllocatorOneBlock")}, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeOccupant"), true, false);
generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
@ -47,38 +46,6 @@ void generate_margaret_eve_for_vulkan_utils() {
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMABufferExpansionRecord"), true, false);
generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMANewMovedBufRecord"), true, false);
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretOldBufferResizeRecord"), true, false);
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretBufRelocationRequest"), true, false);
//
// generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
// /* We won't need to clone this type, like, at all... It is actually clonable, but we just made
// * it non-clonable */
// .T = cstr("BufRBTreeByLenRespAlign_SetMargaretFreeMemSegment")
// });
// generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
// .T = cstr("MargaretMemAllocatorOneBlock"), .vec = true,
// });
//
// generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
// .T = cstr("MargaretMemoryOccupation")});
// generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
// .T = cstr("RefMargaretMemoryOccupation"), .t_ptr = true});
// generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
// .T = cstr("RefMutMargaretMemoryOccupation"), .t_ptr = true});
//
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretFreeMemSegment"), true, false);
// generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
// .T = cstr("MargaretFreeMemSegment"), .t_primitive = true});
//
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeBuffer"), true, false);
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeImage"), true, false);
// generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
// .T = cstr("MargaretMemAllocatorRequestResizeBuffer"), .t_primitive = true, .vec_extended = true});
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocBuffer"), true, false);
// generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocImage"), true, false);
// generate_eve_span_company_for_non_primitive_non_clonable(l, ns,
// cstr("MargaretMemAllocatorRequestsForCertainBufferKindAllocation"), true, false);
}

View File

@ -9,18 +9,101 @@ typedef struct {
bool t_clonable;
} list_instantiation_op;
NODISCARD VecU8 generate_List_template_instantiation(list_instantiation_op op){
NODISCARD VecU8 generate_List_template_instantiation(list_instantiation_op op, bool gen_node_declaration){
if (op.t_primitive)
op.t_clonable = true;
assert(op.T.len > 0);
VecU8 res = VecU8_fmt(
"typedef struct ListNode_%s ListNode_%s;\n" /* op.T, op.T */
"struct ListNode_%s {\n" /* op.T */
SPACE ""
"}\n\n"
);
VecU8 res = VecU8_new();
if (gen_node_declaration) {
VecU8_append_vec(&res, VecU8_fmt("typedef struct ListNode%s ListNode%s;\n", op.T, op.T));
}
VecU8_append_vec(&res, VecU8_fmt(
"struct ListNode%s {\n" /* op.T */
SPACE "ListNode%s* prev;\n" /* op.T */
SPACE "ListNode%s* next;\n" /* op.T */
SPACE "%s el;\n" /* op.T */
"};\n\n", op.T, op.T, op.T, op.T));
VecU8_append_vec(&res, VecU8_fmt(
"typedef struct {\n"
SPACE "ListNode%s* first;\n" /* op.T */
"} List%s;\n\n", /* op.T */
op.T, op.T));
VecU8_append_vec(&res, VecU8_fmt(
"#define List%s_new() {0}\n\n" /* op.T */
"void List%s_drop(List%s self) {\n" /* op.T, op.T */
SPACE "ListNode%s* cur = self.first;\n" /* op.T */
SPACE "while (cur){\n"
SPACE SPACE "ListNode%s* next = cur->next;\n" /* op.T */
"%v" /* "" / %s_drop(cur->el) */
SPACE SPACE "free(cur);\n"
SPACE SPACE "cur = next;\n"
SPACE "}\n"
"}\n\n",
op.T, op.T, op.T, op.T, op.T,
op.t_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE "%s_drop(cur->el);\n", op.T)));
VecU8_append_vec(&res, VecU8_fmt(
"void List%s_insert(List%s* self, %s el) {\n" /* op.T, op.T, op.T */
SPACE "ListNode%s* new_node = safe_malloc(sizeof(ListNode%s));\n" /* op.T, op.T */
SPACE "new_node->prev = NULL;\n"
SPACE "new_node->next = self->first;\n"
SPACE "new_node->el = el;\n"
SPACE "if (self->first)\n"
SPACE SPACE "self->first->prev = new_node;\n"
SPACE "self->first = new_node;\n"
"}\n\n", op.T, op.T, op.T, op.T, op.T));
VecU8_append_vec(&res, VecU8_fmt(
"void List%s_insert_node(List%s* self, ListNode%s* new_node) {\n" /* op.T, op.T, op.T */
SPACE "new_node->prev = NULL;\n"
SPACE "new_node->next = self->first;\n"
SPACE "if (self->first)\n"
SPACE SPACE "self->first->prev = new_node;\n"
SPACE "self->first = new_node;\n"
"}\n\n", op.T, op.T, op.T));
VecU8_append_vec(&res, VecU8_fmt(
"void List%s_erase_by_it(List%s* self, ListNode%s* it) {\n" /* op.T, op.T, op.T */
SPACE "if (it->prev)\n"
SPACE SPACE "it->prev->next = it->next;\n"
SPACE "else\n"
SPACE SPACE "self->first = it->next;\n"
SPACE "if (it->next)\n"
SPACE SPACE "it->next->prev = it->prev;\n"
"%v" /* "" / %s_drop(it->el) */
SPACE "free(it);\n"
"}\n\n",
op.T, op.T, op.T,
op.t_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(it->el);\n", op.T)));
VecU8_append_vec(&res, VecU8_fmt(
"void List%s_sink(List%s* self) {\n" /* op.T, op.T */
SPACE "ListNode%s* cur = self->first;\n" /* op.T */
SPACE "while (cur){\n"
SPACE SPACE "ListNode%s* next = cur->next;\n" /* op.T */
"%v" /* "" / %s_drop(cur->el) */
SPACE SPACE "free(cur);\n"
SPACE SPACE "cur = next;\n"
SPACE "}\n"
SPACE "self->first = NULL;\n"
"}\n\n",
op.T, op.T, op.T, op.T,
op.t_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE "%s_drop(cur->el);\n", op.T)));
return res;
}
/* Emits the List<op.T> template instantiation as an "eve" header for the given
 * layer/namespace. gen_node_declaration controls whether the generated code also
 * contains the `typedef struct ListNode<T> ListNode<T>;` forward declaration
 * (pass false when the node type is already declared elsewhere). */
void generate_List_templ_inst_eve_header(SpanU8 layer, SpanU8 bonus_ns, list_instantiation_op op, bool gen_node_declaration) {
generate_SOME_templ_inst_eve_header(layer, bonus_ns,
generate_List_template_instantiation(op, gen_node_declaration), VecU8_fmt("List%s", op.T));
}
/* Emits the List<op.T> template instantiation as a guarded header.
 * Always prepends an include of src/l1/core/utils.h to the caller-supplied
 * `dependencies` — presumably for safe_malloc used by the generated list code
 * (TODO confirm utils.h provides it). The node forward declaration is always
 * generated here (last argument `true`). */
void generate_List_templ_inst_guarded_header(
SpanU8 layer, SpanU8 bonus_ns, SpanU8 dependencies, list_instantiation_op op
){
VecU8 all_dependencies = VecU8_fmt("%v%s\n",
codegen_include_relative_to_root(bonus_ns, cstr("src/l1/core/utils.h")), dependencies);
generate_SOME_templ_inst_guarded_header(layer, bonus_ns, all_dependencies,
generate_List_template_instantiation(op, true), VecU8_fmt("List%s", op.T));
}
#endif

View File

@ -45,6 +45,7 @@ typedef struct {
SpanU8 guest_data_T;
bool at, mat;
bool pop, pop_substitute;
} map_instantiation_op;
void map_instantiation_op_fix(map_instantiation_op* self){

View File

@ -287,43 +287,43 @@ void codegen_append_buff_rbtree_map__structure_and_simplest_methods(
VecU8_to_span(&line_that_appends_new_el_to_el_vec)));
VecU8_drop(line_that_appends_new_el_to_el_vec);
VecU8_append_vec(res, VecU8_fmt(
"/* UNSAFE. Use when you dropped the symbol that is about to be deleted */\n"
"void %s_empty_index_erase(%s* self, U64 z) {\n" /* set, set */
SPACE "assert(z != 0 && z < self->tree.len);\n"
SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : BufRBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n"
SPACE "U64 x = self->tree.buf[y].left != 0 ? self->tree.buf[y].left : self->tree.buf[y].right;\n"
SPACE "assert(x != y && x != z);\n"
SPACE "U64 x_adopter = self->tree.buf[y].parent;\n"
SPACE "self->tree.buf[x].parent = x_adopter;\n"
SPACE "if (x_adopter == 0)\n"
SPACE SPACE "self->root = x;\n"
SPACE "else if (self->tree.buf[x_adopter].left == y)\n"
SPACE SPACE "self->tree.buf[x_adopter].left = x;\n"
SPACE "else\n"
SPACE SPACE "self->tree.buf[x_adopter].right = x;\n"
SPACE "RBTreeColor y_org_clr = self->tree.buf[y].color;\n"
SPACE "if (z != y) {\n"
SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n"
SPACE SPACE "if (x_adopter == z)\n"
SPACE SPACE SPACE "x_adopter = y;\n"
SPACE "}\n"
SPACE "U64 L = self->el.len;\n"
SPACE "if (L != z) {\n"
SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n"
SPACE SPACE "self->el.buf[z-1] = self->el.buf[L-1];\n"
SPACE SPACE "if (L == x)\n"
SPACE SPACE SPACE "x = z;\n"
SPACE SPACE "else if (L == x_adopter) \n"
SPACE SPACE SPACE "x_adopter = z;\n"
SPACE "}\n"
SPACE "self->tree.buf[x].parent = x_adopter;\n"
SPACE "self->tree.len--;\n"
SPACE "self->el.len--;\n"
SPACE "if (y_org_clr == RBTree_black)\n"
SPACE SPACE "BufRBTree_fix_after_delete(self->tree.buf, &self->root, x);\n"
"}\n\n",
set, set));
// VecU8_append_vec(res, VecU8_fmt(
// "/* UNSAFE. Use when you dropped the symbol that is about to be deleted */\n"
// "void %s_empty_index_erase(%s* self, U64 z) {\n" /* set, set */
// SPACE "assert(z != 0 && z < self->tree.len);\n"
// SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : BufRBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n"
// SPACE "U64 x = self->tree.buf[y].left != 0 ? self->tree.buf[y].left : self->tree.buf[y].right;\n"
// SPACE "assert(x != y && x != z);\n"
// SPACE "U64 x_adopter = self->tree.buf[y].parent;\n"
// SPACE "self->tree.buf[x].parent = x_adopter;\n"
// SPACE "if (x_adopter == 0)\n"
// SPACE SPACE "self->root = x;\n"
// SPACE "else if (self->tree.buf[x_adopter].left == y)\n"
// SPACE SPACE "self->tree.buf[x_adopter].left = x;\n"
// SPACE "else\n"
// SPACE SPACE "self->tree.buf[x_adopter].right = x;\n"
// SPACE "RBTreeColor y_org_clr = self->tree.buf[y].color;\n"
// SPACE "if (z != y) {\n"
// SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n"
// SPACE SPACE "if (x_adopter == z)\n"
// SPACE SPACE SPACE "x_adopter = y;\n"
// SPACE "}\n"
// SPACE "U64 L = self->el.len;\n"
// SPACE "if (L != z) {\n"
// SPACE SPACE "BufRBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n"
// SPACE SPACE "self->el.buf[z-1] = self->el.buf[L-1];\n"
// SPACE SPACE "if (L == x)\n"
// SPACE SPACE SPACE "x = z;\n"
// SPACE SPACE "else if (L == x_adopter) \n"
// SPACE SPACE SPACE "x_adopter = z;\n"
// SPACE "}\n"
// SPACE "self->tree.buf[x].parent = x_adopter;\n"
// SPACE "self->tree.len--;\n"
// SPACE "self->el.len--;\n"
// SPACE "if (y_org_clr == RBTree_black)\n"
// SPACE SPACE "BufRBTree_fix_after_delete(self->tree.buf, &self->root, x);\n"
// "}\n\n",
// set, set));
VecU8_append_vec(res, VecU8_fmt(
"bool %s_insert(%s* self, %v) {\n" /* set, set, taking_t_argument */
@ -348,14 +348,15 @@ void codegen_append_buff_rbtree_map__structure_and_simplest_methods(
SPACE SPACE "return false;\n"
"%v" /* "" / op.K_drop(v->key) */
"%v" /* "" / op.V_drop(v->value) */
SPACE "%s_empty_index_erase(self, v);\n" /* set */
SPACE "BufRBTree_empty_index_erase(&self->tree, &self->root, v);\n"
SPACE "self->el.buf[v - 1] = self->el.buf[self->el.len - 1];\n"
SPACE "self->el.len--;\n"
SPACE "return true;\n"
"}\n\n",
set, set, codegen_rbtree_map__taking_ref_k_argument(op), set,
op.k_primitive ? vcstr("") : VecU8_fmt(
SPACE "%s_drop(self->el.buf[v - 1]%s);\n", op.K, op.V.len > 0 ? cstr(".key") : cstr("")),
op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(self->el.buf[v - 1].value);\n", op.V),
set ));
op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(self->el.buf[v - 1].value);\n", op.V)));
}
NODISCARD VecU8 codegen_buf_rbtree_map__option_returned_ref_v(map_instantiation_op op, bool mut){
@ -509,22 +510,24 @@ NODISCARD VecU8 generate_buf_rbtree_Map_template_instantiation(map_instantiation
op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(self->el.buf[col - 1].value);\n", op.V)));
}
VecU8_append_vec(&res, VecU8_fmt(
"Option%s %s_pop_substitute(%s* self, %s key, %s value) {\n" /* op.V, map, map, op.K, op.V */
SPACE "U64 col = %s_try_insert(self, key, value);\n" /* map */
SPACE "if (col == 0)\n"
SPACE SPACE "return None_%s();\n" /* op.V */
"%s saved = self->el.buf[col - 1].value;\n" /* op.V */
"%v" /* "" / drop col->key */
SPACE "self->el.buf[col - 1].key = key;\n"
SPACE "self->el.buf[col - 1].value = value;\n"
SPACE "return Some_%s(saved);\n" /* op.V */
"}\n\n",
op.V, map, map, op.K, op.V,
map, op.V,
op.V,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(self->el.buf[col - 1].key);\n", op.K),
op.V));
if (op.pop_substitute) {
VecU8_append_vec(&res, VecU8_fmt(
"Option%s %s_pop_substitute(%s* self, %s key, %s value) {\n" /* op.V, map, map, op.K, op.V */
SPACE "U64 col = %s_try_insert(self, key, value);\n" /* map */
SPACE "if (col == 0)\n"
SPACE SPACE "return None_%s();\n" /* op.V */
"%s saved = self->el.buf[col - 1].value;\n" /* op.V */
"%v" /* "" / drop col->key */
SPACE "self->el.buf[col - 1].key = key;\n"
SPACE "self->el.buf[col - 1].value = value;\n"
SPACE "return Some_%s(saved);\n" /* op.V */
"}\n\n",
op.V, map, map, op.K, op.V,
map, op.V,
op.V,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(self->el.buf[col - 1].key);\n", op.K),
op.V));
}
// todo: write _pop_by_iter method

View File

@ -274,6 +274,7 @@ void codegen_append_rbtree_map__structure_and_simplest_methods(
"%v" /* "" / op.K_drop(it->key) */
"%v" /* "" / op.V_drop(it->value) */
SPACE "RBTree_erase_empty_by_iter(&self->root, self->NIL, (RBTreeNode*)it);\n"
SPACE "free(it);\n"
"}\n\n",
set, set, TT,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(it->key);\n", op.K),
@ -352,25 +353,26 @@ NODISCARD VecU8 generate_rbtree_Map_template_instantiation(map_instantiation_op
VecU8_append_vec(&res, codegen_rbtree__node_structure(op));
codegen_append_rbtree_map__structure_and_simplest_methods(&res, op, map, kvp);
VecU8_append_vec(&res, VecU8_fmt(
"%s" "Option%s %s_pop_substitute(%s* self, %s key, %s value) {\n" /* "" / NODISCARD , op.V, map, map, op.K, op.V */
/* Using unsafe method with conditional ownership transfer */
SPACE "RBTreeNode_%s* col = %s_try_insert(self, key, value);\n" /* kvp, map */
SPACE "if (col == NULL) {\n"
SPACE SPACE "return None_%s();\n" /* op.V */
SPACE "} else {\n"
"%v" /* "" / dropping col->key */
SPACE SPACE "%s saved = col->value;\n" /* op.V */
SPACE SPACE "col->key = key;\n"
SPACE SPACE "col->value = value;\n"
SPACE SPACE "return Some_%s(saved);\n" /* op.V */
SPACE "}\n"
"}\n\n",
op.v_primitive ? cstr("") : cstr("NODISCARD "), op.V, map, map, op.K, op.V,
kvp, map, op.V,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE"%s_drop(col->key);\n", op.K),
op.V, op.V));
if (op.pop_substitute) {
VecU8_append_vec(&res, VecU8_fmt(
"%s" "Option%s %s_pop_substitute(%s* self, %s key, %s value) {\n" /* "" / NODISCARD , op.V, map, map, op.K, op.V */
/* Using unsafe method with conditional ownership transfer */
SPACE "RBTreeNode_%s* col = %s_try_insert(self, key, value);\n" /* kvp, map */
SPACE "if (col == NULL) {\n"
SPACE SPACE "return None_%s();\n" /* op.V */
SPACE "} else {\n"
"%v" /* "" / dropping col->key */
SPACE SPACE "%s saved = col->value;\n" /* op.V */
SPACE SPACE "col->key = key;\n"
SPACE SPACE "col->value = value;\n"
SPACE SPACE "return Some_%s(saved);\n" /* op.V */
SPACE "}\n"
"}\n\n",
op.v_primitive ? cstr("") : cstr("NODISCARD "), op.V, map, map, op.K, op.V,
kvp, map, op.V,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE SPACE"%s_drop(col->key);\n", op.K),
op.V, op.V));
}
if (!op.v_primitive) {
VecU8_append_vec(&res, VecU8_fmt(
"bool %s_erase_substitute(%s* self, %s key, %s value) {\n" /* map, map, op.K, op.V */
@ -387,24 +389,26 @@ NODISCARD VecU8 generate_rbtree_Map_template_instantiation(map_instantiation_op
op.v_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(col->value);\n", op.V)));
}
// todo: write _pop_by_it
// todo: rewrite pop using _pop_by_it
VecU8_append_vec(&res, VecU8_fmt(
"Option%s %s_pop(%s* self, %v key) {\n" /* op.V, map, map, taking_ref_k_argument */
SPACE "RBTreeNode_%s* v = %s_find(self, key);\n" /* kvp, map */
SPACE "if (v == NULL)\n"
SPACE SPACE "return None_%s();\n" /* op.V */
"%v" /* "" / op.K_drop(v->key) */
"%s saved = v->value;\n" /* op.V */
SPACE "RBTree_erase_empty_by_iter(&self->root, self->NIL, (RBTreeNode*)v);\n"
SPACE "return Some_%s(saved);\n" /* op.V */
"}\n\n",
op.V, map, map, codegen_rbtree_map__taking_ref_k_argument(op),
kvp, map, op.V,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(v->key);\n", op.K),
op.V, op.V));
if (op.pop){
// todo: write _pop_by_it
// todo: rewrite pop using _pop_by_it
VecU8_append_vec(&res, VecU8_fmt(
"Option%s %s_pop(%s* self, %v key) {\n" /* op.V, map, map, taking_ref_k_argument */
SPACE "RBTreeNode_%s* v = %s_find(self, key);\n" /* kvp, map */
SPACE "if (v == NULL)\n"
SPACE SPACE "return None_%s();\n" /* op.V */
"%v" /* "" / op.K_drop(v->key) */
"%s saved = v->value;\n" /* op.V */
SPACE "RBTree_erase_empty_by_iter(&self->root, self->NIL, (RBTreeNode*)v);\n"
SPACE "free(v);\n"
SPACE "return Some_%s(saved);\n" /* op.V */
"}\n\n",
op.V, map, map, codegen_rbtree_map__taking_ref_k_argument(op),
kvp, map, op.V,
op.k_primitive ? vcstr("") : VecU8_fmt(SPACE "%s_drop(v->key);\n", op.K),
op.V, op.V));
}
// todo: write generator for methods _at and _mat
return res;

View File

@ -219,4 +219,42 @@ void BufRBTree_fix_after_delete(BufRBTreeNode* tree, U64* root, U64 me){
tree[me].color = RBTree_black;
}
/* UNSAFE. Use when you dropped the symbol that is about to be deleted. Does not shrink the el vector,
 * do it yourself.
 *
 * Erases node at index z from an index-based red-black tree backed by a dense
 * vector (index 0 serves as the NIL sentinel). Classic RB deletion, followed by
 * moving the last physical node into the freed slot so node storage stays dense;
 * the caller must apply the matching swap to its own payload vector (see the
 * commented-out `el` lines at the bottom). */
void BufRBTree_empty_index_erase(VecBufRBTreeNode* tree, U64* root, U64 z) {
assert(z != 0 && z < tree->len);
/* y = the node that is physically unlinked: z itself when z has at most one
 * child, otherwise z's in-order successor */
U64 y = (tree->buf[z].left == 0 || tree->buf[z].right == 0) ? z : BufRBTree_minimum_in_subtree(tree->buf, tree->buf[z].right);
/* x = y's only (possibly sentinel) child; it takes y's place under x_adopter */
U64 x = tree->buf[y].left != 0 ? tree->buf[y].left : tree->buf[y].right;
assert(x != y && x != z);
U64 x_adopter = tree->buf[y].parent;
tree->buf[x].parent = x_adopter;
if (x_adopter == 0)
*root = x;
else if (tree->buf[x_adopter].left == y)
tree->buf[x_adopter].left = x;
else
tree->buf[x_adopter].right = x;
/* Remember y's color before any link surgery: deleting a black node requires a fixup */
RBTreeColor y_org_clr = tree->buf[y].color;
if (z != y) {
/* y takes over z's structural position (links/color) — see BufRBTree_steal_neighbours */
BufRBTree_steal_neighbours(tree->buf, root, z, y);
if (x_adopter == z)
x_adopter = y;
}
/* Keep the vector dense: relocate the last node (index L) into the freed slot z,
 * patching the local aliases x / x_adopter if they pointed at L */
U64 L = tree->len - 1;
if (L != z) {
BufRBTree_steal_neighbours(tree->buf, root, L, z);
if (L == x)
x = z;
else if (L == x_adopter)
x_adopter = z;
}
tree->buf[x].parent = x_adopter;
tree->len--;
if (y_org_clr == RBTree_black)
BufRBTree_fix_after_delete(tree->buf, root, x);
// self->el.buf[z-1] = self->el.buf[L-1];
// self->el.len--;
}
#endif

View File

@ -249,7 +249,7 @@ void RBTree_fix_after_delete(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* me)
me->color = RBTREE_BLACK;
}
/* Assumes that z->key and z->value were already dropped properly. Frees z node */
/* Assumes that z->key and z->value were already dropped properly. Does not free z node */
void RBTree_erase_empty_by_iter(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode* z) {
assert(z != NULL);
assert(z != NIL);
@ -273,7 +273,6 @@ void RBTree_erase_empty_by_iter(RBTreeNode** root, RBTreeNode* NIL, RBTreeNode*
// x->parent = x_adopter;
if (y_org_clr == RBTREE_BLACK)
RBTree_fix_after_delete(root, NIL, x);
free((void*)z);
}
#endif

View File

@ -201,13 +201,10 @@ typedef U8 MargaretMemAllocatorDemands;
#include "../../l1_5/core/buff_rb_tree_node.h"
#include "../../l1_5/core/rb_tree_node.h"
typedef struct MargaretMemAllocatorOccupantPosition MargaretMemAllocatorOccupantPosition;
typedef struct {
U64 width;
U64 height;
VkFormat format;
VkImageTiling tiling;
VkImageLayout current_layout;
VkImageUsageFlags usage_flags;
bool preserve_at_quiet;
@ -218,6 +215,10 @@ typedef struct {
typedef struct {
VkBufferUsageFlags usage_flags;
bool preserve_at_quiet;
/* This is not a parameter of a buffer type. It is a special flag used during defragmentation to indicate
* that this buffer was already moved to 'new blocks' by expand_buf request
* and copying cycle should skip it. In other situations -> false */
bool moved_already;
VkBuffer buffer;
U64 capacity;
} MargaretMemoryOccupationBuffer;
@ -235,9 +236,11 @@ typedef struct {
};
} MargaretMAOccupant;
typedef struct ListNodeMargaretMemAllocatorOneBlock ListNodeMargaretMemAllocatorOneBlock;
typedef struct {
U64 taken_size;
MargaretMemAllocatorOccupantPosition* ans;
ListNodeMargaretMemAllocatorOneBlock* block;
MargaretMAOccupant me;
} MargaretMAOccupation;
@ -253,19 +256,13 @@ typedef struct {
void* mapped_memory;
} MargaretMemAllocatorOneBlock;
/* Drops only the CPU-side occupation map of the block. The device memory /
 * mapped_memory visible in the struct is NOT released here — presumably the
 * owning allocator frees it separately (TODO confirm). */
void MargaretMemAllocatorOneBlock_drop(MargaretMemAllocatorOneBlock self){
RBTree_MapU64ToMargaretMAOccupation_drop(self.occupied_memory);
}
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorOneBlock.h"
#include "../../../gen/l1/eve/margaret/ListMargaretMemAllocatorOneBlock.h"
struct MargaretMemAllocatorOccupantPosition{
U64 device_mem_ind;
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it;
};
typedef MargaretMemAllocatorOccupantPosition* MargaretMemAllocatorRequestFreeOccupant;
typedef RBTreeNode_KVPU64ToMargaretMAOccupation* MargaretMemAllocatorRequestFreeOccupant;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestFreeOccupant.h"
typedef MargaretMemAllocatorRequestFreeOccupant MargaretMemAllocatorRequestFreeBuffer;
@ -273,7 +270,7 @@ typedef MargaretMemAllocatorRequestFreeOccupant MargaretMemAllocatorRequestFreeI
typedef struct{
U64 new_size;
MargaretMemAllocatorOccupantPosition* ans;
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it;
} MargaretMemAllocatorRequestResizeBuffer;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestResizeBuffer.h"
@ -281,7 +278,7 @@ typedef struct {
U64 allocation_size;
VkBufferUsageFlags usage;
bool preserve_at_quiet;
MargaretMemAllocatorOccupantPosition* ans;
RBTreeNode_KVPU64ToMargaretMAOccupation** ret_ans;
} MargaretMemAllocatorRequestAllocBuffer;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestAllocBuffer.h"
@ -289,11 +286,10 @@ typedef struct {
U64 width;
U64 height;
VkFormat format;
VkImageTiling tiling;
VkImageLayout current_layout;
VkImageUsageFlags usage_flags;
bool preserve_at_quiet;
MargaretMemAllocatorOccupantPosition* ans;
RBTreeNode_KVPU64ToMargaretMAOccupation** ret_ans;
} MargaretMemAllocatorRequestAllocImage;
#include "../../../gen/l1/eve/margaret/VecMargaretMemAllocatorRequestAllocImage.h"
@ -309,22 +305,9 @@ typedef struct {
typedef struct {
U64 start;
U64 len;
/* If this value ever exceeds zero, something has gone badly wrong upstream */
U32 dev_mem_block;
ListNodeMargaretMemAllocatorOneBlock* dev_mem_block;
} MargaretFreeMemSegment;
/* Strict ordering for free segments: primarily by length, then by owning
 * device-memory block, then by start offset — ties are fully broken, so the
 * comparator is usable as a set key.
 * NOTE(review): if dev_mem_block is (or becomes) a pointer, relational
 * comparison of unrelated pointers is UB — compare via uintptr_t casts as
 * MargaretFreeMemSegment_less_resp_align does. */
bool MargaretFreeMemSegment_less(
const MargaretFreeMemSegment* A, const MargaretFreeMemSegment* B
){
if (A->len == B->len) {
if (A->dev_mem_block == B->dev_mem_block) {
return A->start < B->start;
}
return A->dev_mem_block < B->dev_mem_block;
}
return A->len < B->len;
}
bool MargaretFreeMemSegment_less_resp_align(
const MargaretFreeMemSegment* A, const MargaretFreeMemSegment* B, U8 alignment_exp
){
@ -334,7 +317,7 @@ bool MargaretFreeMemSegment_less_resp_align(
if (A->dev_mem_block == B->dev_mem_block) {
return A->start < B->start;
}
return A->dev_mem_block < B->dev_mem_block;
return (uintptr_t)A->dev_mem_block < (uintptr_t)B->dev_mem_block;
}
return A_len < B_len;
}
@ -345,15 +328,17 @@ bool MargaretFreeMemSegment_less_resp_align(
#include "../../../gen/l1/eve/margaret/OptionBufRBTreeByLenRespAlign_SetMargaretFreeMemSegment.h"
typedef struct {
U64 mem_block_ind;
U64 old_capacity;
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it;
} MargaretMABufferExpansionRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretMABufferExpansionRecord.h"
typedef struct {
MargaretMemAllocatorOccupantPosition old_pos;
MargaretMemAllocatorOccupantPosition* ans;
RBTreeNode_KVPU64ToMargaretMAOccupation* replacement;
/* Pointer points to the same address, but the neighbours+key+value at that address are different.
* We relocated our node, replacing it with a NEW node that holds the old buffer value and the old start U64
*/
RBTreeNode_KVPU64ToMargaretMAOccupation* my_occ_it;
} MargaretMANewMovedBufRecord;
#include "../../../gen/l1/eve/margaret/VecMargaretMANewMovedBufRecord.h"
@ -381,13 +366,14 @@ void MargaretMemFreeSpaceManager_drop(MargaretMemFreeSpaceManager self){
VecU8_drop(self.set_present);
}
void MargaretMemFreeSpaceManager_sink(MargaretMemFreeSpaceManager* self){
for (U8 ae = 0; ae < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; ae++)
if (self->free_space_in_memory[ae].variant == Option_Some)
BufRBTreeByLenRespAlign_SetMargaretFreeMemSegment_sink(&self->free_space_in_memory[ae].some);
}
// void MargaretMemFreeSpaceManager_sink(MargaretMemFreeSpaceManager* self){
// for (U8 ae = 0; ae < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP; ae++)
// if (self->free_space_in_memory[ae].variant == Option_Some)
// BufRBTreeByLenRespAlign_SetMargaretFreeMemSegment_sink(&self->free_space_in_memory[ae].some);
// }
void MargaretMemFreeSpaceManager_erase(MargaretMemFreeSpaceManager* man, U64 start, U64 len, U32 dev_mem_block){
void MargaretMemFreeSpaceManager_erase(
MargaretMemFreeSpaceManager* man, ListNodeMargaretMemAllocatorOneBlock* dev_mem_block, U64 start, U64 len){
if (len == 0)
return;
assert(man->set_present.len > 0);
@ -396,13 +382,16 @@ void MargaretMemFreeSpaceManager_erase(MargaretMemFreeSpaceManager* man, U64 sta
assert(alignment < MARGARET_ALLOC_LIMIT_ALIGNMENT_EXP);
assert(man->free_space_in_memory[alignment].variant == Option_Some);
bool eret = BufRBTreeByLenRespAlign_SetMargaretFreeMemSegment_erase(&
man->free_space_in_memory[alignment].some, &(MargaretFreeMemSegment){start, len, dev_mem_block});
man->free_space_in_memory[alignment].some,
&(MargaretFreeMemSegment){.dev_mem_block = dev_mem_block, .start = start, .len = len});
assert(eret);
}
}
void MargaretMemFreeSpaceManager_insert(MargaretMemFreeSpaceManager* man, U64 start, U64 len, U32 dev_mem_block){
assert(len > 0);
void MargaretMemFreeSpaceManager_insert(
MargaretMemFreeSpaceManager* man, ListNodeMargaretMemAllocatorOneBlock* dev_mem_block, U64 start, U64 len){
if (len == 0)
return;
assert(man->set_present.len > 0); /* MargaretMemFreeSpaceManager will do that for us with 2^3 */
for (size_t aj = 0; aj < man->set_present.len; aj++) {
U8 alignment = man->set_present.buf[aj];
@ -439,7 +428,7 @@ OptionMargaretFreeMemSegment MargaretMemFreeSpaceManager_search(
/* VkDevice and VkPhysicalDevice stay remembered here. Don't forget that, please */
typedef struct {
VecMargaretMemAllocatorOneBlock blocks;
ListMargaretMemAllocatorOneBlock blocks;
/* old_blocks is usually empty. BUT! When you generated a defragmentation command buffer with
* MargaretMemAllocator_carry_out_request, this vector will be filled with old blocks, while
* `blocks` vector will be filled with newly created blocks.
@ -447,7 +436,7 @@ typedef struct {
* to the right block. After you execute the command buffer,
* that MargaretMemAllocator_carry_out_request generates, you can (and should) wipe out old blocks
*/
VecMargaretMemAllocatorOneBlock old_blocks;
ListMargaretMemAllocatorOneBlock old_blocks;
VecMargaretMANewMovedBufRecord old_moved_buffers;
MargaretMemFreeSpaceManager mem_free_space;
@ -462,8 +451,8 @@ MargaretMemAllocator MargaretMemAllocator_new(
VkDevice device, VkPhysicalDevice physical_device, VkMemoryPropertyFlags mem_properties, U8 memory_type_id
){
MargaretMemAllocator self = {
.blocks = VecMargaretMemAllocatorOneBlock_new(),
.old_blocks = VecMargaretMemAllocatorOneBlock_new(),
.blocks = ListMargaretMemAllocatorOneBlock_new(),
.old_blocks = ListMargaretMemAllocatorOneBlock_new(),
.old_moved_buffers = VecMargaretMANewMovedBufRecord_new(),
.mem_free_space = MargaretMemFreeSpaceManager_new(),
.memory_type_id = memory_type_id,
@ -475,23 +464,30 @@ MargaretMemAllocator MargaretMemAllocator_new(
return self;
}
void MargaretMemAllocator__erase_gap(MargaretMemAllocator* self, U32 dev_mem_block, U64Segment gap){
MargaretMemFreeSpaceManager_erase(&self->mem_free_space, gap.start, gap.len, dev_mem_block);
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, dev_mem_block);
block->occupation_counter += gap.len;
assert(block->occupation_counter <= block->capacity);
/* Marks [start, start+len) of the block at `block_it` as occupied: removes the
 * gap from the free-space manager and grows the block's occupation counter.
 * Asserts the block never reports more occupied bytes than its capacity. */
void MargaretMemAllocator__erase_gap(
MargaretMemAllocator* self, ListNodeMargaretMemAllocatorOneBlock* block_it, U64 start, U64 len){
MargaretMemFreeSpaceManager_erase(&self->mem_free_space, block_it, start, len);
block_it->el.occupation_counter += len;
assert(block_it->el.occupation_counter <= block_it->el.capacity);
}
void MargaretMemAllocator__insert_gap(MargaretMemAllocator* self, U32 dev_mem_block, U64 start, U64 len){
MargaretMemFreeSpaceManager_insert(&self->mem_free_space, start, len, dev_mem_block);
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, dev_mem_block);
assert(len <= block->occupation_counter);
block->occupation_counter -= len;
/* Marks [start, start+len) of the block at `block_it` as free again: returns
 * the gap to the free-space manager and shrinks the block's occupation counter
 * (asserting it cannot go negative). Inverse of MargaretMemAllocator__erase_gap. */
void MargaretMemAllocator__insert_gap(
MargaretMemAllocator* self, ListNodeMargaretMemAllocatorOneBlock* block_it, U64 start, U64 len){
MargaretMemFreeSpaceManager_insert(&self->mem_free_space, block_it, start, len);
assert(len <= block_it->el.occupation_counter);
block_it->el.occupation_counter -= len;
}
bool MargaretMemAllocator__add_new_occupant_any_type(
/* Number of padding bytes to add before `unaligned_start` so the result is
 * aligned to 2^alignment_exp. Returns 0 when the start is already aligned. */
U64 margaret_get_alignment_left_padding(U64 unaligned_start, U8 alignment_exp){
U64 alignment = 1ull << alignment_exp;
U64 misalignment = unaligned_start & (alignment - 1);
if (misalignment == 0)
return 0;
return alignment - misalignment;
}
/* This method only works for alloc_buffer and alloc_image requests. It does not work for
* expand_buffer request (path 2) */
bool MargaretMemAllocator__add_freshly_new_occupant_any_type(
MargaretMemAllocator* self, MargaretMAOccupant occ, const VkMemoryRequirements* requirements,
MargaretMemAllocatorOccupantPosition* ans
RBTreeNode_KVPU64ToMargaretMAOccupation** ret_ans
){
check(U64_is_2pow(requirements->alignment));
U8 alignment_exp = U64_2pow_log(requirements->alignment);
@ -499,35 +495,30 @@ bool MargaretMemAllocator__add_new_occupant_any_type(
MargaretMemFreeSpaceManager_search(&self->mem_free_space, alignment_exp, requirements->size);
if (free_gap.variant == Option_None)
return false;
U32 dev_mem_block = free_gap.some.dev_mem_block;
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, dev_mem_block);
ListNodeMargaretMemAllocatorOneBlock* block_it = free_gap.some.dev_mem_block;
RBTree_MapU64ToMargaretMAOccupation* block_occupied_memory = &block_it->el.occupied_memory;
U64 gap_start = free_gap.some.start;
U64 gap_len = free_gap.some.len;
U64 hit = gap_start & (1ull << alignment_exp) - 1;
U64 af = (hit ? (1ull << alignment_exp) - hit : 0);
U64 af = margaret_get_alignment_left_padding(gap_start, alignment_exp);
U64 aligned_start = gap_start + af;
assert(aligned_start + requirements->size <= gap_start + gap_len);
MargaretMemAllocator__erase_gap(self, dev_mem_block, (U64Segment){.start = gap_start, .len = gap_len});
MargaretMemAllocator__insert_gap(self, dev_mem_block, gap_start, af);
MargaretMemAllocator__insert_gap(self, dev_mem_block, aligned_start + requirements->size,
MargaretMemAllocator__erase_gap(self, block_it, gap_start, gap_len);
MargaretMemAllocator__insert_gap(self, block_it, gap_start, af);
MargaretMemAllocator__insert_gap(self, block_it, aligned_start + requirements->size,
gap_start + gap_len - (aligned_start + requirements->size));
/* We are doing a dumb crutch here where we first insert key+value, then search for the iterator */
check(RBTree_MapU64ToMargaretMAOccupation_insert(&block->occupied_memory, aligned_start, (MargaretMAOccupation){
.taken_size = requirements->size, .ans = ans, .me = occ}));
check(RBTree_MapU64ToMargaretMAOccupation_insert(block_occupied_memory, aligned_start,
(MargaretMAOccupation){.taken_size = requirements->size, .block = block_it, .me = occ}));
/* Lord forgive me */
RBTreeNode_KVPU64ToMargaretMAOccupation* new_it =
RBTree_MapU64ToMargaretMAOccupation_find(&block->occupied_memory, aligned_start);
assert(new_it);
/* Updating answer. occ->ans may be already filled, or it may not. I don't care */
ans->device_mem_ind = dev_mem_block;
ans->occ_it = new_it;
*ret_ans = RBTree_MapU64ToMargaretMAOccupation_find(block_occupied_memory, aligned_start);
assert(*ret_ans);
return true;
}
bool MargaretMemAllocator__add_new_buffer_occ(
MargaretMemAllocator* self, MargaretMemAllocatorOccupantPosition* ans,
bool MargaretMemAllocator__add_freshly_new_buffer_occ(
MargaretMemAllocator* self, RBTreeNode_KVPU64ToMargaretMAOccupation** ret_ans,
U64 size, VkBufferUsageFlags usage_flags, bool preserve_at_quiet){
VkBuffer buf;
check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
@ -538,18 +529,19 @@ bool MargaretMemAllocator__add_new_buffer_occ(
}, NULL, &buf) == VK_SUCCESS);
VkMemoryRequirements memory_requirements;
vkGetBufferMemoryRequirements(self->device, buf, &memory_requirements);
bool success = MargaretMemAllocator__add_new_occupant_any_type(self, (MargaretMAOccupant){
bool success = MargaretMemAllocator__add_freshly_new_occupant_any_type(self, (MargaretMAOccupant){
.variant = MargaretMemoryOccupation_Buffer,
.buf = (MargaretMemoryOccupationBuffer){
.buffer = buf, .capacity = size, .usage_flags = usage_flags, .preserve_at_quiet = preserve_at_quiet
}}, &memory_requirements, ans);
}}, &memory_requirements, ret_ans);
if (!success)
vkDestroyBuffer(self->device, buf, NULL);
return success;
}
/* Helper function used in BD path */
bool MargaretMemAllocator__add_new_image_occ(
MargaretMemAllocator* self, MargaretMemAllocatorOccupantPosition* ans,
MargaretMemAllocator* self, RBTreeNode_KVPU64ToMargaretMAOccupation** ret_ans,
U64 width, U64 height, VkFormat format, VkImageUsageFlags usage_flags, bool preserve_at_quiet){
VkImage img;
check(vkCreateImage(self->device, &(VkImageCreateInfo){
@ -571,13 +563,12 @@ bool MargaretMemAllocator__add_new_image_occ(
}, NULL, &img) == VK_SUCCESS);
VkMemoryRequirements memory_requirements;
vkGetImageMemoryRequirements(self->device, img, &memory_requirements);
return MargaretMemAllocator__add_new_occupant_any_type(self, (MargaretMAOccupant){
return MargaretMemAllocator__add_freshly_new_occupant_any_type(self, (MargaretMAOccupant){
.variant = MargaretMemoryOccupation_Image,
.img = (MargaretMemoryOccupationImage){
.image = img, .width = width, .height = height, .current_layout = VK_IMAGE_LAYOUT_UNDEFINED,
.usage_flags = usage_flags, .preserve_at_quiet = preserve_at_quiet, .format = format,
.tiling = VK_IMAGE_TILING_OPTIMAL
}}, &memory_requirements, ans);
}}, &memory_requirements, ret_ans);
}
U64Segment MargaretMemAllocatorOneBlock_get_left_free_space(
@ -611,31 +602,34 @@ U64Segment MargaretMemAllocatorOneBlock_get_right_free_space(
return (U64Segment){.start = occ_start + occ_taken_size, .len = self->capacity - (occ_start + occ_taken_size)};
}
/* If mem occupant in question is VkBuffer, it won't delete anything from the set of available free mem segments
* for that buffer kindred. It is your job to remove free buffer subsegments from this set*/
void MargaretMemAllocator__get_rid_of_memory_occupant(
MargaretMemAllocator* self, U32 mem_block_id, RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it){
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, mem_block_id);
void MargaretMemAllocator__get_rid_of_memory_occupant_but_not_node(
MargaretMemAllocator* self, RBTreeNode_KVPU64ToMargaretMAOccupation* exile){
MargaretMemAllocatorOneBlock* block = &exile->value.block->el;
const MargaretMAOccupant* occ = &occ_it->value.me;
U64Segment left_free_space = MargaretMemAllocatorOneBlock_get_left_free_space(block, occ_it);
U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(block, occ_it);
U64Segment left_free_space = MargaretMemAllocatorOneBlock_get_left_free_space(block, exile);
U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(block, exile);
const MargaretMAOccupant* occ = &exile->value.me;
if (occ->variant == MargaretMemoryOccupation_Buffer) {
vkDestroyBuffer(self->device, occ->buf.buffer, NULL);
} else if (occ->variant == MargaretMemoryOccupation_Image) {
vkDestroyImage(self->device, occ->img.image, NULL);
}
RBTree_MapU64ToMargaretMAOccupation_erase_by_iter(&block->occupied_memory, occ_it);
MargaretMemAllocator__erase_gap(self, mem_block_id, left_free_space);
MargaretMemAllocator__erase_gap(self, mem_block_id, right_free_space);
MargaretMemAllocator__insert_gap(self, mem_block_id, left_free_space.start,
MargaretMemAllocator__erase_gap(self, exile->value.block, left_free_space.start, left_free_space.len);
MargaretMemAllocator__erase_gap(self, exile->value.block, right_free_space.start, right_free_space.len);
MargaretMemAllocator__insert_gap(self, exile->value.block,
left_free_space.start,
right_free_space.start + right_free_space.len - left_free_space.start);
}
/* Either for temporary replacements of moved-expanded buffers, or for direct free_X requests */
void MargaretMemAllocator__get_rid_of_memory_occupant_and_node(
MargaretMemAllocator* self, RBTreeNode_KVPU64ToMargaretMAOccupation* exile){
MargaretMemAllocator__get_rid_of_memory_occupant_but_not_node(self, exile);
MargaretMemAllocatorOneBlock* block = &exile->value.block->el;
RBTree_MapU64ToMargaretMAOccupation_erase_by_iter(&block->occupied_memory, exile);
}
void MargaretMemAllocator__clean_handlers_in_block(const MargaretMemAllocator* self, const MargaretMemAllocatorOneBlock* block){
assert(((self->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) ==
@ -657,46 +651,53 @@ void MargaretMemAllocator__clean_handlers_in_block(const MargaretMemAllocator* s
}
void MargaretMemAllocator_drop(MargaretMemAllocator self){
for (size_t blind = 0; blind < self.old_blocks.len; blind++) {
MargaretMemAllocator__clean_handlers_in_block(&self, &self.old_blocks.buf[blind]);
for (ListNodeMargaretMemAllocatorOneBlock* block = self.old_blocks.first; block; block = block->next) {
ListNodeMargaretMemAllocatorOneBlock* next = block->next;
MargaretMemAllocator__clean_handlers_in_block(&self, &block->el);
free(block);
block = next;
}
for (size_t blind = 0; blind < self.blocks.len; blind++) {
MargaretMemAllocator__clean_handlers_in_block(&self, &self.blocks.buf[blind]);
for (ListNodeMargaretMemAllocatorOneBlock* block = self.blocks.first; block; block = block->next) {
ListNodeMargaretMemAllocatorOneBlock* next = block->next;
MargaretMemAllocator__clean_handlers_in_block(&self, &block->el);
free(block);
block = next;
}
VecMargaretMemAllocatorOneBlock_drop(self.old_blocks);
VecMargaretMemAllocatorOneBlock_drop(self.blocks);
// VecMargaretOldBufferResizeRecord_drop(self.old_buff_resize_record);
VecMargaretMANewMovedBufRecord_drop(self.old_moved_buffers);
MargaretMemFreeSpaceManager_drop(self.mem_free_space);
}
void MargaretMemAllocator_wipe_old(MargaretMemAllocator* self){
assert(!self->old_blocks.len || !self->old_moved_buffers.len);
for (size_t blind = 0; blind < self->old_blocks.len; blind++) {
MargaretMemAllocator__clean_handlers_in_block(self, &self->old_blocks.buf[blind]);
assert(self->old_blocks.first == NULL || self->old_moved_buffers.len == 0);
for (ListNodeMargaretMemAllocatorOneBlock* block = self->old_blocks.first; block; block = block->next) {
ListNodeMargaretMemAllocatorOneBlock* next = block->next;
MargaretMemAllocator__clean_handlers_in_block(self, &block->el);
free(block);
block = next;
}
VecMargaretMemAllocatorOneBlock_sink(&self->old_blocks, 0);
self->old_blocks.first = NULL;
for (U64 ri = 0; ri < self->old_moved_buffers.len; ri++) {
MargaretMANewMovedBufRecord moved = self->old_moved_buffers.buf[ri];
assert(moved.old_pos.occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
MargaretMemAllocator__get_rid_of_memory_occupant(self, moved.old_pos.device_mem_ind, moved.old_pos.occ_it);
assert(moved.replacement->value.me.variant == MargaretMemoryOccupation_Buffer);
MargaretMemAllocator__get_rid_of_memory_occupant_and_node(self, moved.replacement);
}
VecMargaretMANewMovedBufRecord_sink(&self->old_moved_buffers, 0);
}
void MargaretMemAllocator__shrink_some_buffer(
MargaretMemAllocator* self, MargaretMemAllocatorOccupantPosition pos, size_t smaller_size
MargaretMemAllocator* self, RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it, size_t smaller_size
){
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, pos.device_mem_ind);
MargaretMAOccupation* occ = &pos.occ_it->value;
assert(occ->me.variant == MargaretMemoryOccupation_Buffer);
assert(occ->me.buf.capacity >= smaller_size);
U64 buf_start = pos.occ_it->key;
U64 buf_taken_size = occ->taken_size;
ListNodeMargaretMemAllocatorOneBlock* block_it = occ_it->value.block;
MargaretMAOccupant* occ_me = &occ_it->value.me;
assert(occ_me->variant == MargaretMemoryOccupation_Buffer);
assert(occ_me->buf.capacity >= smaller_size);
U64 buf_start = occ_it->key;
U64 buf_taken_size = occ_it->value.taken_size;
VkBuffer shorter_buf;
check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.size = smaller_size,
.usage = occ->me.buf.usage_flags,
.usage = occ_me->buf.usage_flags,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
}, NULL, &shorter_buf) == VK_SUCCESS);
VkMemoryRequirements shorter_buf_req;
@ -706,16 +707,88 @@ void MargaretMemAllocator__shrink_some_buffer(
check((buf_start & (shorter_buf_req.alignment - 1)) == 0)
check(shorter_buf_req.size <= buf_taken_size);
U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(block, pos.occ_it);
MargaretMemAllocator__erase_gap(self, pos.device_mem_ind, right_free_space);
MargaretMemAllocator__insert_gap(self, pos.device_mem_ind,
U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(&block_it->el, occ_it);
MargaretMemAllocator__erase_gap(self, block_it, right_free_space.start, right_free_space.len);
MargaretMemAllocator__insert_gap(self, block_it,
buf_start + shorter_buf_req.size,
right_free_space.len + (buf_taken_size - shorter_buf_req.size));
vkDestroyBuffer(self->device, occ->me.buf.buffer, NULL);
occ->taken_size = shorter_buf_req.size;
occ->me.buf.buffer = shorter_buf;
occ->me.buf.capacity = smaller_size;
vkDestroyBuffer(self->device, occ_me->buf.buffer, NULL);
occ_it->value.taken_size = shorter_buf_req.size;
occ_me->buf.buffer = shorter_buf;
occ_me->buf.capacity = smaller_size;
}
/* didn't generate it, so I am doing it myself */
/* Links a caller-allocated node (key/value already filled in) into the
 * occupation tree as a standard red-black insertion. Duplicate keys are a
 * programming error (assert).
 * BUGFIX: the original never returned after a successful link — the empty-tree
 * case fell through into the search loop with cur == node (hitting
 * assert(false)), and the non-empty paths kept walking after rebalancing.
 * Each insertion path now returns once the node is attached. */
void RBTree_MapU64ToMargaretMAOccupation_insert_node(
    RBTree_MapU64ToMargaretMAOccupation* self, RBTreeNode_KVPU64ToMargaretMAOccupation* node){
    node->base.left = node->base.right = self->NIL;
    node->base.color = RBTREE_RED;
    if (self->root == self->NIL) {
        /* Empty tree: new node becomes the (black) root. */
        self->root = &node->base;
        node->base.parent = self->NIL;
        node->base.color = RBTREE_BLACK;
        return; /* BUGFIX: previously fell through into the search loop */
    }
    RBTreeNode_KVPU64ToMargaretMAOccupation* cur = (RBTreeNode_KVPU64ToMargaretMAOccupation*)self->root;
    while (true) {
        if (node->key < cur->key) {
            if (cur->base.left == self->NIL) {
                node->base.parent = &cur->base;
                cur->base.left = &node->base;
                RBTree_fix_after_insert(&self->root, self->NIL, &node->base);
                return; /* BUGFIX: done — stop walking */
            }
            cur = (RBTreeNode_KVPU64ToMargaretMAOccupation*)cur->base.left;
        } else if (cur->key < node->key) {
            if (cur->base.right == self->NIL) {
                node->base.parent = &cur->base;
                cur->base.right = &node->base;
                RBTree_fix_after_insert(&self->root, self->NIL, &node->base);
                return; /* BUGFIX: done — stop walking */
            }
            cur = (RBTreeNode_KVPU64ToMargaretMAOccupation*)cur->base.right;
        } else {
            assert(false); /* duplicate key — caller bug */
        }
    }
}
/* Helper function for MargaretMemAllocator_request_needs_defragmentation */
/* Packs one occupant (described by mem_requirements) at the end of the block
 * currently being built up. If the aligned occupant would overflow
 * maxMemoryAllocationSize, the current block is sealed first: its tail is
 * registered as a free gap, the VkDeviceMemory is actually allocated (and
 * mapped for host-visible heaps), and a fresh empty block is started.
 * Writes occ_it->key/value, links occ_it into the target block's occupation
 * tree, and advances *cur_block_size_needed past the occupant. */
void MargaretMemAllocator__keep_building_up_cur_block(
    MargaretMemAllocator* self, U64* cur_block_size_needed, ListNodeMargaretMemAllocatorOneBlock** cur_block,
    VkMemoryRequirements mem_requirements, RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it,
    U64 maxMemoryAllocationSize
){
    check(U64_is_2pow(mem_requirements.alignment));
    if (mem_requirements.size > maxMemoryAllocationSize)
        abortf("Your object asks too much :(\n");
    /* Padding needed so the occupant starts on its required alignment */
    U64 af = margaret_get_alignment_left_padding(*cur_block_size_needed, U64_2pow_log(mem_requirements.alignment));
    if (*cur_block_size_needed + af + mem_requirements.size > maxMemoryAllocationSize) {
        /* Seal the current block: everything past what was packed is one big free gap */
        MargaretMemAllocator__insert_gap(self, *cur_block,
            *cur_block_size_needed, maxMemoryAllocationSize - *cur_block_size_needed);
        /* BUGFIX: vkAllocateMemory/vkMapMemory results were silently ignored;
         * wrap them in check() like every other Vulkan call in this file */
        check(vkAllocateMemory(self->device, &(VkMemoryAllocateInfo){
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .memoryTypeIndex = self->memory_type_id,
            .allocationSize = maxMemoryAllocationSize}, NULL, &((*cur_block)->el.mem_hand)) == VK_SUCCESS);
        (*cur_block)->el.capacity = maxMemoryAllocationSize;
        if (self->mem_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
            check(vkMapMemory(self->device, (*cur_block)->el.mem_hand, 0, VK_WHOLE_SIZE, 0,
                &(*cur_block)->el.mapped_memory) == VK_SUCCESS);
        }
        /* Start a fresh empty block and make it the packing target */
        *cur_block = (ListNodeMargaretMemAllocatorOneBlock*)safe_calloc(1, sizeof(ListNodeMargaretMemAllocatorOneBlock));
        (*cur_block)->el.occupied_memory = RBTree_MapU64ToMargaretMAOccupation_new();
        ListMargaretMemAllocatorOneBlock_insert_node(&self->blocks, *cur_block);
        *cur_block_size_needed = 0;
        af = 0; /* offset 0 satisfies any power-of-two alignment */
    }
    /* Register the alignment padding (possibly zero-length) as a free gap */
    MargaretMemAllocator__insert_gap(self, *cur_block, *cur_block_size_needed, af);
    occ_it->key = *cur_block_size_needed + af;
    occ_it->value.block = *cur_block;
    occ_it->value.taken_size = mem_requirements.size;
    RBTree_MapU64ToMargaretMAOccupation_insert_node(&(*cur_block)->el.occupied_memory, occ_it);
    /* Updating important counter */
    *cur_block_size_needed = *cur_block_size_needed + af + mem_requirements.size;
}
void MargaretMemAllocator_request_needs_defragmentation(
@ -723,12 +796,8 @@ void MargaretMemAllocator_request_needs_defragmentation(
VecMargaretMABufferExpansionRecord buffer_expansion_record,
size_t alloc_buf_requests_require_cancel, size_t alloc_img_requests_require_cancel){
// VkPhysicalDeviceMaintenance4Properties maintenance4_properties = {
// .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
// };
VkPhysicalDeviceMaintenance3Properties maintenance3_properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
// .pNext = &maintenance4_properties
};
VkPhysicalDeviceProperties2 properties = {
.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
@ -739,9 +808,298 @@ void MargaretMemAllocator_request_needs_defragmentation(
check(vkBeginCommandBuffer(cmd_buff, &(VkCommandBufferBeginInfo){
.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO
}) == VK_SUCCESS);
// todo: do
/* You see, some of the expand_buffer requests were taken care of right away. And we popped them from
* request vector. Because we expanded buffer without moving it. Now we need to undo the expansions and
* regenerate expand_buffer request back in vector */
for (size_t i = 0; i < buffer_expansion_record.len; i++) {
MargaretMABufferExpansionRecord ss;
U64 old_capacity = buffer_expansion_record.buf[i].old_capacity;
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = buffer_expansion_record.buf[i].occ_it;
assert(occ_it);
U64 needed_capacity = occ_it->value.me.buf.capacity;
MargaretMemAllocator__shrink_some_buffer(self, occ_it, old_capacity);
VecMargaretMemAllocatorRequestResizeBuffer_append(&requests->expand_buf,
(MargaretMemAllocatorRequestResizeBuffer){.new_size = needed_capacity, .occ_it = occ_it});
}
VecMargaretMABufferExpansionRecord_drop(buffer_expansion_record);
/* Also, if an expand_buf request could not be carried out by simply expanding the buffer in place, we
 * would have moved it to another place. This could have happened prior to the defragmentation call.
 * Now we need to undo this. We store the record of such an action in self->old_moved_buffers. The
 * expand_buf request that resulted in such an action is not popped — it is still in the vector. We just
 * have to undo all changes. We moved the occupant node to another place, leaving `replacer` behind.
 * It holds the original buffer, while our original node holds the new buffer — a failed attempt to carry
 * out the request without defragmentation. We destroy the second (big) buffer and steal everything
 * from replacer (stealing into the original node). */
for (size_t i = 0; i < self->old_moved_buffers.len; i++) {
RBTreeNode_KVPU64ToMargaretMAOccupation* replacer = self->old_moved_buffers.buf[i].replacement;
RBTreeNode_KVPU64ToMargaretMAOccupation* my_occ_it = self->old_moved_buffers.buf[i].my_occ_it;
assert(replacer != NULL && my_occ_it != NULL);
assert(replacer->value.me.variant == MargaretMemoryOccupation_Buffer);
assert(my_occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
RBTree_MapU64ToMargaretMAOccupation* OLD_TREE = &replacer->value.block->el.occupied_memory;
RBTree_MapU64ToMargaretMAOccupation* NEW_TREE = &my_occ_it->value.block->el.occupied_memory;
RBTree_erase_empty_by_iter(&NEW_TREE->root, NEW_TREE->NIL, &my_occ_it->base);
MargaretMemAllocator__get_rid_of_memory_occupant_but_not_node(self, my_occ_it);
// my_occ_it remains correct
RBTree_steal_neighbours(&OLD_TREE->root, OLD_TREE->NIL, &replacer->base, &my_occ_it->base);
// now nobody knows about replacer
my_occ_it->key = replacer->key;
my_occ_it->value = replacer->value;
// now everything important was moved to my_occ_it. Replacer can be freed
free(replacer);
}
self->old_moved_buffers.len = 0;
for (size_t i = 0; i < alloc_buf_requests_require_cancel; i++) {
RBTreeNode_KVPU64ToMargaretMAOccupation* given_occ_it = *requests->alloc_buf.buf[i].ret_ans;
assert(given_occ_it && given_occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
MargaretMemAllocator__get_rid_of_memory_occupant_and_node(self, given_occ_it);
}
for (size_t i = 0; i < alloc_img_requests_require_cancel; i++) {
RBTreeNode_KVPU64ToMargaretMAOccupation* given_occ_it = *requests->alloc_image.buf[i].ret_ans;
assert(given_occ_it && given_occ_it->value.me.variant == MargaretMemoryOccupation_Image);
MargaretMemAllocator__get_rid_of_memory_occupant_and_node(self, given_occ_it);
}
/* END OF REVERTING. WE REVERTED EVERYTHING */
/* We came here because we ran out of space. We defragment everything there is to defragment */
assert(self->old_blocks.first == NULL);
self->old_blocks.first = self->blocks.first;
self->blocks.first = NULL;
/* Cleaning free space set from gaps of old blocks */
for (ListNodeMargaretMemAllocatorOneBlock* block_it = self->old_blocks.first; block_it; block_it = block_it->next){
RBTreeNode_KVPU64ToMargaretMAOccupation* cur = RBTree_MapU64ToMargaretMAOccupation_find_min(
&block_it->el.occupied_memory);
U64 prev_end = 0;
while (cur) {
MargaretMemAllocator__erase_gap(self, block_it, prev_end, cur->key - prev_end);
prev_end = cur->key + cur->value.taken_size;
cur = RBTree_MapU64ToMargaretMAOccupation_find_next(&block_it->el.occupied_memory, cur);
}
MargaretMemAllocator__erase_gap(self, block_it, prev_end, block_it->el.capacity - prev_end);
}
/* It's used as a counter before the actual VkDeviceMemory is allocated.
* (To know how much memory needs to be allocated ) */
U64 cur_block_size_needed = 0;
ListNodeMargaretMemAllocatorOneBlock* cur_block = safe_calloc(1, sizeof(ListNodeMargaretMemAllocatorOneBlock));
cur_block->el.occupied_memory = RBTree_MapU64ToMargaretMAOccupation_new();
ListMargaretMemAllocatorOneBlock_insert_node(&self->blocks, cur_block);
for (size_t i = 0; i < requests->expand_buf.len; i++) {
U64 needed_buf_capacity = requests->expand_buf.buf[i].new_size;
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = requests->expand_buf.buf[i].occ_it;
assert(occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
RBTreeNode_KVPU64ToMargaretMAOccupation* replacer = safe_malloc(sizeof(RBTreeNode_KVPU64ToMargaretMAOccupation));
{
RBTree_MapU64ToMargaretMAOccupation* OLD_TREE = &occ_it->value.block->el.occupied_memory;
RBTree_steal_neighbours(&OLD_TREE->root, OLD_TREE->NIL, &occ_it->base, &replacer->base);
}
replacer->key = occ_it->key;
replacer->value = occ_it->value;
replacer->value.me.buf.moved_already = true;
VkBuffer fresh_buf;
check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.flags = replacer->value.me.buf.usage_flags,
.size = needed_buf_capacity,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE
}, NULL, &fresh_buf) == VK_SUCCESS);
VkMemoryRequirements mem_requirements;
vkGetBufferMemoryRequirements(self->device, fresh_buf, &mem_requirements);
occ_it->value.me.buf.buffer = fresh_buf;
occ_it->value.me.buf.capacity = needed_buf_capacity;
MargaretMemAllocator__keep_building_up_cur_block(self, &cur_block_size_needed, &cur_block, mem_requirements,
occ_it, maintenance3_properties.maxMemoryAllocationSize);
if (occ_it->value.me.buf.preserve_at_quiet) {
vkCmdCopyBuffer(cmd_buff, replacer->value.me.buf.buffer, occ_it->value.me.buf.buffer, 1, &(VkBufferCopy){
.srcOffset = 0, .dstOffset = 0, .size = replacer->value.me.buf.capacity});
}
}
for (size_t i = 0; i < requests->alloc_buf.len; i++) {
MargaretMemAllocatorRequestAllocBuffer* req = &requests->alloc_buf.buf[i];
U64 needed_buf_capacity = req->allocation_size;
VkBuffer fresh_buf;
check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.flags = req->usage,
.size = needed_buf_capacity,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE
}, NULL, &fresh_buf) == VK_SUCCESS);
VkMemoryRequirements mem_requirements;
vkGetBufferMemoryRequirements(self->device, fresh_buf, &mem_requirements);
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = safe_calloc(1, sizeof(RBTreeNode_KVPU64ToMargaretMAOccupation));
*(req->ret_ans) = occ_it;
occ_it->value.me.variant = MargaretMemoryOccupation_Buffer;
occ_it->value.me.buf.buffer = fresh_buf;
occ_it->value.me.buf.capacity = needed_buf_capacity;
occ_it->value.me.buf.usage_flags = req->usage;
occ_it->value.me.buf.preserve_at_quiet = req->preserve_at_quiet;
MargaretMemAllocator__keep_building_up_cur_block(self, &cur_block_size_needed, &cur_block, mem_requirements,
occ_it, maintenance3_properties.maxMemoryAllocationSize);
}
for (size_t i = 0; i < requests->alloc_image.len; i++) {
MargaretMemAllocatorRequestAllocImage* req = &requests->alloc_image.buf[i];
VkImage fresh_image;
check(vkCreateImage(self->device, &(VkImageCreateInfo){
.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
.imageType = VK_IMAGE_TYPE_2D,
.format = req->format,
.extent = (VkExtent3D){
.width = req->width,
.height = req->height,
.depth = 1,
},
.mipLevels = 1,
.arrayLayers = 1,
.samples = VK_SAMPLE_COUNT_1_BIT,
.tiling = VK_IMAGE_TILING_OPTIMAL,
.usage = req->usage_flags,
.initialLayout = req->current_layout,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
}, NULL, &fresh_image) == VK_SUCCESS);
VkMemoryRequirements mem_requirements;
vkGetImageMemoryRequirements(self->device, fresh_image, &mem_requirements);
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = safe_calloc(1, sizeof(RBTreeNode_KVPU64ToMargaretMAOccupation));
*(req->ret_ans) = occ_it;
occ_it->value.me.variant = MargaretMemoryOccupation_Image;
occ_it->value.me.img = (MargaretMemoryOccupationImage){
.width = req->width, .height = req->height, .usage_flags = req->usage_flags,
.current_layout = req->current_layout, .format = req->format, .image = fresh_image,
.preserve_at_quiet = req->preserve_at_quiet
};
MargaretMemAllocator__keep_building_up_cur_block(self, &cur_block_size_needed, &cur_block, mem_requirements,
occ_it, maintenance3_properties.maxMemoryAllocationSize);
}
/* We move blocks here, but not because some request asked, no, we migrate all unmoved blocks from */
for (ListNodeMargaretMemAllocatorOneBlock* block_it = self->old_blocks.first; block_it; block_it = block_it->next) {
RBTree_MapU64ToMargaretMAOccupation* OLD_TREE = &block_it->el.occupied_memory;
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = RBTree_MapU64ToMargaretMAOccupation_find_min(OLD_TREE);
while (occ_it) {
if (occ_it->value.me.variant == MargaretMemoryOccupation_Buffer && occ_it->value.me.buf.moved_already) {
occ_it = RBTree_MapU64ToMargaretMAOccupation_find_next(OLD_TREE, occ_it);
continue;
}
RBTreeNode_KVPU64ToMargaretMAOccupation* replacer = safe_malloc(sizeof(RBTreeNode_KVPU64ToMargaretMAOccupation));
RBTree_steal_neighbours(&OLD_TREE->root, OLD_TREE->NIL, &occ_it->base, &replacer->base);
replacer->key = occ_it->key;
replacer->value = occ_it->value;
if (replacer->value.me.variant == MargaretMemoryOccupation_Buffer) {
VkBuffer fresh_buf;
check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
.flags = replacer->value.me.buf.usage_flags,
.size = replacer->value.me.buf.capacity,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE
}, NULL, &fresh_buf) == VK_SUCCESS);
VkMemoryRequirements mem_requirements;
vkGetBufferMemoryRequirements(self->device, fresh_buf, &mem_requirements);
occ_it->value.me.buf.buffer = fresh_buf;
/* This function changes occ_it->taken_size and, of course, occ_it->block */
MargaretMemAllocator__keep_building_up_cur_block(self, &cur_block_size_needed, &cur_block, mem_requirements,
occ_it, maintenance3_properties.maxMemoryAllocationSize);
if (occ_it->value.me.buf.preserve_at_quiet) {
vkCmdCopyBuffer(cmd_buff, replacer->value.me.buf.buffer, occ_it->value.me.buf.buffer, 1, &(VkBufferCopy){
.srcOffset = 0, .dstOffset = 0, .size = replacer->value.me.buf.capacity});
}
} else {
VkImage fresh_image;
check(vkCreateImage(self->device, &(VkImageCreateInfo){
.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
.imageType = VK_IMAGE_TYPE_2D,
.format = replacer->value.me.img.format,
.extent = (VkExtent3D){
.width = replacer->value.me.img.width,
.height = replacer->value.me.img.height,
.depth = 1,
},
.mipLevels = 1,
.arrayLayers = 1,
.samples = VK_SAMPLE_COUNT_1_BIT,
.tiling = VK_IMAGE_TILING_OPTIMAL,
.usage = replacer->value.me.img.usage_flags,
.initialLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
.sharingMode = VK_SHARING_MODE_EXCLUSIVE,
}, NULL, &fresh_image) == VK_SUCCESS);
VkMemoryRequirements mem_requirements;
vkGetImageMemoryRequirements(self->device, fresh_image, &mem_requirements);
occ_it->value.me.img.image = fresh_image;
if (occ_it->value.me.img.preserve_at_quiet) {
VkImageMemoryBarrier first_barriers[2]; // todo: continue from here
vkCmdPipelineBarrier(cmd_buff, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
0 /* Flags */, 0, NULL /* not here */, 0, NULL /* not here */,
1, &(VkImageMemoryBarrier){
.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
.srcAccessMask = 0,
.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = occ_it->value.me.img.image,
.subresourceRange = (VkImageSubresourceRange){
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.baseMipLevel = 0,
.levelCount = 1,
.baseArrayLayer = 0,
.layerCount = 1,
},
});
vkCmdCopyImage(cmd_buff, replacer->value.me.img.image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
fresh_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &(VkImageCopy){
.srcSubresource = (VkImageSubresourceLayers){
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.mipLevel = 0,
.baseArrayLayer = 0,
.layerCount = 1,
},
.srcOffset = {0, 0, 0},
.dstSubresource = (VkImageSubresourceLayers){
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.mipLevel = 0,
.baseArrayLayer = 0,
.layerCount = 1,
},
.dstOffset = {0, 0, 0},
.extent = (VkExtent3D){
.width = replacer->value.me.img.width,
.height = replacer->value.me.img.height,
.depth = 1
}
});
// vkCmdPipelineBarrier(command_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, destination_stage_mask,
}
}
occ_it = RBTree_MapU64ToMargaretMAOccupation_find_next(OLD_TREE, replacer);
}
}
// todo: iterate over all remaining occupants, and do the "moving thing"
}
MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
@ -749,19 +1107,18 @@ MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
){
MargaretMemAllocator_wipe_old(self);
for (size_t i = 0; i < requests->free_buf.len; i++) {
MargaretMemAllocatorOccupantPosition pos = *(requests->free_buf.buf[i]);
assert(pos.occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
MargaretMemAllocator__get_rid_of_memory_occupant(self, pos.device_mem_ind, pos.occ_it);
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = requests->free_buf.buf[i];
assert(occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
MargaretMemAllocator__get_rid_of_memory_occupant_and_node(self, occ_it);
}
for (size_t i = 0; i < requests->free_image.len; i++) {
MargaretMemAllocatorOccupantPosition pos = *(requests->free_buf.buf[i]);
assert(pos.occ_it->value.me.variant == MargaretMemoryOccupation_Image);
MargaretMemAllocator__get_rid_of_memory_occupant(self, pos.device_mem_ind, pos.occ_it);
RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = requests->free_image.buf[i];
assert(occ_it->value.me.variant == MargaretMemoryOccupation_Image);
MargaretMemAllocator__get_rid_of_memory_occupant_and_node(self, occ_it);
}
for (size_t i = 0; i < requests->shrink_buf.len; i++) {
MargaretMemAllocatorRequestResizeBuffer req = (requests->shrink_buf.buf[i]);
assert(req.ans->occ_it->value.ans == req.ans);
MargaretMemAllocator__shrink_some_buffer(self, *req.ans, req.new_size);
MargaretMemAllocator__shrink_some_buffer(self, req.occ_it, req.new_size);
}
VecMargaretMABufferExpansionRecord buffer_expansion_record = VecMargaretMABufferExpansionRecord_new();
@ -770,11 +1127,8 @@ MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
/* We first try to do all the expand_buf requests, that COULD be done using method 1 */
for (U64 rr = 0; rr < requests->expand_buf.len;) {
U64 new_size = requests->expand_buf.buf[rr].new_size;
MargaretMemAllocatorOccupantPosition* ans = requests->expand_buf.buf[rr].ans;
assert(ans->occ_it->value.ans == ans);
// todo: I may actually want to store blocks in a linked list
MargaretMemAllocatorOneBlock* block = VecMargaretMemAllocatorOneBlock_mat(&self->blocks, ans->device_mem_ind);
RBTreeNode_KVPU64ToMargaretMAOccupation* original_node = requests->expand_buf.buf[rr].occ_it;
// todo: fix according to new design
U64 occ_start = ans->occ_it->key;
assert(ans->occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
MargaretMemoryOccupationBuffer* buf = &ans->occ_it->value.me.buf;

View File

@ -16,6 +16,7 @@ const VkStructureType VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 645484;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 14542;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 145;
const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 5324;
const VkStructureType VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 986;
typedef int VkBufferCreateFlags;
@ -109,6 +110,8 @@ typedef int VkImageUsageFlags;
typedef int VkImageLayout;
const VkImageLayout VK_IMAGE_LAYOUT_UNDEFINED = 780;
const VkImageLayout VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 56637;
const VkImageLayout VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 56622;
typedef struct {
VkStructureType sType;
@ -176,6 +179,8 @@ void vkDestroyImage(
typedef int VkMemoryMapFlags;
#define VK_WHOLE_SIZE (~0ULL)
VkResult vkMapMemory(
VkDevice device,
VkDeviceMemory memory,
@ -249,6 +254,7 @@ VkResult vkBeginCommandBuffer(
typedef int VkAccessFlags;
const VkAccessFlags VK_ACCESS_TRANSFER_READ_BIT = 0x100;
const VkAccessFlags VK_ACCESS_TRANSFER_WRITE_BIT = 0x100000;
typedef int VkImageAspectFlags;
const VkImageAspectFlags VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001;
@ -261,6 +267,7 @@ typedef struct VkImageSubresourceRange {
uint32_t layerCount;
} VkImageSubresourceRange;
const uint32_t VK_QUEUE_FAMILY_IGNORED = (~0U);
typedef struct VkImageMemoryBarrier {
VkStructureType sType;
@ -336,5 +343,22 @@ void vkCmdCopyBuffer(
uint32_t regionCount,
const VkBufferCopy* pRegions);
typedef struct VkImageCopy {
VkImageSubresourceLayers srcSubresource;
VkOffset3D srcOffset;
VkImageSubresourceLayers dstSubresource;
VkOffset3D dstOffset;
VkExtent3D extent;
} VkImageCopy;
void vkCmdCopyImage(
VkCommandBuffer commandBuffer,
VkImage srcImage,
VkImageLayout srcImageLayout,
VkImage dstImage,
VkImageLayout dstImageLayout,
uint32_t regionCount,
const VkImageCopy* pRegions);
#include "../../margaret/vulkan_memory_claire.h"