I just realized I wasted 100 days of my life, during the most important period of my life, on useless shit that served no purpose. I just deleted MargaretMemAllocator.
This commit is contained in: parent 202e11ab56 · commit 6f418827dc
@@ -13,25 +13,15 @@ void generate_margaret_eve_for_vulkan_utils() {
        .mut_span = true, .collab_vec_span = true, .span_sort = true
    });

    /* For l2/margaret/vulkan_memory_claire.h */
    generate_List_templ_inst_eve_header(l, ns, (list_instantiation_op){.T = cstr("MargaretMemAllocatorOneBlock")}, false);

    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestFreeOccupant"), true, false);
    generate_util_templ_inst_eve_header(l, ns, (util_templates_instantiation_options){
        .T = cstr("MargaretMemAllocatorRequestResizeBuffer"), .t_primitive = true,
        .vec_extended = true /* We need unordered_pop to do some tomfoolery */});
    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocBuffer"), true, false);
    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMemAllocatorRequestAllocImage"), true, false);

    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretFreeMemSegment"), true, false);
    /* For l2/margaret/{ vulkan_img_claire.h , vulkan_buffer_claire.h } */
    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretFreeSegment"), true, false);
    generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
        .T = cstr("MargaretFreeMemSegment"), .t_primitive = true});
        .T = cstr("MargaretFreeSegment"), .t_primitive = true});

    generate_Option_templ_inst_eve_header(l, ns, (option_template_instantiation_op){
        .T = cstr("BufRBTreeByLenRespAlign_SetMargaretFreeMemSegment")});
        .T = cstr("BufRBTreeByLenRespAlign_SetMargaretFreeSegment")});

    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMABufferExpansionRecord"), true, false);
    generate_eve_span_company_for_primitive(l, ns, cstr("MargaretMANewMovedBufRecord"), true, false);
    generate_eve_span_company_for_non_primitive_non_clonable(l, ns, cstr("MargaretImgAllocatorOneBlock"), true, false);
}
@@ -48,31 +48,25 @@ void generate_util_temp_very_base_headers() {
        VecU8_drop(SpanT);
        VecU8_drop(dependency);
    }
    generate_guarded_span_company_for_primitive(cstr("l1"), cstr(""),
        cstr("CSTR"), cstr(""), true, false);
    generate_guarded_span_company_for_primitive(l, ns, cstr("CSTR"), cstr(""), true, false);

    generate_ResultType_templ_inst_guarded_header(cstr("l1"), cstr(""),
    generate_ResultType_templ_inst_guarded_header(l, ns,
        cstr(""), cstr("VecU8"), cstr("#include \"VecAndSpan_U8.h\""), true, false);
    generate_ResultType_templ_inst_guarded_header(cstr("l1"), cstr(""),
    generate_ResultType_templ_inst_guarded_header(l, ns,
        cstr(""), cstr("SpanU8"), cstr("#include \"VecAndSpan_U8.h\""), true, true);

    generate_guarded_span_company_for_primitive(cstr("l1"), cstr(""), cstr("U32Segment"),
    generate_guarded_span_company_for_primitive(l, ns, cstr("U32Segment"),
        cstr("#include \"../../src/l1/core/uint_segments.h\""), true, true);

    /* Not very basic but definitely very common */
    generate_guarded_span_company_for_non_primitive_clonable(cstr("l1"), cstr(""), cstr("TextureDataR8G8B8A8"),
    generate_guarded_span_company_for_non_primitive_clonable(l, ns, cstr("TextureDataR8G8B8A8"),
        cstr("#include \"../../gen/l1/pixel_masses.h\"\n"), true, false);
    generate_guarded_span_company_for_non_primitive_clonable(cstr("l1"), cstr(""), cstr("TextureDataR8G8B8"),
    generate_guarded_span_company_for_non_primitive_clonable(l, ns, cstr("TextureDataR8G8B8"),
        cstr("#include \"../../gen/l1/pixel_masses.h\"\n"), true, false);
    generate_guarded_span_company_for_non_primitive_clonable(cstr("l1"), cstr(""), cstr("TextureDataR8"),
    generate_guarded_span_company_for_non_primitive_clonable(l, ns, cstr("TextureDataR8"),
        cstr("#include \"../../gen/l1/pixel_masses.h\"\n"), true, false);

    generate_guarded_span_company_for_primitive(cstr("l1"), cstr(""), cstr("PostponedMemcpy"), cstr(
        "typedef struct{\n"
        SPACE "void* dest;\n"
        SPACE "const void* src;\n"
        SPACE "size_t n;\n"
        "} PostponedMemcpy;\n"), true, false);
    generate_guarded_span_company_for_primitive(l, ns, cstr("KVPU64ToU64"), cstr(""), true, false);
}

#endif
@@ -87,6 +87,11 @@ typedef struct {
    U32 height;
} SizeOfRectangleU32;

typedef struct{
    U64 key;
    U64 value;
} KVPU64ToU64;

#define check(expr) if (!(expr)) { abortf("Assertion failed at %s : %d : " #expr "\n", __FILE__, __LINE__); }

#endif
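The check macro above aborts with the file name, line number, and stringified expression when a condition fails. A minimal, self-contained usage sketch (not part of the commit): abortf is assumed to be the project's printf-style fatal-error helper, so a stand-in is defined here only to make the snippet compile on its own.

/* Hedged usage sketch; abortf stand-in is an assumption. */
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>

static void abortf(const char* fmt, ...) {
    va_list args;
    va_start(args, fmt);
    vfprintf(stderr, fmt, args);
    va_end(args);
    abort();
}

#define check(expr) if (!(expr)) { abortf("Assertion failed at %s : %d : " #expr "\n", __FILE__, __LINE__); }

int main(void) {
    int values[4] = {1, 2, 3, 4};
    check(values[0] < values[3]); /* passes, no output */
    check(values[3] < values[0]); /* prints the file, line and failed expression text, then aborts */
    return 0;
}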
@@ -11,11 +11,15 @@ void generate_l1_5_template_instantiation_for_base_types(){
    generate_buf_rbtree_Set_templ_inst_guarded_header(l, ns, cstr("#include \"../l1/VecAndSpan_S64.h\""),
        (set_instantiation_op){.T = cstr("S64"), .t_integer = true});

    // l1/core/int_primitives is included in l1_5/core/rb_tree_node.h, hence no additional dependencies needed
    /* l1/core/utils.h is included in l1_5/core/rb_tree_node.h, hence no additional dependencies needed */
    generate_rbtree_Set_templ_inst_guarded_header(l, ns, cstr(""), (set_instantiation_op){
        .T = cstr("U64"), .t_integer = true }, true);
    generate_rbtree_Set_templ_inst_guarded_header(l, ns, cstr(""), (set_instantiation_op){
        .T = cstr("S64"), .t_integer = true }, true);

    // todo: move vector declaration HERE
    generate_buf_rbtree_Map_templ_inst_guarded_header(l, ns, cstr("#include \"../../gen/l1/VecKVPU64ToU64.h\"\n"),
        (map_instantiation_op){.K = cstr("U64"), .k_integer = true, .V = cstr("U64"), .v_integer = true,});
}

#endif
@@ -9,17 +9,21 @@ void generate_l1_5_template_instantiations_for_margaret(){
    mkdir_nofail("l1_5/eve");
    mkdir_nofail("l1_5/eve/margaret");

    /* For MargaretMemAllocator */
    /* For l2/margaret/{ vulkan_img_claire.h , vulkan_buffer_claire.h } */
    generate_buf_rbtree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
        .T = cstr("MargaretFreeMemSegment"), .t_primitive = true,
        .T = cstr("MargaretFreeSegment"), .t_primitive = true,
        /* comparison takes additional U8 parameter */
        .alternative_less = cstr("MargaretFreeMemSegment_less_resp_align"),
        .alternative_less = cstr("MargaretFreeSegment_less_resp_align"),
        .alternative_comp_set_name_embed = cstr("LenRespAlign"),
        .guest_data_T = cstr("U8"),
    });
    generate_rbtree_Map_templ_inst_eve_header(l, ns, (map_instantiation_op){
        .K = cstr("U64"), .k_integer = true, .V = cstr("MargaretMAOccupation"), .v_primitive = true,
    }, true /* We want RBTreeNode_KVPU64ToMargaretMemoryOccupation to be generated here for us */ );
    generate_buf_rbtree_Set_templ_inst_eve_header(l, ns, (set_instantiation_op){
        .T = cstr("MargaretFreeSegment"), .t_primitive = true,
        /* comparison takes additional U8 parameter */
        .alternative_less = cstr("MargaretFreeSegment_less_len"),
        .alternative_comp_set_name_embed = cstr("Len"),
        .guest_data_T = cstr("U8"),
    });
}

#endif
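Both set instantiations above are over the same element type; .alternative_comp_set_name_embed ("LenRespAlign" vs "Len") keeps their generated names distinct, and .guest_data_T = cstr("U8") asks for comparators that carry one extra U8 (the alignment exponent), matching the MargaretFreeSegment_less_* signatures in allocator_base.h below. A rough, hypothetical sketch of that "less with guest data" shape; the array lower_bound here only stands in for the generated tree search, and every name in it is illustrative, not the project's API.

#include <stdbool.h>
#include <stdint.h>
typedef uint8_t U8;
typedef uint64_t U64;

typedef struct { U64 block; U64 start; U64 len; } MargaretFreeSegment; /* as in allocator_base.h */

/* Comparator that threads one extra U8 of guest data through every comparison. */
typedef bool (*less_with_guest_u8)(const MargaretFreeSegment* a, const MargaretFreeSegment* b, U8 guest);

/* Stand-in for the generated ordered search: a lower_bound over a sorted array. */
static U64 lower_bound_sketch(const MargaretFreeSegment* buf, U64 n, const MargaretFreeSegment* key,
                              less_with_guest_u8 less, U8 alignment_exp) {
    U64 lo = 0, hi = n;
    while (lo < hi) {
        U64 mid = lo + (hi - lo) / 2;
        if (less(&buf[mid], key, alignment_exp)) lo = mid + 1; else hi = mid;
    }
    return lo; /* index of the first element not less than key under this alignment */
}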
@@ -471,7 +471,7 @@ void codegen_append_buff_rbtree_map__method_at_iter(VecU8* res, map_instantiatio
        op.k_integer ? VecU8_from_span(op.K) : VecU8_fmt("const %s*", op.K),
        mut ? VecU8_fmt("%s*", op.V) : (op.v_integer ? VecU8_from_span(op.V) : VecU8_fmt("const %s*", op.V)),

        op.k_integer ? cstr("") : cstr("&"), op.v_integer ? cstr("") : cstr("&")));
        op.k_integer ? cstr("") : cstr("&"), (op.v_integer && !mut) ? cstr("") : cstr("&")));
}

NODISCARD VecU8 get_name_of_buf_rbtree_map_structure(map_instantiation_op op){
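The change in this hunk fixes the '&' emitted in front of the value: when mut is true the accessor returns a pointer (the "%s*" branch above), so the generated body needs &...value even for an integer V; only the const, integer-V accessor returns by value with no '&'. A hedged sketch of the shape of the emitted accessors for K = U64, V = U64; the function and field names are assumptions, and the real generated node also carries red-black bookkeeping.

#include <stdint.h>
typedef uint64_t U64;

/* Minimal stand-in for the generated key/value node. */
typedef struct { U64 key; U64 value; } NodeKVPU64ToU64_sketch;

/* const accessor, integer V, mut == false: value returned by value, no '&'. */
static U64 at_iter_sketch(const NodeKVPU64ToU64_sketch* it) { return it->value; }

/* mutable accessor, mut == true: pointer return, so '&' is required even for an
   integer V, which is what the (op.v_integer && !mut) condition now accounts for. */
static U64* at_iter_mut_sketch(NodeKVPU64ToU64_sketch* it) { return &it->value; }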
src/l2/margaret/allocator_base.h (new file, 41 lines)
@@ -0,0 +1,41 @@
#ifndef prototype1_src_l2_margaret_allocator_base_h
#define prototype1_src_l2_margaret_allocator_base_h

#include "../../l1/core/uint_segments.h"
#include "../../l1/core/util.h"
#include "../../l1_5/core/buff_rb_tree_node.h"
#include "../../../gen/l1_5/BufRBTree_MapU64ToU64.h"

typedef struct {
    U64 block;
    U64 start;
    U64 len;
} MargaretFreeSegment;

bool MargaretFreeSegment_less_len(const MargaretFreeSegment* A, const MargaretFreeSegment* B){
    if (A->len == B->len) {
        if (A->block == B->block) {
            return A->start < B->start;
        }
        return A->block < B->block;
    }
    return A->len < B->len;
}

// todo: substitute U64Segment_get_length_resp_alignment by my own function
bool MargaretFreeSegment_less_resp_align(const MargaretFreeSegment* A, const MargaretFreeSegment* B, U8 alignment_exp){
    U64 A_len = U64Segment_get_length_resp_alignment((U64Segment){A->start, A->len}, alignment_exp);
    U64 B_len = U64Segment_get_length_resp_alignment((U64Segment){B->start, B->len}, alignment_exp);
    if (A_len == B_len) {
        if (A->block == B->block) {
            return A->start < B->start;
        }
        return A->block < B->block;
    }
    return A_len < B_len;
}

#include "../../../gen/l1/eve/margaret/VecMargaretFreeSegment.h"
#include "../../../gen/l1/eve/margaret/OptionMargaretFreeSegment.h"

#endif
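The resp_align comparator above orders free gaps by how many bytes remain usable once the gap's start is rounded up to a 2^alignment_exp boundary. A hedged sketch of that computation follows; the real U64Segment_get_length_resp_alignment lives in l1/core/uint_segments.h and may differ, so the stand-in below is an assumption used only to illustrate the ordering.

#include <stdint.h>
#include <stdio.h>
typedef uint8_t U8;
typedef uint64_t U64;
typedef struct { U64 start; U64 len; } U64Segment;

/* Assumed behavior: usable length of the segment after aligning its start upward. */
static U64 length_resp_alignment_sketch(U64Segment s, U8 alignment_exp) {
    U64 align = (U64)1 << alignment_exp;
    U64 aligned_start = (s.start + align - 1) & ~(align - 1);
    U64 end = s.start + s.len;
    return aligned_start >= end ? 0 : end - aligned_start;
}

int main(void) {
    /* A 20-byte gap at offset 5: with 8-byte alignment (alignment_exp = 3) the first
       usable address is 8, so only 17 bytes count toward the best-fit ordering. */
    U64Segment gap = { .start = 5, .len = 20 };
    printf("%llu\n", (unsigned long long)length_resp_alignment_sketch(gap, 3)); /* prints 17 */
    return 0;
}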
src/l2/margaret/vulkan_buffer_claire.h (new file, 194 lines)
@@ -0,0 +1,194 @@
#ifndef prototype1_src_l2_margaret_vulkan_buffer_claire_h
#define prototype1_src_l2_margaret_vulkan_buffer_claire_h

// Same dependencies as vulkan memory allocator

#include "allocator_base.h"

typedef struct {
    U64 block;
    U64 start;
} MargaretBufAllocation;









// void MargaretMemAllocator__shrink_some_buffer(
// MargaretMemAllocator* self, RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it, size_t smaller_size
// ){
// ListNodeMargaretMemAllocatorOneBlock* block_it = occ_it->value.block;
// MargaretMAOccupant* occ_me = &occ_it->value.me;
// assert(occ_me->variant == MargaretMemoryOccupation_Buffer);
// assert(occ_me->buf.capacity >= smaller_size);
// U64 buf_start = occ_it->key;
// U64 buf_taken_size = occ_it->value.taken_size;
// VkBuffer shorter_buf;
// check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
// .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
// .size = smaller_size,
// .usage = occ_me->buf.usage_flags,
// .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
// }, NULL, &shorter_buf) == VK_SUCCESS);
// VkMemoryRequirements shorter_buf_req;
// vkGetBufferMemoryRequirements(self->device, shorter_buf, &shorter_buf_req);
// check(U64_is_2pow(shorter_buf_req.alignment));
// check((shorter_buf_req.memoryTypeBits & self->memory_type_id));
// check((buf_start & (shorter_buf_req.alignment - 1)) == 0)
// check(shorter_buf_req.size <= buf_taken_size);
//
// U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(&block_it->el, occ_it);
// MargaretMemAllocator__erase_gap(self, block_it, right_free_space.start, right_free_space.len);
// MargaretMemAllocator__insert_gap(self, block_it,
// buf_start + shorter_buf_req.size,
// right_free_space.len + (buf_taken_size - shorter_buf_req.size));
//
// vkDestroyBuffer(self->device, occ_me->buf.buffer, NULL);
// occ_it->value.taken_size = shorter_buf_req.size;
// occ_me->buf.buffer = shorter_buf;
// occ_me->buf.capacity = smaller_size;
// }

// MargaretMemAllocatorDemands MargaretMemAllocator_carry_out_request(
// MargaretMemAllocator* self, MargaretMemAllocatorRequests* requests
// ){

//
// /* We first try to do all the expand_buf requests, that COULD be done using method 1 */
// for (U64 rr = 0; rr < requests->expand_buf.len;) {
// U64 new_size = requests->expand_buf.buf[rr].new_size;
// RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = requests->expand_buf.buf[rr].occ_it;
//
// U64 occ_start = occ_it->key;
// assert(occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
// MargaretMemoryOccupationBuffer* buf = &occ_it->value.me.buf;
//
// /* Method 1 */
// U64Segment right_free_space = MargaretMemAllocatorOneBlock_get_right_free_space(
// &occ_it->value.block->el, occ_it);
//
// VkBuffer temp_buf_extension;
// check (vkCreateBuffer(self->device, &(VkBufferCreateInfo){
// .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
// .size = new_size,
// .usage = buf->usage_flags,
// .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
// }, NULL, &temp_buf_extension) == VK_SUCCESS);
// VkMemoryRequirements temp_buf_extension_req;
// vkGetBufferMemoryRequirements(self->device, temp_buf_extension, &temp_buf_extension_req);
// check(U64_is_2pow(temp_buf_extension_req.alignment));
// check((temp_buf_extension_req.memoryTypeBits & (1ull << self->memory_type_id)) > 0)
// if ((occ_start + temp_buf_extension_req.size > right_free_space.start + right_free_space.len) ||
// ((occ_start & (temp_buf_extension_req.alignment - 1)) != 0)
// ){
// vkDestroyBuffer(self->device, temp_buf_extension, NULL);
// rr++;
// continue;
// }
// MargaretMemAllocator__erase_gap(self, occ_it->value.block, right_free_space.start, right_free_space.len);
// MargaretMemAllocator__insert_gap(self, occ_it->value.block,
// occ_start + temp_buf_extension_req.size,
// right_free_space.start + right_free_space.len - (occ_start + temp_buf_extension_req.size));
// VecMargaretMABufferExpansionRecord_append(&buffer_expansion_record, (MargaretMABufferExpansionRecord){
// .old_capacity = buf->capacity, .occ_it = occ_it
// });
// /* Success */
// vkDestroyBuffer(self->device, buf->buffer, NULL);
// buf->capacity = new_size;
// buf->buffer = temp_buf_extension;
// occ_it->value.taken_size = temp_buf_extension_req.size;
// VecMargaretMemAllocatorRequestResizeBuffer_unordered_pop(&requests->expand_buf, rr);
// MargaretMemAllocator__bind_buffer_memory(self, occ_it);
// }
//
// check(vkResetCommandBuffer(self->command_buffer, 0) == VK_SUCCESS);
// check(vkBeginCommandBuffer(self->command_buffer, &(VkCommandBufferBeginInfo){
// .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO
// }) == VK_SUCCESS);
// MargaretMemAllocatorDemands demands = 0;
//
// for (U64 ri = 0; ri < requests->expand_buf.len; ri++) {
// U64 larger_size = requests->expand_buf.buf[ri].new_size;
// RBTreeNode_KVPU64ToMargaretMAOccupation* occ_it = requests->expand_buf.buf[ri].occ_it;
// assert(occ_it->value.me.variant == MargaretMemoryOccupation_Buffer);
// assert(larger_size >= occ_it->value.me.buf.capacity);
//
// VkBuffer bigger_buffer;
// check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
// .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
// .size = larger_size,
// .usage = occ_it->value.me.buf.usage_flags,
// .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
// }, NULL, &bigger_buffer) == VK_SUCCESS);
// VkMemoryRequirements mem_requirements;
// vkGetBufferMemoryRequirements(self->device, bigger_buffer, &mem_requirements);
//
// check(U64_is_2pow(mem_requirements.alignment));
// U8 alignment_exp = U64_2pow_log(mem_requirements.alignment);
// OptionMargaretFreeMemSegment free_gap =
// MargaretMemFreeSpaceManager_search(&self->mem_free_space, alignment_exp, mem_requirements.size);
// if (free_gap.variant == Option_None) {
// vkDestroyBuffer(self->device, bigger_buffer, NULL);
// return MargaretMemAllocator_request_needs_defragmentation(self, requests, buffer_expansion_record, 0, 0);
// }
//
// RBTreeNode_KVPU64ToMargaretMAOccupation* replacer = safe_malloc(sizeof(RBTreeNode_KVPU64ToMargaretMAOccupation));
// RBTree_MapU64ToMargaretMAOccupation* OLD_TREE = &occ_it->value.block->el.occupied_memory;
// RBTree_steal_neighbours(&OLD_TREE->root, OLD_TREE->NIL, &occ_it->base, &replacer->base);
// replacer->key = occ_it->key;
// replacer->value = occ_it->value;
// assert(replacer->value.me.variant == MargaretMemoryOccupation_Buffer);
// occ_it->value.me.buf.buffer = bigger_buffer;
// occ_it->value.me.buf.capacity = larger_size;
//
// MargaretMemAllocator__add_occupant_node_given_gap_any_type(self, occ_it, free_gap.some, mem_requirements.size, alignment_exp);
// MargaretMemAllocator__bind_buffer_memory(self, occ_it);
//
// VecMargaretMANewMovedBufRecord_append(&self->old_moved_buffers,
// (MargaretMANewMovedBufRecord){.replacement = replacer, .my_occ_it = occ_it});
// if (replacer->value.me.buf.preserve_at_quiet) {
// demands = MARGARET_MA_DEMANDS_CMD_BUFFER_BIT;
// vkCmdCopyBuffer(self->command_buffer, replacer->value.me.buf.buffer, bigger_buffer,
// 1, &(VkBufferCopy){0, 0, replacer->value.me.buf.capacity});
// }
// }
//
// for (U64 ri = 0; ri < requests->alloc_buf.len; ri++) {
// MargaretMemAllocatorRequestAllocBuffer* req = &requests->alloc_buf.buf[ri];
//
// VkBuffer fresh_buf;
// check(vkCreateBuffer(self->device, &(VkBufferCreateInfo){
// .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
// .size = req->allocation_size,
// .usage = req->usage,
// .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
// }, NULL, &fresh_buf) == VK_SUCCESS);
// VkMemoryRequirements mem_requirements;
// vkGetBufferMemoryRequirements(self->device, fresh_buf, &mem_requirements);
// check(U64_is_2pow(mem_requirements.alignment));
// U8 alignment_exp = U64_2pow_log(mem_requirements.alignment);
// OptionMargaretFreeMemSegment free_gap =
// MargaretMemFreeSpaceManager_search(&self->mem_free_space, alignment_exp, mem_requirements.size);
// if (free_gap.variant == Option_None) {
// vkDestroyBuffer(self->device, fresh_buf, NULL);
// return MargaretMemAllocator_request_needs_defragmentation(self, requests, buffer_expansion_record, 0, 0);
// }
//
// RBTreeNode_KVPU64ToMargaretMAOccupation* new_node = req->new_node; /* It was allocated for us */
// new_node->value.me = (MargaretMAOccupant){.variant = MargaretMemoryOccupation_Buffer, .buf = {
// .buffer = fresh_buf, .capacity = req->allocation_size, .preserve_at_quiet = req->allocation_size,
// .usage_flags = req->usage
// }};
// MargaretMemAllocator__add_occupant_node_given_gap_any_type(self, new_node, free_gap.some, mem_requirements.size, alignment_exp);
// MargaretMemAllocator__bind_buffer_memory(self, new_node);
// }
//
// MargaretMemAllocatorRequests_sink(requests);
// return demands;
// }

#endif
File diff suppressed because it is too large
@@ -235,23 +235,6 @@ void vkGetPhysicalDeviceProperties2(

typedef int VkCommandBufferResetFlags;

VkResult vkResetCommandBuffer(
    VkCommandBuffer commandBuffer,
    VkCommandBufferResetFlags flags);

typedef int VkCommandBufferUsageFlags;

typedef struct VkCommandBufferBeginInfo {
    VkStructureType sType;
    const void* pNext;
    VkCommandBufferUsageFlags flags;
    const void* pInheritanceInfo; /* will be NULL */
} VkCommandBufferBeginInfo;

VkResult vkBeginCommandBuffer(
    VkCommandBuffer commandBuffer,
    const VkCommandBufferBeginInfo* pBeginInfo);

typedef int VkAccessFlags;
const VkAccessFlags VK_ACCESS_TRANSFER_READ_BIT = 0x100;
const VkAccessFlags VK_ACCESS_TRANSFER_WRITE_BIT = 0x100000;
@@ -259,108 +242,8 @@ const VkAccessFlags VK_ACCESS_TRANSFER_WRITE_BIT = 0x100000;
typedef int VkImageAspectFlags;
const VkImageAspectFlags VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001;

typedef struct VkImageSubresourceRange {
    VkImageAspectFlags aspectMask;
    uint32_t baseMipLevel;
    uint32_t levelCount;
    uint32_t baseArrayLayer;
    uint32_t layerCount;
} VkImageSubresourceRange;

const uint32_t VK_QUEUE_FAMILY_IGNORED = (~0U);

typedef struct VkImageMemoryBarrier {
    VkStructureType sType;
    const void* pNext;
    VkAccessFlags srcAccessMask;
    VkAccessFlags dstAccessMask;
    VkImageLayout oldLayout;
    VkImageLayout newLayout;
    uint32_t srcQueueFamilyIndex;
    uint32_t dstQueueFamilyIndex;
    VkImage image;
    VkImageSubresourceRange subresourceRange;
} VkImageMemoryBarrier;

typedef int VkPipelineStageFlags;
const VkPipelineStageFlags VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x1;
const VkPipelineStageFlags VK_PIPELINE_STAGE_TRANSFER_BIT = 0x2;

typedef int VkDependencyFlags;

void vkCmdPipelineBarrier(
    VkCommandBuffer commandBuffer,
    VkPipelineStageFlags srcStageMask,
    VkPipelineStageFlags dstStageMask,
    VkDependencyFlags dependencyFlags,
    uint32_t memoryBarrierCount,
    const void* /* VkMemoryBarrier */ pMemoryBarriers,
    uint32_t bufferMemoryBarrierCount,
    const void* /* VkBufferMemoryBarrier */ pBufferMemoryBarriers,
    uint32_t imageMemoryBarrierCount,
    const VkImageMemoryBarrier* pImageMemoryBarriers);

typedef struct VkImageSubresourceLayers {
    VkImageAspectFlags aspectMask;
    uint32_t mipLevel;
    uint32_t baseArrayLayer;
    uint32_t layerCount;
} VkImageSubresourceLayers;

typedef struct VkOffset3D {
    int32_t x;
    int32_t y;
    int32_t z;
} VkOffset3D;

typedef struct VkBufferImageCopy {
    VkDeviceSize bufferOffset;
    uint32_t bufferRowLength;
    uint32_t bufferImageHeight;
    VkImageSubresourceLayers imageSubresource;
    VkOffset3D imageOffset;
    VkExtent3D imageExtent;
} VkBufferImageCopy;

void vkCmdCopyBufferToImage(
    VkCommandBuffer commandBuffer,
    VkBuffer srcBuffer,
    VkImage dstImage,
    VkImageLayout dstImageLayout,
    uint32_t regionCount,
    const VkBufferImageCopy* pRegions);

typedef struct VkBufferCopy {
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
} VkBufferCopy;

void vkCmdCopyBuffer(
    VkCommandBuffer commandBuffer,
    VkBuffer srcBuffer,
    VkBuffer dstBuffer,
    uint32_t regionCount,
    const VkBufferCopy* pRegions);

typedef struct VkImageCopy {
    VkImageSubresourceLayers srcSubresource;
    VkOffset3D srcOffset;
    VkImageSubresourceLayers dstSubresource;
    VkOffset3D dstOffset;
    VkExtent3D extent;
} VkImageCopy;

void vkCmdCopyImage(
    VkCommandBuffer commandBuffer,
    VkImage srcImage,
    VkImageLayout srcImageLayout,
    VkImage dstImage,
    VkImageLayout dstImageLayout,
    uint32_t regionCount,
    const VkImageCopy* pRegions);


#include "../../margaret/vulkan_memory_claire.h"

int main(){