From 6b7a67cb1f4a2056f238db3dd2e2616b41ce360f Mon Sep 17 00:00:00 2001 From: Andreew Gregory Date: Fri, 19 Sep 2025 00:09:37 +0300 Subject: [PATCH] Deletion --- src/l1/anne/geom.h | 48 ++-- src/l1/anne/pixel_masses.h | 108 ++++---- src/l1/codegen/codegen.h | 2 +- src/l1/codegen/util_template_inst.h | 236 +++++++++--------- src/l1_5/anne/codegen.c | 2 + src/l1_5/anne/l1_5_templ_very_base.h | 8 + src/l1_5/anne/marie/clipping.h | 22 +- .../codegen/rb_tree_set_map_template_inst.h | 214 +++++++++++----- src/l1_5/codegen/trait_wrap_boil.h | 2 +- src/l1_5/core/rb_tree_node.h | 103 ++++++++ src/l2/tests/t0.c | 3 + 11 files changed, 474 insertions(+), 274 deletions(-) create mode 100644 src/l1_5/anne/l1_5_templ_very_base.h create mode 100644 src/l2/tests/t0.c diff --git a/src/l1/anne/geom.h b/src/l1/anne/geom.h index 9829389..7ed358c 100644 --- a/src/l1/anne/geom.h +++ b/src/l1/anne/geom.h @@ -20,13 +20,13 @@ NODISCARD VecU8 generate_xvecn_struct_and_base_methods(SpanU8 xvec, SpanU8 memb, /* Structure definition */ VecU8_append_span(&res, cstr("typedef struct {\n")); for (int ci = 0; ci < n; ci++) { - VecU8_append_vec(&res, VecU8_fmt(SPACE4 "%s %s;\n", memb, vec_field_name(ci))); + VecU8_append_vec(&res, VecU8_fmt(SPACE "%s %s;\n", memb, vec_field_name(ci))); } VecU8_append_vec(&res, VecU8_fmt("} %s;\n\n", xvecn)); /* xvecn_add_xvecn method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_add_%s(%s A, %s B) {\n" - SPACE4 "return(%s){ ", + SPACE "return(%s){ ", xvecn, xvecn, xvecn, xvecn, xvecn, xvecn)); for (int ci = 0; ci < n; ci++) { if (ci) @@ -37,7 +37,7 @@ NODISCARD VecU8 generate_xvecn_struct_and_base_methods(SpanU8 xvec, SpanU8 memb, /* xvecn_minus_xvecn method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_minus_%s(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xvecn, xvecn, xvecn, xvecn, xvecn, xvecn)); for (int ci = 0; ci < n; ci++) { if (ci) @@ -48,7 +48,7 @@ NODISCARD VecU8 generate_xvecn_struct_and_base_methods(SpanU8 xvec, SpanU8 memb, /* 
xvecn_minus method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_minus(%s A) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xvecn, xvecn, xvecn, xvecn)); for (int ci = 0; ci < n; ci++) { if (ci) @@ -68,7 +68,7 @@ NODISCARD VecU8 generate_xvecn_struct_and_cool_methods(SpanU8 xvec, SpanU8 memb, /* xvecn_length method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_length(%s A) {\n" - SPACE4 "return %s(", + SPACE "return %s(", memb, xvecn, xvecn, sqrt_func)); for (int i = 0; i < n; i++) { if (i) @@ -79,7 +79,7 @@ NODISCARD VecU8 generate_xvecn_struct_and_cool_methods(SpanU8 xvec, SpanU8 memb, /* xvecn_mul_scal method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_mul_scal(%s A, %s B) {\n" - SPACE4 "return (%s) { ", + SPACE "return (%s) { ", xvecn, xvecn, xvecn, memb, xvecn)); for (int ci = 0; ci < n; ci++) { if (ci) @@ -90,12 +90,12 @@ NODISCARD VecU8 generate_xvecn_struct_and_cool_methods(SpanU8 xvec, SpanU8 memb, /* xvecn_div_by_scal method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_div_by_scal(%s A, %s B) {\n" - SPACE4 "return %s_mul_scal(A, 1/B);\n" + SPACE "return %s_mul_scal(A, 1/B);\n" "}\n\n", xvecn, xvecn, xvecn, memb, xvecn)); /* xvecn_mul_xvecn method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_mul_%s(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xvecn, xvecn, xvecn, xvecn, xvecn, xvecn)); for (int ci = 0; ci < n; ci++) { if (ci) @@ -106,7 +106,7 @@ NODISCARD VecU8 generate_xvecn_struct_and_cool_methods(SpanU8 xvec, SpanU8 memb, /* xvecn_dot method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_dot(%s A, %s B) {\n" - SPACE4 "return ", + SPACE "return ", memb, xvecn, xvecn, xvecn)); for (int i = 0; i < n; i++) { if (i) @@ -117,7 +117,7 @@ NODISCARD VecU8 generate_xvecn_struct_and_cool_methods(SpanU8 xvec, SpanU8 memb, /* xvecn_normalize method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_normalize(%s A) {\n" - SPACE4 "return %s_div_by_scal(A, %s_length(A));\n" + SPACE "return %s_div_by_scal(A, %s_length(A));\n" "}\n\n", xvecn, xvecn, 
xvecn, xvecn, xvecn)); VecU8_drop(g_xvecn); @@ -133,7 +133,7 @@ NODISCARD VecU8 generate_xvecn_method_and_one(SpanU8 xvec, int n) { VecU8 res = VecU8_fmt( "%s %s_and_one(%s A) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xvecn_pp, xvecn, xvecn, xvecn_pp); for (int i = 0; i < n; i++) { VecU8_append_vec(&res, VecU8_fmt("A.%s, ", vec_field_name(i))); @@ -150,7 +150,7 @@ NODISCARD VecU8 generate_xvec3_method_cross(SpanU8 xvec) { SpanU8 xvec3 = VecU8_to_span(&g_xvec3); VecU8 res = VecU8_fmt( "%s %s_cross(%s A, %s B) {\n" - SPACE4 "return (%s){A.y * B.z - A.z * B.y, -A.x * B.z + A.z * B.x, A.x * B.y - A.y * B.x};\n" + SPACE "return (%s){A.y * B.z - A.z * B.y, -A.x * B.z + A.z * B.x, A.x * B.y - A.y * B.x};\n" "}\n\n", xvec3, xvec3, xvec3, xvec3, xvec3); VecU8_drop(g_xvec3); return res; @@ -177,9 +177,9 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( int sv = (rows * sizeof_member) % 16; VecU8_append_span(&res, cstr("typedef struct {\n")); for (int x = 0; x < cols; x++) { - VecU8_append_vec(&res, VecU8_fmt(SPACE4 "%s %s;\n", xvecm, vec_field_name(x))); + VecU8_append_vec(&res, VecU8_fmt(SPACE "%s %s;\n", xvecm, vec_field_name(x))); if (sv) { - VecU8_append_vec(&res, VecU8_format(SPACE4 "char _padding_%d[%d];\n", x, 16 - sv)); + VecU8_append_vec(&res, VecU8_format(SPACE "char _padding_%d[%d];\n", x, 16 - sv)); } } VecU8_append_vec(&res, VecU8_fmt("} %s;\n\n", xmatnm)); @@ -192,7 +192,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( VecU8_append_vec(&res, VecU8_fmt("%s %s%s", memb, vec_field_name(x), vec_field_name(y))); } } - VecU8_append_vec(&res, VecU8_fmt(") {\n" SPACE4 "return (%s){ ", xmatnm)); + VecU8_append_vec(&res, VecU8_fmt(") {\n" SPACE "return (%s){ ", xmatnm)); for (int x = 0; x < cols; x++) { if (x) VecU8_append_span(&res, cstr(", ")); @@ -208,7 +208,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( /* xmatnm_add_xmatnm method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_add_%s(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE 
"return (%s){ ", xmatnm, xmatnm, xmatnm, xmatnm, xmatnm, xmatnm)); for (int x = 0; x < cols; x++) { if (x) @@ -220,7 +220,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( /* xmatnm_minus_xmatnm method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_minus_%s(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xmatnm, xmatnm, xmatnm, xmatnm, xmatnm, xmatnm)); for (int x = 0; x < cols; x++) { if (x) @@ -232,7 +232,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( /* xmatnm_minus method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_minus(%s A) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xmatnm, xmatnm, xmatnm, xmatnm)); for (int x = 0; x < cols; x++) { if (x) @@ -244,7 +244,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( /* xmatnm_mul_scal method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_mul_scal(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xmatnm, xmatnm, xmatnm, memb, xmatnm)); for (int x = 0; x < cols; x++) { if (x) @@ -256,7 +256,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( /* xmatnm_div_by_scal method */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_div_by_scal(%s A, %s B) {\n" - SPACE4 "return %s_mul_scal(A, 1/B);\n" + SPACE "return %s_mul_scal(A, 1/B);\n" "}\n\n", xmatnm, xmatnm, xmatnm, memb, xmatnm)); @@ -265,7 +265,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( /* xmatnm_mul_xvecn */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_mul_%s(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xvecm, xmatnm, xvecn, xmatnm, xvecn, xvecm)); for (int y = 0; y < rows; y++) { if (y) @@ -277,7 +277,7 @@ NODISCARD VecU8 generate_xmatnm_struct_and_methods( VecU8_append_vec(&res, VecU8_fmt("A.%s.%s * B.%s", vec_field_name(x), vec_field_name(y), vec_field_name(x))); } } - VecU8_append_span(&res, cstr("\n" SPACE4 "};\n}\n\n")); + VecU8_append_span(&res, cstr("\n" SPACE "};\n}\n\n")); VecU8_drop(g_xvecn); VecU8_drop(g_xvecm); @@ -293,7 +293,7 @@ NODISCARD VecU8 
generate_xmatnm_transpose_method(SpanU8 xmat, SpanU8 xvec, SpanU SpanU8 xmatmn = VecU8_to_span(&g_xmatmn); VecU8 res = VecU8_fmt( "%s %s_transpose(%s A) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xmatmn, xmatnm, xmatnm, xmatmn); for (int bx = 0; bx < rows; bx++) { if (bx) @@ -347,7 +347,7 @@ NODISCARD VecU8 generate_xmatnm_method_mul_xmatkn(SpanU8 xmat, int n, int m, int VecU8 res = VecU8_fmt( "%s %s_mul_%s(%s A, %s B) {\n" - SPACE4 "return (%s){ ", + SPACE "return (%s){ ", xmatkm, xmatnm, xmatkn, xmatnm, xmatkn, xmatkm); for (int x = 0; x < k; x++) { if (x) diff --git a/src/l1/anne/pixel_masses.h b/src/l1/anne/pixel_masses.h index 8a7868d..a40ef32 100644 --- a/src/l1/anne/pixel_masses.h +++ b/src/l1/anne/pixel_masses.h @@ -7,8 +7,8 @@ NODISCARD VecU8 generate_texture_data_method_at(SpanU8 tex, SpanU8 pixvec, SpanU8 memb, bool const_access) { return VecU8_fmt( "%s%s* %s_%sat(%s%s* self, size_t x, size_t y) {\n" - SPACE4 "assert(x < self->width);\n" - SPACE4 "return %s_%sat(&self->pixels, x + y * self->width);\n" + SPACE "assert(x < self->width);\n" + SPACE "return %s_%sat(&self->pixels, x + y * self->width);\n" "}\n\n", const_access ? cstr("const ") : cstr(""), memb, tex, const_access ? cstr("") : cstr("m"), const_access ? cstr("const ") : cstr(""), tex, pixvec, const_access ? 
cstr("") : cstr("m")); @@ -22,24 +22,24 @@ NODISCARD VecU8 generate_texture_data_struct_and_necc_methods(SpanU8 tex, SpanU8 VecU8 res = VecU8_fmt( "typedef struct {\n" - SPACE4 "%s pixels;\n" - SPACE4 "size_t width;\n" + SPACE "%s pixels;\n" + SPACE "size_t width;\n" "} %s;\n\n", pixvec, tex); /* Method _new() */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_new(U32 width, U32 height) {\n" - SPACE4 "assert(!(SIZE_MAX / width / height < 100 || UINT32_MAX / width < 10 || UINT32_MAX / height < 10));\n" - SPACE4 "return (%s){.pixels = %s_new_zeroinit((size_t)width * height), .width = width};\n" + SPACE "assert(!(SIZE_MAX / width / height < 100 || UINT32_MAX / width < 10 || UINT32_MAX / height < 10));\n" + SPACE "return (%s){.pixels = %s_new_zeroinit((size_t)width * height), .width = width};\n" "}\n\n", tex, tex, tex, pixvec)); /* Method _drop() */ VecU8_append_vec(&res, VecU8_fmt( "void %s_drop(%s self) {\n" - SPACE4 "%s_drop(self.pixels);\n" + SPACE "%s_drop(self.pixels);\n" "}\n\n", tex, tex, pixvec)); /* Method _get_height() */ VecU8_append_vec(&res, VecU8_fmt( "size_t %s_get_height(const %s* self) {\n" - SPACE4 "return self->pixels.len / self->width;\n" + SPACE "return self->pixels.len / self->width;\n" "}\n\n", tex, tex)); /* Methods _at and _cat */ VecU8_append_vec(&res, generate_texture_data_method_at(tex, pixvec, memb, false)); @@ -47,7 +47,7 @@ NODISCARD VecU8 generate_texture_data_struct_and_necc_methods(SpanU8 tex, SpanU8 /* Method _get_size_in_bytes */ VecU8_append_vec(&res, VecU8_fmt( "size_t %s_get_size_in_bytes(const %s* self) {\n" - SPACE4 "return self->pixels.len * sizeof(%s);\n" + SPACE "return self->pixels.len * sizeof(%s);\n" "}\n\n", tex, tex, memb)); /* Method _to_bitmap_text() * We use the assumption that bytes in type member are tightly packed @@ -55,75 +55,75 @@ NODISCARD VecU8 generate_texture_data_struct_and_necc_methods(SpanU8 tex, SpanU8 */ VecU8_append_vec(&res, VecU8_fmt( "VecU8 %s_to_bitmap_text(const %s* self) {\n" - SPACE4 "assert(SIZE_MAX 
/ self->pixels.len >= 100);\n" - SPACE4 "size_t len = self->pixels.len * sizeof(%s);\n" - SPACE4 "VecU8 res = VecU8_new_zeroinit(8 + len);\n" - SPACE4 "size_t width = self->width;\n" - SPACE4 "size_t height = self->pixels.len / self->width;\n" - SPACE4 "assert(UINT32_MAX / width >= 10 && UINT32_MAX / height >= 10);\n" - SPACE4 "for (int i = 0; i < 4; i++)\n" - SPACE4 SPACE4 "*VecU8_mat(&res, 0 + i) = (width >> (8 * i)) & 0xff;\n" - SPACE4 "for (int i = 0; i < 4; i++)\n" - SPACE4 SPACE4 "*VecU8_mat(&res, 4 + i) = (height >> (8 * i)) & 0xff;\n" - SPACE4 "memcpy(res.buf + 8, self->pixels.buf, len);\n" - SPACE4 "return res;\n" + SPACE "assert(SIZE_MAX / self->pixels.len >= 100);\n" + SPACE "size_t len = self->pixels.len * sizeof(%s);\n" + SPACE "VecU8 res = VecU8_new_zeroinit(8 + len);\n" + SPACE "size_t width = self->width;\n" + SPACE "size_t height = self->pixels.len / self->width;\n" + SPACE "assert(UINT32_MAX / width >= 10 && UINT32_MAX / height >= 10);\n" + SPACE "for (int i = 0; i < 4; i++)\n" + SPACE SPACE "*VecU8_mat(&res, 0 + i) = (width >> (8 * i)) & 0xff;\n" + SPACE "for (int i = 0; i < 4; i++)\n" + SPACE SPACE "*VecU8_mat(&res, 4 + i) = (height >> (8 * i)) & 0xff;\n" + SPACE "memcpy(res.buf + 8, self->pixels.buf, len);\n" + SPACE "return res;\n" "}\n\n", tex, tex, memb)); /* Method _write_to_file * Aborts on failure */ VecU8_append_vec(&res, VecU8_fmt( "void %s_write_to_file(const %s* self, const char* path) {\n" - SPACE4 "VecU8 data = %s_to_bitmap_text(self);\n" - SPACE4 "write_whole_file_or_abort(path, VecU8_to_span(&data));\n" - SPACE4 "VecU8_drop(data);\n" + SPACE "VecU8 data = %s_to_bitmap_text(self);\n" + SPACE "write_whole_file_or_abort(path, VecU8_to_span(&data));\n" + SPACE "VecU8_drop(data);\n" "}\n\n", tex, tex, tex)); /* Result structure */ VecU8 g_resoftex = VecU8_fmt("Result%sOrSpanU8", tex); SpanU8 resoftex = VecU8_to_span(&g_resoftex); VecU8_append_vec(&res, VecU8_fmt( "typedef struct {\n" - SPACE4 "Result_variant variant;\n" - SPACE4 "union 
{\n" - SPACE4 SPACE4 "%s ok;\n" - SPACE4 SPACE4 "SpanU8 err;\n" - SPACE4 "};\n" + SPACE "Result_variant variant;\n" + SPACE "union {\n" + SPACE SPACE "%s ok;\n" + SPACE SPACE "SpanU8 err;\n" + SPACE "};\n" "} %s;\n\n", tex, resoftex)); /* Method _from_bitmap_text() * We assume that bytes are tightly packed in member type */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_from_bitmap_text(SpanU8 text) {\n" - SPACE4 "if (text.len < 8)\n" - SPACE4 SPACE4 "return (%s){.variant = Result_Err, .err = cstr(\"No header *crying emoji*\")};\n" - SPACE4 "size_t width = 0, height = 0;\n" - SPACE4 "for (int i = 0; i < 4; i++)\n" - SPACE4 SPACE4 "width |= (((size_t)*SpanU8_at(text, 0 + i)) << (8 * i));\n" - SPACE4 "for (int i = 0; i < 4; i++)\n" - SPACE4 SPACE4 "height |= (((size_t)*SpanU8_at(text, 4 + i)) << (8 * i));\n" - SPACE4 "if (SIZE_MAX / width / height < 100 || UINT32_MAX / width < 10 || UINT32_MAX / height < 10)\n" - SPACE4 SPACE4 "return (%s){.variant = Result_Err, .err = cstr(\"Image is too big\")};\n" - SPACE4 "size_t len = width * height * sizeof(%s);\n" - SPACE4 "if (text.len < 8 + len)\n" - SPACE4 SPACE4 "return (%s){.variant = Result_Err, .err = cstr(\"Texture size and file size mismatch\")};\n" - SPACE4 "%s res = %s_new(width, height);\n" - SPACE4 "memcpy(res.pixels.buf, text.data + 8, len);\n" - SPACE4 "return (%s){.variant = Result_Ok, .ok = res};\n" + SPACE "if (text.len < 8)\n" + SPACE SPACE "return (%s){.variant = Result_Err, .err = cstr(\"No header *crying emoji*\")};\n" + SPACE "size_t width = 0, height = 0;\n" + SPACE "for (int i = 0; i < 4; i++)\n" + SPACE SPACE "width |= (((size_t)*SpanU8_at(text, 0 + i)) << (8 * i));\n" + SPACE "for (int i = 0; i < 4; i++)\n" + SPACE SPACE "height |= (((size_t)*SpanU8_at(text, 4 + i)) << (8 * i));\n" + SPACE "if (SIZE_MAX / width / height < 100 || UINT32_MAX / width < 10 || UINT32_MAX / height < 10)\n" + SPACE SPACE "return (%s){.variant = Result_Err, .err = cstr(\"Image is too big\")};\n" + SPACE "size_t len = width * 
height * sizeof(%s);\n" + SPACE "if (text.len < 8 + len)\n" + SPACE SPACE "return (%s){.variant = Result_Err, .err = cstr(\"Texture size and file size mismatch\")};\n" + SPACE "%s res = %s_new(width, height);\n" + SPACE "memcpy(res.pixels.buf, text.data + 8, len);\n" + SPACE "return (%s){.variant = Result_Ok, .ok = res};\n" "}\n\n", resoftex, tex, resoftex, resoftex, memb, resoftex, tex, tex, resoftex)); /* Method _read_from_file */ VecU8_append_vec(&res, VecU8_fmt( "%s %s_read_from_file(const char* path) {\n" - SPACE4 "VecU8 data = read_whole_file_or_abort(path);\n" - SPACE4 "%s res = %s_from_bitmap_text(VecU8_to_span(&data));\n" - SPACE4 "if (res.variant != Result_Ok) {\n" - SPACE4 SPACE4 "fprintf(stderr, \"Tried loading bitmap texture from file, but encountered decoding error: \");\n" - SPACE4 SPACE4 "SpanU8_fprint(res.err, stderr);\n" - SPACE4 SPACE4 "abortf(\"\\n\");\n" - SPACE4 "}\n" - SPACE4 "VecU8_drop(data);\n" - SPACE4 "return res.ok;\n" + SPACE "VecU8 data = read_whole_file_or_abort(path);\n" + SPACE "%s res = %s_from_bitmap_text(VecU8_to_span(&data));\n" + SPACE "if (res.variant != Result_Ok) {\n" + SPACE SPACE "fprintf(stderr, \"Tried loading bitmap texture from file, but encountered decoding error: \");\n" + SPACE SPACE "SpanU8_fprint(res.err, stderr);\n" + SPACE SPACE "abortf(\"\\n\");\n" + SPACE "}\n" + SPACE "VecU8_drop(data);\n" + SPACE "return res.ok;\n" "}\n\n", tex, tex, resoftex, tex)); /* Method _is_inside() */ VecU8_append_vec(&res, VecU8_fmt( "bool %s_is_inside(const %s* self, S32 x, S32 y) {\n" - SPACE4 "return x >= 0 && y >= 0 && x < (S32)self->width && self->width * y + x < self->pixels.len;\n" + SPACE "return x >= 0 && y >= 0 && x < (S32)self->width && self->width * y + x < self->pixels.len;\n" "}\n\n", tex, tex)); VecU8_drop(g_resoftex); diff --git a/src/l1/codegen/codegen.h b/src/l1/codegen/codegen.h index 668ab60..61741f0 100644 --- a/src/l1/codegen/codegen.h +++ b/src/l1/codegen/codegen.h @@ -37,7 +37,7 @@ void 
finish_header(GeneratedHeader header) { VecU8_drop(header.result); } -#define SPACE4 " " +#define SPACE " " #define SPACE8 " " #define SPACE12 " " #define SPACE16 " " diff --git a/src/l1/codegen/util_template_inst.h b/src/l1/codegen/util_template_inst.h index 58a0314..9fc775d 100644 --- a/src/l1/codegen/util_template_inst.h +++ b/src/l1/codegen/util_template_inst.h @@ -11,9 +11,9 @@ NODISCARD VecU8 generate_VecT_struct_and_base_methods(SpanU8 T, bool primitive, SpanU8 VecT = VecU8_to_span(&g_VecT); VecU8 res = VecU8_fmt( "typedef struct {\n" - SPACE4 "%s* buf;\n" - SPACE4 "size_t len;\n" - SPACE4 "size_t capacity;\n" + SPACE "%s* buf;\n" + SPACE "size_t len;\n" + SPACE "size_t capacity;\n" "} %s;\n\n", T, VecT); VecU8_append_vec(&res, VecU8_fmt("#define %s_new() ((%s){ 0 })\n\n", VecT, VecT)); @@ -21,76 +21,76 @@ NODISCARD VecU8 generate_VecT_struct_and_base_methods(SpanU8 T, bool primitive, VecU8_append_vec(&res, VecU8_fmt("void %s_drop(%s self) {\n", VecT, VecT)); if (!primitive) { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "for (size_t i = 0; i < self.len; i++) \n" - SPACE4 SPACE4 "%s_drop(self.buf[i]);\n", T)); + SPACE "for (size_t i = 0; i < self.len; i++) \n" + SPACE SPACE "%s_drop(self.buf[i]);\n", T)); } VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "free(self.buf);\n" + SPACE "free(self.buf);\n" "}\n\n")); VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_new_reserved(size_t n) {\n" - SPACE4 "return (%s){ .buf = safe_calloc(n, sizeof(%s)), .len = 0, .capacity = n };\n" + SPACE "return (%s){ .buf = safe_calloc(n, sizeof(%s)), .len = 0, .capacity = n };\n" "}\n\n", VecT, VecT, VecT, T)); VecU8_append_vec(&res, VecU8_fmt( "void %s_append(%s* self, %s el) {\n" - SPACE4 "size_t new_length = self->len + 1;\n" - SPACE4 "if (new_length > self->capacity) {\n" - SPACE4 SPACE4 "size_t new_capacity = Vec_get_new_capacity(self->capacity, new_length);\n" - SPACE4 SPACE4 "self->buf = safe_realloc(self->buf, new_capacity * sizeof(%s));\n" - SPACE4 SPACE4 "self->capacity = 
new_capacity;\n" - SPACE4 "}\n" - SPACE4 "self->buf[self->len] = el;\n" - SPACE4 "self->len = new_length;\n" + SPACE "size_t new_length = self->len + 1;\n" + SPACE "if (new_length > self->capacity) {\n" + SPACE SPACE "size_t new_capacity = Vec_get_new_capacity(self->capacity, new_length);\n" + SPACE SPACE "self->buf = safe_realloc(self->buf, new_capacity * sizeof(%s));\n" + SPACE SPACE "self->capacity = new_capacity;\n" + SPACE "}\n" + SPACE "self->buf[self->len] = el;\n" + SPACE "self->len = new_length;\n" "}\n\n", VecT, VecT, T, T)); VecU8_append_vec(&res, VecU8_fmt( "%s* %s_mat(%s* self, size_t i) {\n" - SPACE4 "assert(i < self->len);\n" - SPACE4 "return &self->buf[i];\n" + SPACE "assert(i < self->len);\n" + SPACE "return &self->buf[i];\n" "}\n\n", T, VecT, VecT)); VecU8_append_vec(&res, VecU8_fmt( "const %s* %s_at(const %s* self, size_t i) {\n" - SPACE4 "assert(i < self->len);\n" - SPACE4 "return &self->buf[i];\n" + SPACE "assert(i < self->len);\n" + SPACE "return &self->buf[i];\n" "}\n\n", T, VecT, VecT)); if (clonable) { VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_clone(const %s* self) {\n" - SPACE4 "%s res = (%s){.buf = safe_calloc(self->len, sizeof(%s)), .len = self->len, .capacity = self->len};\n", + SPACE "%s res = (%s){.buf = safe_calloc(self->len, sizeof(%s)), .len = self->len, .capacity = self->len};\n", VecT, VecT, VecT, VecT, VecT, T)); if (primitive) { - VecU8_append_vec(&res, VecU8_fmt(SPACE4 "memcpy(res.buf, self->buf, self->len * sizeof(%s));\n", T)); + VecU8_append_vec(&res, VecU8_fmt(SPACE "memcpy(res.buf, self->buf, self->len * sizeof(%s));\n", T)); } else { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "for (size_t i = 0; i < self->len; i++)\n" - SPACE4 SPACE4 "res.buf[i] = %s_clone(&self->buf[i]);\n", T)); + SPACE "for (size_t i = 0; i < self->len; i++)\n" + SPACE SPACE "res.buf[i] = %s_clone(&self->buf[i]);\n", T)); } - VecU8_append_span(&res, cstr(SPACE4 "return res;\n}\n\n")); + VecU8_append_span(&res, cstr(SPACE "return 
res;\n}\n\n")); } VecU8_append_vec(&res, VecU8_fmt( "void %s_append_vec(%s* self, %s b) {\n" - SPACE4 "size_t new_length = self->len + b.len;\n" - SPACE4 "if (new_length > self->capacity) {\n" - SPACE4 SPACE4 "size_t new_capacity = Vec_get_new_capacity(self->capacity, new_length);\n" - SPACE4 SPACE4 "self->buf = safe_realloc(self->buf, new_capacity * sizeof(%s));\n" - SPACE4 SPACE4 "self->capacity = new_capacity;\n" - SPACE4 "}\n" - SPACE4 "for (size_t i = 0; i < b.len; i++){\n" - SPACE4 SPACE4 "self->buf[self->len + i] = b.buf[i];\n" - SPACE4 "}\n" - SPACE4 "self->len = new_length;\n" - SPACE4 "free(b.buf);\n" + SPACE "size_t new_length = self->len + b.len;\n" + SPACE "if (new_length > self->capacity) {\n" + SPACE SPACE "size_t new_capacity = Vec_get_new_capacity(self->capacity, new_length);\n" + SPACE SPACE "self->buf = safe_realloc(self->buf, new_capacity * sizeof(%s));\n" + SPACE SPACE "self->capacity = new_capacity;\n" + SPACE "}\n" + SPACE "for (size_t i = 0; i < b.len; i++){\n" + SPACE SPACE "self->buf[self->len + i] = b.buf[i];\n" + SPACE "}\n" + SPACE "self->len = new_length;\n" + SPACE "free(b.buf);\n" "}\n\n", VecT, VecT, VecT, T)); if (primitive) { VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_new_zeroinit(size_t len) {\n" - SPACE4 "return (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" + SPACE "return (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" "}\n\n", VecT, VecT, VecT, T)); } @@ -106,61 +106,61 @@ NODISCARD VecU8 generate_VecT_trivmove_extended_methods(SpanU8 T, bool primitive VecU8_append_vec(&res, VecU8_fmt( "%s%s %s_pop(%s* self) {\n" - SPACE4 "assert(self->len > 0);\n" - SPACE4 "self->len--;\n" - SPACE4 "return self->buf[self->len];\n" + SPACE "assert(self->len > 0);\n" + SPACE "self->len--;\n" + SPACE "return self->buf[self->len];\n" "}\n\n", primitive ? 
cstr("") : cstr("NODISCARD "), T, VecT, VecT)); if (!primitive) { VecU8_append_vec(&res, VecU8_fmt( "void %s_pop_and_drop(%s* self) {\n" - SPACE4 "assert(self->len > 0);\n" - SPACE4 "%s_drop(self->buf[self->len - 1]);\n" - SPACE4 "self->len--;\n" + SPACE "assert(self->len > 0);\n" + SPACE "%s_drop(self->buf[self->len - 1]);\n" + SPACE "self->len--;\n" "}\n\n", VecT, VecT, T)); } VecU8_append_vec(&res, VecU8_fmt( "%s%s %s_unordered_pop(%s* self, size_t ind) {\n" - SPACE4 "assert(ind < self->len);\n" - SPACE4 "%s res = self->buf[ind];\n" - SPACE4 "self->buf[ind] = self->buf[self->len - 1];\n" - SPACE4 "self->len--;\n" - SPACE4 "return res;\n" + SPACE "assert(ind < self->len);\n" + SPACE "%s res = self->buf[ind];\n" + SPACE "self->buf[ind] = self->buf[self->len - 1];\n" + SPACE "self->len--;\n" + SPACE "return res;\n" "}\n\n", primitive ? cstr("") : cstr("NODISCARD "), T, VecT, VecT, T)); if (!primitive) { VecU8_append_vec(&res, VecU8_fmt( "void %s_unordered_pop_and_drop(%s* self, size_t ind) {\n" - SPACE4 "assert(ind < self->len);\n" - SPACE4 "%s_drop(self->buf[ind]);\n" - SPACE4 "self->buf[ind] = self->buf[self->len - 1];\n" - SPACE4 "self->len--;\n" + SPACE "assert(ind < self->len);\n" + SPACE "%s_drop(self->buf[ind]);\n" + SPACE "self->buf[ind] = self->buf[self->len - 1];\n" + SPACE "self->len--;\n" "}\n\n", VecT, VecT, T)); } VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_swap_with_empty(%s* cell) {\n" - SPACE4 "%s val = *cell;\n" - SPACE4 "*cell = (%s){NULL, 0, 0};\n" - SPACE4 "return val;\n" + SPACE "%s val = *cell;\n" + SPACE "*cell = (%s){NULL, 0, 0};\n" + SPACE "return val;\n" "}\n\n", VecT, VecT, VecT, VecT, VecT)); if (primitive) { VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_new_filled(size_t len, %s el) {\n" - SPACE4 "%s res = (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" - SPACE4 "for (size_t i = 0; i < len; i++)\n" - SPACE4 SPACE4 "res.buf[i] = el;\n" - SPACE4 "return res;\n" + SPACE "%s res = (%s){.buf = 
safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" + SPACE "for (size_t i = 0; i < len; i++)\n" + SPACE SPACE "res.buf[i] = el;\n" + SPACE "return res;\n" "}\n\n", VecT, VecT, T, VecT, VecT, T)); } else if (clonable) { VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_new_filled(size_t len, const %s* el) {\n" - SPACE4 "%s res = (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" - SPACE4 "for (size_t i = 0; i < len; i++)\n" - SPACE4 SPACE4 "res.buf[i] = %s_clone(el);\n" - SPACE4 "return res;\n" + SPACE "%s res = (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" + SPACE "for (size_t i = 0; i < len; i++)\n" + SPACE SPACE "res.buf[i] = %s_clone(el);\n" + SPACE "return res;\n" "}\n\n", VecT, VecT, T, VecT, VecT, T, T)); } @@ -174,18 +174,18 @@ NODISCARD VecU8 generate_VecT_equal_method(SpanU8 T, bool integer) { SpanU8 VecT = VecU8_to_span(&g_VecT); VecU8 res = VecU8_fmt( "bool %s_equal_%s(const %s* A, const %s* B) {\n" - SPACE4 "if (A->len != B->len)\n" - SPACE4 SPACE4 "return false;\n" - SPACE4 "for (size_t i = 0; i < A->len; i++) {\n", VecT, VecT, VecT, VecT); + SPACE "if (A->len != B->len)\n" + SPACE SPACE "return false;\n" + SPACE "for (size_t i = 0; i < A->len; i++) {\n", VecT, VecT, VecT, VecT); if (integer) { VecU8_append_span(&res, cstr(SPACE8 "if (A->buf[i] != B->buf[i])\n")); } else { VecU8_append_vec(&res, VecU8_fmt(SPACE8 "if (!%s_equal_%s(A->buf + i, B->buf + i))\n", T, T)); } VecU8_append_span(&res, cstr( - SPACE4 SPACE4 SPACE4 "return false;\n" - SPACE4 "}\n" - SPACE4 "return true;\n" + SPACE SPACE SPACE "return false;\n" + SPACE "}\n" + SPACE "return true;\n" "}\n\n" )); @@ -199,10 +199,10 @@ NODISCARD VecU8 generate_VecT_new_of_size_method(SpanU8 T) { SpanU8 VecT = VecU8_to_span(&g_VecT); VecU8 res = VecU8_fmt( "NODISCARD %s %s_new_of_size(size_t len) {\n" - SPACE4 "%s res = (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" - SPACE4 "for (size_t i = 0; i < len; 
i++)\n" - SPACE4 SPACE4 "res.buf[i] = %s_new();\n" - SPACE4 "return res;\n" + SPACE "%s res = (%s){.buf = safe_calloc(len, sizeof(%s)), .len = len, .capacity = len};\n" + SPACE "for (size_t i = 0; i < len; i++)\n" + SPACE SPACE "res.buf[i] = %s_new();\n" + SPACE "return res;\n" "}\n", VecT, VecT, VecT, VecT, T, T); VecU8_drop(g_VecT); @@ -213,7 +213,7 @@ NODISCARD VecU8 generate_VecT_new_of_size_method(SpanU8 T) { void codegen_append_some_span_equal_method(VecU8* res, SpanU8 SpanT) { VecU8_append_vec(res, VecU8_fmt( "bool %s_equal_%s(%s A, %s B) {\n" - SPACE4 "return A->data == B->data && A->len == B->len;\n" + SPACE "return A->data == B->data && A->len == B->len;\n" "}\n\n", SpanT, SpanT, SpanT, SpanT)); } @@ -221,8 +221,8 @@ void codegen_append_some_span_equal_method(VecU8* res, SpanU8 SpanT) { void codegen_append_some_span_struct(VecU8* res, SpanU8 T, SpanU8 SpanT, SpanU8 mod) { VecU8_append_vec(res, VecU8_fmt( "typedef struct {\n" - SPACE4 "%s%s* data;\n" - SPACE4 "size_t len;\n" + SPACE "%s%s* data;\n" + SPACE "size_t len;\n" "} %s;\n\n", mod, T, SpanT)); } @@ -230,8 +230,8 @@ void codegen_append_some_span_struct(VecU8* res, SpanU8 T, SpanU8 SpanT, SpanU8 void codegen_append_some_span_at_method(VecU8* res, SpanU8 T, SpanU8 SpanT, SpanU8 mod) { VecU8_append_vec(res, VecU8_fmt( "%s%s* %s_at(%s self, size_t i) {\n" - SPACE4 "assert(i < self.len);\n" - SPACE4 "return self.data + i;\n" + SPACE "assert(i < self.len);\n" + SPACE "return self.data + i;\n" "}\n\n", mod, T, SpanT, SpanT)); } @@ -240,8 +240,8 @@ void codegen_append_some_span_at_method(VecU8* res, SpanU8 T, SpanU8 SpanT, Span void codegen_append_some_span_span_method(VecU8* res, SpanU8 SpanT) { VecU8_append_vec(res, VecU8_fmt( "%s %s_span(%s self, size_t start, size_t len){\n" - SPACE4 "assert(start < SIZE_MAX - len && start + len <= self.len);\n" - SPACE4 "return (%s){.data = self.data + start, .len = len};\n" + SPACE "assert(start < SIZE_MAX - len && start + len <= self.len);\n" + SPACE "return 
(%s){.data = self.data + start, .len = len};\n" "}\n\n", SpanT, SpanT, SpanT, SpanT)); } @@ -269,7 +269,7 @@ NODISCARD VecU8 generate_SpanT_struct_and_methods( if (add_mutable) { VecU8_append_vec(&res, VecU8_fmt( "%s %s_to_%s(%s self) {\n" - SPACE4 "return (%s){.data = self.data, .len = self.len};\n" + SPACE "return (%s){.data = self.data, .len = self.len};\n" "}\n\n", SpanT, MutSpanT, SpanT, MutSpanT, SpanT)); } codegen_append_some_span_at_method(&res, T, SpanT, cstr("const ")); @@ -285,18 +285,18 @@ NODISCARD VecU8 generate_SpanT_struct_and_methods( assert(add_mutable); VecU8_append_vec(&res, VecU8_fmt( "int %s_qcompare(const void* a, const void* b) {\n" - SPACE4 "const %s* A = a;\n" - SPACE4 "const %s* B = b;\n", T, T, T)); + SPACE "const %s* A = a;\n" + SPACE "const %s* B = b;\n", T, T, T)); if (integer) { - VecU8_append_span(&res, cstr(SPACE4 "return (int)(B < A) - (int)(A < B);\n")); + VecU8_append_span(&res, cstr(SPACE "return (int)(B < A) - (int)(A < B);\n")); } else { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "return (int)%s_less_%s(B, A) - (int)%s_less_%s(A, B);\n", T, T, T, T)); + SPACE "return (int)%s_less_%s(B, A) - (int)%s_less_%s(A, B);\n", T, T, T, T)); } VecU8_append_vec(&res, VecU8_fmt( "}\n\n" "void %s_sort(%s self) {\n" - SPACE4 "qsort(self.data, self.len, sizeof(%s), %s_qcompare);\n" + SPACE "qsort(self.data, self.len, sizeof(%s), %s_qcompare);\n" "}\n\n", MutSpanT, MutSpanT, T, T)); } @@ -320,63 +320,63 @@ NODISCARD VecU8 generate_SpanT_VecT_trivmove_collab(SpanU8 T, bool primitive, bo if (clonable) { VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_from_span(%s src){\n" - SPACE4 "%s res = (%s){ .buf = safe_calloc(src.len, sizeof(%s)), .len = src.len, .capacity = src.len };\n", + SPACE "%s res = (%s){ .buf = safe_calloc(src.len, sizeof(%s)), .len = src.len, .capacity = src.len };\n", VecT, VecT, SpanT, VecT, VecT, T)); if (primitive) { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "memcpy(res.buf, src.data, src.len * sizeof(%s));\n", T)); + 
SPACE "memcpy(res.buf, src.data, src.len * sizeof(%s));\n", T)); } else { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "for (size_t i = 0; i < src.len; i++)\n" + SPACE "for (size_t i = 0; i < src.len; i++)\n" SPACE8 "res.buf[i] = %s_clone(&src.data[i]);\n", T)); } - VecU8_append_span(&res, cstr(SPACE4 "return res;\n}\n\n")); + VecU8_append_span(&res, cstr(SPACE "return res;\n}\n\n")); } VecU8_append_vec(&res, VecU8_fmt( "%s %s_to_span(const %s* vec){\n" - SPACE4 "return (%s){vec->buf, vec->len};\n" + SPACE "return (%s){vec->buf, vec->len};\n" "}\n\n", SpanT, VecT, VecT, SpanT)); if (add_mutable) { VecU8_append_vec(&res, VecU8_fmt( "%s %s_to_mspan(%s* vec){\n" - SPACE4 "return (%s){vec->buf, vec->len};\n" + SPACE "return (%s){vec->buf, vec->len};\n" "}\n\n", MutSpanT, VecT, VecT, MutSpanT)); } if (clonable) { VecU8_append_vec(&res, VecU8_fmt( "void %s_append_span(%s* self, %s b) {\n" - SPACE4 "size_t new_length = self->len + b.len;\n" - SPACE4 "if (new_length > self->capacity) {\n" - SPACE4 SPACE4 "size_t new_capacity = Vec_get_new_capacity(self->capacity, new_length);\n" - SPACE4 SPACE4 "self->buf = safe_realloc(self->buf, new_capacity * sizeof(%s));\n" - SPACE4 SPACE4 "self->capacity = new_capacity;\n" - SPACE4 "}\n", VecT, VecT, SpanT, T)); + SPACE "size_t new_length = self->len + b.len;\n" + SPACE "if (new_length > self->capacity) {\n" + SPACE SPACE "size_t new_capacity = Vec_get_new_capacity(self->capacity, new_length);\n" + SPACE SPACE "self->buf = safe_realloc(self->buf, new_capacity * sizeof(%s));\n" + SPACE SPACE "self->capacity = new_capacity;\n" + SPACE "}\n", VecT, VecT, SpanT, T)); if (primitive) { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "memcpy(self->buf + self->len, b.data, b.len * sizeof(%s));\n", T)); + SPACE "memcpy(self->buf + self->len, b.data, b.len * sizeof(%s));\n", T)); } else { VecU8_append_vec(&res, VecU8_fmt( - SPACE4 "for (size_t i = 0; i < b.len; i++)\n" - SPACE4 SPACE4 "self->buf[self->len + i] = %s_clone(&b.data[i]);\n", T)); + SPACE 
"for (size_t i = 0; i < b.len; i++)\n" + SPACE SPACE "self->buf[self->len + i] = %s_clone(&b.data[i]);\n", T)); } VecU8_append_span(&res, cstr( - SPACE4 "self->len = new_length;\n" + SPACE "self->len = new_length;\n" "}\n\n")); } if (add_extended) { VecU8_append_vec(&res, VecU8_fmt( "%s %s_span(const %s* vec, size_t start, size_t len) {\n" - SPACE4 "assert(start < SIZE_MAX - len && start + len <= vec->len);\n" - SPACE4 "return (%s){.data = vec->buf + start, .len = len};\n" + SPACE "assert(start < SIZE_MAX - len && start + len <= vec->len);\n" + SPACE "return (%s){.data = vec->buf + start, .len = len};\n" "}\n\n", SpanT, VecT, VecT, SpanT)); if (add_mutable) { VecU8_append_vec(&res, VecU8_fmt( "%s %s_mspan(%s* vec, size_t start, size_t len) {\n" - SPACE4 "assert(start < SIZE_MAX - len && start + len <= vec->len);\n" - SPACE4 "return (%s){.data = vec->buf + start, .len = len};\n" + SPACE "assert(start < SIZE_MAX - len && start + len <= vec->len);\n" + SPACE "return (%s){.data = vec->buf + start, .len = len};\n" "}\n\n", MutSpanT, VecT, VecT, MutSpanT)); } } @@ -578,40 +578,40 @@ NODISCARD VecU8 generate_OptionT_struct_and_methods(option_template_instantiatio if (op.t_ptr) { VecU8_append_vec(&res, VecU8_fmt("typedef %s %s;\n", op.T, OptionT)); VecU8_append_vec(&res, VecU8_fmt("#define None_%s() NULL\n", op.T)); - VecU8_append_vec(&res, VecU8_fmt("%s Some_%s(%s ref) {\n" SPACE4 "return ref;\n}\n\n", OptionT, op.T, op.T)); + VecU8_append_vec(&res, VecU8_fmt("%s Some_%s(%s ref) {\n" SPACE "return ref;\n}\n\n", OptionT, op.T, op.T)); VecU8_append_vec(&res, VecU8_fmt( "%s %s_expect(%s self) {\n" - SPACE4 "return self;\n" + SPACE "return self;\n" "}\n\n", op.T, OptionT, OptionT)); } else { VecU8_append_vec(&res, VecU8_fmt( "typedef struct {\n" - SPACE4 "Option_variant variant;\n" - SPACE4 "%s some;\n" + SPACE "Option_variant variant;\n" + SPACE "%s some;\n" "} %s;\n\n", op.T, OptionT)); VecU8_append_vec(&res, VecU8_fmt("#define None_%s() (%s){ .variant = Option_None }\n\n", 
op.T, OptionT)); VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s Some_%s(%s obj) {\n" - SPACE4 "return (%s){ .variant = Option_Some, .some = obj };\n" + SPACE "return (%s){ .variant = Option_Some, .some = obj };\n" "}\n\n", OptionT, op.T, op.T, OptionT)); VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_expect(%s self){\n" - SPACE4 "if (self.variant == Option_None)\n" - SPACE4 SPACE4 "abortf(\"Expected something in %s got None\\n\");\n" - SPACE4 "return self.some;\n" + SPACE "if (self.variant == Option_None)\n" + SPACE SPACE "abortf(\"Expected something in %s got None\\n\");\n" + SPACE "return self.some;\n" "}\n\n", op.T, OptionT, OptionT, OptionT)); if (!op.t_primitive) { VecU8_append_vec(&res, VecU8_fmt( "void %s_drop(%s self) {\n" - SPACE4 "if (self.variant == Option_None)\n" - SPACE4 SPACE4 "%s_drop(self.some);\n" + SPACE "if (self.variant != Option_None)\n" + SPACE SPACE "%s_drop(self.some);\n" "}\n\n", OptionT, OptionT, op.T)); if (op.t_clonable) { VecU8_append_vec(&res, VecU8_fmt( "NODISCARD %s %s_clone(const %s* self) {\n" - SPACE4 "if (self->variant == Option_None)\n" - SPACE4 SPACE4 "return (%s) { .variant = Option_None };\n" - SPACE4 "return (%s){ .variant = Option_Some, .some = %s_clone(&self->some) };\n" + SPACE "if (self->variant == Option_None)\n" + SPACE SPACE "return (%s) { .variant = Option_None };\n" + SPACE "return (%s){ .variant = Option_Some, .some = %s_clone(&self->some) };\n" "}\n\n", OptionT, OptionT, OptionT, OptionT, OptionT, op.T)); } } diff --git a/src/l1_5/anne/codegen.c b/src/l1_5/anne/codegen.c index d25788d..71ce5d6 100644 --- a/src/l1_5/anne/codegen.c +++ b/src/l1_5/anne/codegen.c @@ -2,12 +2,14 @@ #include "marie/clipping.h" #include "liza.h" +#include "l1_5_templ_very_base.h" int main() { mkdir_nofail("l1_5"); mkdir_nofail("l1_5/marie"); generate_marie_clipping_header(); generate_l1_5_liza_headers(); + generate_l1_5_template_instantiation_for_base_types(); finish_layer(cstr("l1_5")); return 0; } diff --git 
a/src/l1_5/anne/l1_5_templ_very_base.h b/src/l1_5/anne/l1_5_templ_very_base.h new file mode 100644 index 0000000..127e91f --- /dev/null +++ b/src/l1_5/anne/l1_5_templ_very_base.h @@ -0,0 +1,8 @@ +#ifndef prototype1_src_l1_5_anne_l1_5_templ_very_base_h +#define prototype1_src_l1_5_anne_l1_5_templ_very_base_h + +void generate_l1_5_template_instantiation_for_base_types(){ + +} + +#endif \ No newline at end of file diff --git a/src/l1_5/anne/marie/clipping.h b/src/l1_5/anne/marie/clipping.h index f55f914..1101660 100644 --- a/src/l1_5/anne/marie/clipping.h +++ b/src/l1_5/anne/marie/clipping.h @@ -144,7 +144,7 @@ int mod3_dec(int x) { void generate_func_clip_triang_on_triang_case_where_some_vertex_stuck(VecU8* res, char tC, char tT, bool tables_turned) { /* Case where all 3 vertices of tT are inside tC */ - VecU8_append_span(res, cstr(SPACE4 "if (")); + VecU8_append_span(res, cstr(SPACE "if (")); for (int cs = 0; cs < 3; cs++) { for (int tv = 0; tv < 3; tv++) { if (cs != 0 || tv != 0) @@ -155,11 +155,11 @@ void generate_func_clip_triang_on_triang_case_where_some_vertex_stuck(VecU8* res VecU8_append_span(res, cstr(") {\n" SPACE8)); append_triangle_registration_stmt(res, get_firstborn_vertex_stmt(tT, 0), get_firstborn_vertex_stmt(tT, 1), get_firstborn_vertex_stmt(tT, 2)); - VecU8_append_span(res, cstr(SPACE8 "return;\n" SPACE4 "}\n\n")); + VecU8_append_span(res, cstr(SPACE8 "return;\n" SPACE "}\n\n")); /* Cases where two vertices of tT are inside tC, but one is outside */ for (int ti = 0; ti < 3; ti++) { - VecU8_append_span(res, cstr(SPACE4 "if (")); + VecU8_append_span(res, cstr(SPACE "if (")); int TA = mod3_inc(ti); int TB = mod3_inc(TA); for (int j = 1; j <= 2; j++) { @@ -216,14 +216,14 @@ void generate_func_clip_triang_on_triang_case_where_some_vertex_stuck(VecU8* res } VecU8_append_span(res, cstr(SPACE8 "}\n")); } - VecU8_append_span(res, cstr(SPACE4 "}\n\n")); + VecU8_append_span(res, cstr(SPACE "}\n\n")); } /* Case where one vertice of tT is inside tC, but other 
two are outside tC */ for (int pl = 0; pl < 3; pl++) { int TA = mod3_inc(pl); int TB = mod3_inc(TA); - VecU8_append_span(res, cstr(SPACE4 "if (")); + VecU8_append_span(res, cstr(SPACE "if (")); for (int cb = 0; cb < 3; cb++) { if (cb) VecU8_append_span(res, cstr(" && ")); @@ -298,7 +298,7 @@ void generate_func_clip_triang_on_triang_case_where_some_vertex_stuck(VecU8* res } VecU8_append_span(res, cstr(SPACE8 "}\n")); } - VecU8_append_span(res, cstr(SPACE4 "}\n\n")); + VecU8_append_span(res, cstr(SPACE "}\n\n")); } } @@ -306,7 +306,7 @@ void generate_func_clip_triang_on_triang_case_where_some_vertex_stuck(VecU8* res void generate_func_clip_triang_on_triang_case_boring(VecU8* res) { /* Star of David case */ for (int cb = 0; cb < 3; cb++) { - VecU8_append_span(res, cstr(SPACE4 "if (")); + VecU8_append_span(res, cstr(SPACE "if (")); for (int i = 0; i < 3; i++) { if (i) VecU8_append_span(res, cstr(" && ")); @@ -325,12 +325,12 @@ void generate_func_clip_triang_on_triang_case_boring(VecU8* res) { VecU8_append_span(res, cstr(SPACE8 "for (int i = 0; i < 4; i++)\n" SPACE12 "VecMarieTriangle_append(pile, (MarieTriangle){hex[i], hex[i + 1], hex[5]});\n")); } - VecU8_append_span(res, cstr(SPACE4 "}\n")); + VecU8_append_span(res, cstr(SPACE "}\n")); } /* Wedge cases */ for (int cf = 0; cf < 3; cf++) { for (int ti = 0; ti < 3; ti++){ - VecU8_append_span(res, cstr(SPACE4 "if (")); + VecU8_append_span(res, cstr(SPACE "if (")); append_on_the_left_stmt(res, 'T', ti, 'T', mod3_dec(ti), 'C', cf); VecU8_append_span(res, cstr(" && ")); append_on_the_right_stmt(res, 'T', mod3_inc(ti), 'T', mod3_dec(ti), 'C', (cf + 2) % 3); @@ -380,7 +380,7 @@ void generate_func_clip_triang_on_triang_case_boring(VecU8* res) { } VecU8_append_span(res, cstr(SPACE8 "}\n")); } - VecU8_append_span(res, cstr(SPACE4 "}\n")); + VecU8_append_span(res, cstr(SPACE "}\n")); } } @@ -390,7 +390,7 @@ NODISCARD VecU8 generate_func_clip_ccw_triang_with_ccw_triang_append_to_Vec() { VecU8 res = VecU8_from_cstr( "void 
marie_clip_ccw_triang_with_ccw_triang_append_to_Vec(MarieTriangle C, MarieTriangle T, VecMarieTriangle* pile) {\n"); for (int ord = 0; ord < 18; ord++) { - VecU8_append_vec(&res, VecU8_format(SPACE4 "float M%d = marie_surface(", ord)); + VecU8_append_vec(&res, VecU8_format(SPACE "float M%d = marie_surface(", ord)); for (int a = 0; a < 3; a++) { if (a) VecU8_append_span(&res, cstr(", ")); diff --git a/src/l1_5/codegen/rb_tree_set_map_template_inst.h b/src/l1_5/codegen/rb_tree_set_map_template_inst.h index ae46a72..e903ee9 100644 --- a/src/l1_5/codegen/rb_tree_set_map_template_inst.h +++ b/src/l1_5/codegen/rb_tree_set_map_template_inst.h @@ -3,26 +3,66 @@ #include "all_set_map_templ_util_inst.h" -// VecU8 codegen_rb_tree_NOT_EQUAL(set_instantiation_op op, SpanU8 A, SpanU8 B){ -// return op.t_integer ? VecU8_fmt("%s != %s", A, B) : VecU8_fmt("!%s_equal_%s(%s, %s)", op.T, op.T, A, B); -// } -// -// VecU8 codegen_rb_tree_LESS(set_instantiation_op op, SpanU8 A, SpanU8 B){ -// return op.t_integer ? 
VecU8_fmt("A < B", A, B) : VecU8_fmt("%s_less_%s(%s, %s)", op.T, op.T, A, B); -// } - /* When key is given by value into some method of Buff_RBTreeSet */ -NODISCARD VecU8 codegen_rb_tree_set_key_value_NOT_EQUAL_element(set_instantiation_op op, SpanU8 node_id_var_name){ +NODISCARD VecU8 codegen_rb_tree_set_key_value_NOT_EQUAL_element(set_instantiation_op op){ if (op.t_integer) - return VecU8_fmt("self->el.buf[%s - 1] != key", node_id_var_name); - return VecU8_fmt("!%s_equal_%s(&self->el.buf[%s - 1], &key)", op.T, op.T, node_id_var_name); + return VecU8_fmt("self->el.buf[cur - 1] != key"); + return VecU8_fmt("!%s_equal_%s(&self->el.buf[cur - 1], &key)", op.T, op.T); } /* When key is given by value into some method of Buff_RBTreeSet */ -NODISCARD VecU8 codegen_rb_tree_set_key_value_LESS_element(set_instantiation_op op, SpanU8 node_id_var_name){ +NODISCARD VecU8 codegen_rb_tree_set_key_value_LESS_element(set_instantiation_op op){ if (op.t_integer) - return VecU8_fmt("key < self->el.buf[%s - 1]", node_id_var_name); - return VecU8_fmt("%s_less_%s(&key, &self->el.buf[%s - 1])", op.T, op.T, node_id_var_name); + return VecU8_fmt("key < self->el.buf[cur - 1]"); + return VecU8_fmt("%s_less_%s(&key, &self->el.buf[cur - 1])", op.T, op.T); +} + +/* When key is given by a pointer into some method of Buff_RBTreeSet */ +NODISCARD VecU8 codegen_rb_tree_set_key_ref_EQUAL_element(set_instantiation_op op){ + if (op.t_integer) + return VecU8_fmt("self->el.buf[cur - 1] == *key"); + return VecU8_fmt("%s_equal_%s(&self->el.buf[cur - 1], key)", op.T, op.T); +} + +/* When key is given by a pointer into some method of Buff_RBTreeSet */ +NODISCARD VecU8 codegen_rb_tree_set_key_ref_LESS_element(set_instantiation_op op){ + if (op.t_integer) + return VecU8_fmt("*key < self->el.buf[cur - 1]"); + return VecU8_fmt("%s_less_%s(key, &self->el.buf[cur - 1])", op.T, op.T); +} + +/* When method returns constant pointer to found key (wrapped in Option) we will use this type + * Of course, it can turn out 
that it is not generated. So be careful and generate it by yourself + */ +NODISCARD VecU8 codegen_rb_tree_set_option_returned_ref_t(set_instantiation_op op){ + /* Constant pointer to an integer is an integer */ + return op.t_integer ? VecU8_fmt("Option%s", op.T) : VecU8_fmt("OptionRef%s", op.T); +} + +/* Suppose some method returns pointer to key (ofc wrapped in option). And we found what to return + * we return it from self->el array */ +NODISCARD VecU8 codegen_rb_tree_set_some_ref_t(set_instantiation_op op, SpanU8 index_var_name){ + if (op.t_integer) + return VecU8_fmt("Some_%s(self->el.buf[%s - 1])", op.T, index_var_name); + return VecU8_fmt("Some_Ref%s(&self->el.buf[%s - 1])", op.T, index_var_name); +} + +/* Suppose some method returns pointer to key (ofc wrapped in option). But this time we found nothing */ +NODISCARD VecU8 codegen_rb_tree_set_none_ref_t(set_instantiation_op op, SpanU8 index_var_name){ + if (op.t_integer) + return VecU8_fmt("None_%s()", op.T); + return VecU8_fmt("None_Ref%s()", op.T); +} + +/* Suppose some method returns an owned key (by value, ofc wrapped in option). If we DID find something, + * we construct Option_Some */ +NODISCARD VecU8 codegen_rb_tree_some_t(set_instantiation_op op, SpanU8 val_giving_expr){ + return VecU8_fmt("Some_%s(%s)", op.T, val_giving_expr); +} + +/* Suppose some method returns an owned key (by value, ofc wrapped in option). 
But this time we found nothing */ +NODISCARD VecU8 codegen_rb_tree_none_t(set_instantiation_op op, SpanU8 val_giving_expr){ + return VecU8_fmt("None_%s()", op.T); } /* src/l1_5/core/rb_tree_node.h is a dependency of all instantiations of rb_tree_set template @@ -31,68 +71,112 @@ NODISCARD VecU8 codegen_rb_tree_set_key_value_LESS_element(set_instantiation_op NODISCARD VecU8 generate_rb_tree_set_template_instantiation(set_instantiation_op op){ set_instantiation_op_fix(&op); VecU8 res = VecU8_new(); - VecU8 set = VecU8_fmt("BuffRBTree_Set%s", op.T); + VecU8 g_set = VecU8_fmt("BuffRBTree_Set%s", op.T); + SpanU8 set = VecU8_to_span(&g_set); VecU8_append_vec(&res, VecU8_fmt( "typedef struct {\n" - SPACE4 "VecRBTreeNode tree;\n" - SPACE4 "U64 root;\n" - SPACE4 "Vec%s" - "} %s\n\n", op.T, VecU8_to_span(&set))); + SPACE "VecRBTreeNode tree;\n" + SPACE "U64 root;\n" + SPACE "Vec%s el;\n" + "} %s;\n\n", op.T, set)); VecU8_append_vec(&res, VecU8_fmt( "void %s_drop(%s self) {\n" - SPACE4 "VecRBTreeNode_drop(self->tree);\n" - SPACE4 "Vec%s_drop(self->el);\n" - "}\n\n", VecU8_to_span(&set), VecU8_to_span(&set), op.T)); + SPACE "VecRBTreeNode_drop(self->tree);\n" + SPACE "Vec%s_drop(self->el);\n" + "}\n\n", set, set, op.T)); /* Method insert does not try to replace the existing element with equal key, * it returns true if insertion was done, false it collision happened and key was not inserted */ VecU8_append_vec(&res, VecU8_fmt( - "bool %s_insert(%s* self, %s key) {\n" - SPACE4 "if (self->root == 0) {\n" - SPACE4 SPACE4 "assert(self->tree.len == 1);\n" - SPACE4 SPACE4 "VecRBTreeNode_append(&self->tree, (RBTreeNode){.color = RBTree_black});\n" - SPACE4 SPACE4 "Vec%s_append(&self->el, key);\n" - SPACE4 SPACE4 "self->root = 1;\n" - SPACE4 SPACE4 "return true;\n" - SPACE4 "}\n" - SPACE4 "U64 cur = self->root;\n" - SPACE4 "while (%v) {\n" - SPACE4 SPACE4 "if (%v) {\n" - SPACE4 SPACE4 SPACE4 "if (self->tree.buf[cur].left != 0) {\n" - SPACE4 SPACE4 SPACE4 SPACE4 "cur = 
self->tree.buf[cur].left\n" - SPACE4 SPACE4 SPACE4 "} else { \n" - /* We are inserting to the left of cur */ - SPACE4 SPACE4 SPACE4 SPACE4 "U64 n = self->tree.len;\n" - SPACE4 SPACE4 SPACE4 SPACE4 "VecRBTreeNode_append(&self->tree, (RBTreeeNode){.parent = cur, .color = RBTree_red});\n" - SPACE4 SPACE4 SPACE4 SPACE4 "self->tree.buf[cur].left = n;\n" - SPACE4 SPACE4 SPACE4 SPACE4 "RBTree_fix_after_insert(&self->tree, &self->root, n);\n" - SPACE4 SPACE4 SPACE4 SPACE4 "return true;\n" - SPACE4 SPACE4 SPACE4 "}\n" - SPACE4 SPACE4 "} else {\n" - SPACE4 SPACE4 SPACE4 "if (self->tree.buf[cur].right != 0) {\n" - SPACE4 SPACE4 SPACE4 SPACE4 "cur = self->tree.buf[cur].right\n" - SPACE4 SPACE4 SPACE4 "} else {\n" - /* We are inserting to the right of cur */ - SPACE4 SPACE4 SPACE4 SPACE4 "U64 n = self->tree.len;\n" - SPACE4 SPACE4 SPACE4 SPACE4 "VecRBTreeNode_append(&self->tree, (RBTreeeNode){.parent = cur, .color = RBTree_red});\n" - SPACE4 SPACE4 SPACE4 SPACE4 "self->tree.buf[cur].right = n;\n" - SPACE4 SPACE4 SPACE4 SPACE4 "RBTree_fix_after_insert(&self->tree, &self->root, n);\n" - SPACE4 SPACE4 SPACE4 SPACE4 "return true;\n" - SPACE4 SPACE4 SPACE4 "}\n" - SPACE4 SPACE4 "}\n" - SPACE4 "}\n" - SPACE4 "return false;\n" - "}\n\n", - VecU8_to_span(&set), VecU8_to_span(&set), op.T, - codegen_rb_tree_set_key_value_NOT_EQUAL_element(op, cstr("cur")), - codegen_rb_tree_set_key_value_LESS_element(op, cstr("cur")) - )); + "bool %s_insert(%s* self, %s key) {\n" + SPACE "if (self->root == 0) {\n" + SPACE SPACE "assert(self->tree.len == 1);\n" + SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.color = RBTree_black});\n" + SPACE SPACE "Vec%s_append(&self->el, key);\n" + SPACE SPACE "self->root = 1;\n" + SPACE SPACE "return true;\n" + SPACE "}\n" + SPACE "U64 cur = self->root;\n" + SPACE "while (%v) {\n" + SPACE SPACE "if (%v) {\n" + SPACE SPACE SPACE "if (self->tree.buf[cur].left != 0) {\n" + SPACE SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE SPACE "} 
else { \n" + /* We are inserting to the left of cur */ + SPACE SPACE SPACE SPACE "U64 n = self->tree.len;\n" + SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n" + SPACE SPACE SPACE SPACE "self->tree.buf[cur].left = n;\n" + SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n" + SPACE SPACE SPACE SPACE "return true;\n" + SPACE SPACE SPACE "}\n" + SPACE SPACE "} else {\n" + SPACE SPACE SPACE "if (self->tree.buf[cur].right != 0) {\n" + SPACE SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE SPACE SPACE "} else {\n" + /* We are inserting to the right of cur */ + SPACE SPACE SPACE SPACE "U64 n = self->tree.len;\n" + SPACE SPACE SPACE SPACE "VecRBTreeNode_append(&self->tree, (RBTreeNode){.parent = cur, .color = RBTree_red});\n" + SPACE SPACE SPACE SPACE "self->tree.buf[cur].right = n;\n" + SPACE SPACE SPACE SPACE "RBTree_fix_after_insert(self->tree.buf, &self->root, n);\n" + SPACE SPACE SPACE SPACE "return true;\n" + SPACE SPACE SPACE "}\n" + SPACE SPACE "}\n" + SPACE "}\n" + SPACE "return false;\n" + "}\n\n", + set, set, op.T, op.T, + codegen_rb_tree_set_key_value_NOT_EQUAL_element(op), + codegen_rb_tree_set_key_value_LESS_element(op) + )); - // VecU8_append_vec(&res, VecU8_fmt( - // "")); + VecU8_append_vec(&res, VecU8_fmt( + "bool %s_erase(%s* self, const %s* key) {\n" + SPACE "U64 cur = self->root;\n" + SPACE "while (true){\n" + SPACE SPACE "if (cur == 0)\n" + SPACE SPACE SPACE "return false;\n" + SPACE SPACE "if (%v)\n" + SPACE SPACE SPACE "break;\n" + SPACE SPACE "if (%v)\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].left;\n" + SPACE SPACE "else\n" + SPACE SPACE SPACE "cur = self->tree.buf[cur].right;\n" + SPACE "}\n" + SPACE "U64 z = cur;\n" + SPACE "U64 y = (self->tree.buf[z].left == 0 || self->tree.buf[z].right == 0) ? z : RBTree_minimum_in_subtree(self->tree.buf, self->tree.buf[z].right);\n" + SPACE "U64 x = self->tree.buf[y].left != 0 ? 
self->tree.buf[y].left : self->tree.buf[y].right;\n" + SPACE "U64 py = self->tree.buf[y].parent;\n" // May be null + SPACE "self->tree.buf[x].parent = self->tree.buf[y].parent;\n" + SPACE "if (py == 0)\n" + SPACE SPACE "self->root = x;\n" + SPACE "else if (self->tree.buf[py].left == y)\n" + SPACE SPACE "self->tree.buf[py].left = x;\n" + SPACE "else\n" + SPACE SPACE "self->tree.buf[py].right = x;\n" + SPACE "RBTreeColor y_org_clr = self->tree.buf[y].color;\n" + SPACE "if (z != y)\n" + SPACE SPACE "RBTree_steal_neighbours(self->tree.buf, &self->root, z, y);\n" + SPACE "U64 L = self->el.len;\n" /* self->tree.len - 1 */ + SPACE "RBTree_steal_neighbours(self->tree.buf, &self->root, L, z);\n" + SPACE "self->tree.len--;\n" + SPACE "self->el.buf[z-1] = self->el.buf[L-1];\n" + SPACE "self->el.len--;\n" + SPACE "if (y_org_clr == RBTree_black)\n" + SPACE SPACE "RBTree_fix_after_delete(self->tree.buf, &self->root, x);\n" + SPACE "return true;\n" + "}\n\n", + set, set, op.T, + codegen_rb_tree_set_key_ref_EQUAL_element(op), + codegen_rb_tree_set_key_ref_LESS_element(op) + )); - VecU8_drop(set); + // todo: continue from here. Implement method _pop_and_substitute() + + // todo: implement contains method + + // todo: implement _find and _at methods. 
All the other methods are secondary in importance + VecU8_drop(g_set); return res; } diff --git a/src/l1_5/codegen/trait_wrap_boil.h b/src/l1_5/codegen/trait_wrap_boil.h index a20a63b..0c9dded 100644 --- a/src/l1_5/codegen/trait_wrap_boil.h +++ b/src/l1_5/codegen/trait_wrap_boil.h @@ -28,7 +28,7 @@ NODISCARD VecU8 generate_trait_table_structure(NamedTraitDefRecordRef trait){ // todo: add iteration macro for (size_t i = 0; i < trait.methods.len; i++) { NamedMethodSignatureRecordRef method = *SpanNamedMethodSignatureRecordRef_at(trait.methods, i); - VecU8_append_vec(&res, VecU8_fmt(SPACE4 "%s (*%s)(", method.return_type, method.name)); + VecU8_append_vec(&res, VecU8_fmt(SPACE "%s (*%s)(", method.return_type, method.name)); for (size_t p = 0; p < method.params.len; p++) { NamedVariableRecordRef param = *SpanNamedVariableRecordRef_at(method.params, p); if (p) diff --git a/src/l1_5/core/rb_tree_node.h b/src/l1_5/core/rb_tree_node.h index 2872b0b..d7c0450 100644 --- a/src/l1_5/core/rb_tree_node.h +++ b/src/l1_5/core/rb_tree_node.h @@ -104,4 +104,107 @@ void RBTree_fix_after_insert(RBTreeNode* tree, U64* root, U64 me){ tree[me].color = RBTree_black; } +/* fr index will be forgotten. to fields will be overwritten (all fields replaced by fr's values) + * If you need the old values of `to` position, you better save them on stack */ +void RBTree_steal_neighbours(RBTreeNode* tree, U64* root, U64 fr, U64 to){ + if (tree[fr].parent == 0) + *root = to; + else if (tree[tree[fr].parent].left == fr) + tree[tree[fr].parent].left = to; + else + tree[tree[fr].parent].right = to; + tree[tree[fr].left].parent = to; + tree[tree[fr].right].parent = to; + tree[to] = tree[fr]; +} + +/* helper function (used in _delete, _find_min methods). It is assumed that s is not null. 
+ * Guaranteed to return non-null + */ +U64 RBTree_minimum_in_subtree(RBTreeNode* tree, U64 s){ + assert(s != 0); + while (tree[s].left != 0) + s = tree[s].left; + return s; +} + +void RBTree_fix_after_delete(RBTreeNode* tree, U64* root, U64 me){ + assert(tree[*root].parent == 0); + while (me != *root && tree[me].color == RBTree_black) { + U64 mom = tree[me].parent; + if (me == tree[mom].left) { /* We are on the left */ + U64 sister = tree[mom].right; + if (tree[sister].color == RBTree_red) { /* Case 1 */ + tree[mom].color = RBTree_red; + tree[sister].color = RBTree_black; + RBTree_left_rotate(tree, root, mom); + /* Reassignation required */ + sister = tree[mom].right; + } + /* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */ + assert(sister != 0); + U64 nephew_firstborn = tree[sister].left; + U64 nephew_benjamin = tree[sister].right; + if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) { + /* Case 2 */ + tree[sister].color = RBTree_red; + me = mom; + continue; + } + /* Cases 3,4 */ + if (tree[nephew_benjamin].color == RBTree_black) { + /* Case 3 */ + tree[nephew_firstborn].color = RBTree_black; + tree[sister].color = RBTree_red; + RBTree_right_rotate(tree, root, sister); + /* Reassignation required */ + nephew_benjamin = sister; + sister = nephew_firstborn; + nephew_firstborn = tree[sister].left; + } + /* Case 4 */ + tree[sister].color = tree[mom].color; + tree[mom].color = RBTree_black; + tree[nephew_benjamin].color = RBTree_black; + RBTree_left_rotate(tree, root, mom); me = *root; + } else if (me == tree[mom].right) { /* We are on the right */ + U64 sister = tree[mom].left; + if (tree[sister].color == RBTree_red) { /* Case 1 */ + tree[mom].color = RBTree_red; + tree[sister].color = RBTree_black; + RBTree_right_rotate(tree, root, mom); + /* Reassignation required */ + sister = tree[mom].left; + } + /* Cases 2,3,4 (every instance of red-black tree has an itchy substring in source code containing 2,3,4) */ + 
assert(sister != 0); + U64 nephew_firstborn = tree[sister].left; + U64 nephew_benjamin = tree[sister].right; + if (tree[nephew_firstborn].color == RBTree_black && tree[nephew_benjamin].color == RBTree_black) { + /* Case 2 */ + tree[sister].color = RBTree_red; + me = mom; + continue; + } + /* Cases 3,4 */ + if (tree[nephew_firstborn].color == RBTree_black) { + /* Case 3 */ + tree[nephew_benjamin].color = RBTree_black; + tree[sister].color = RBTree_red; + RBTree_left_rotate(tree, root, sister); + /* Reassignation required */ + nephew_firstborn = sister; + sister = nephew_benjamin; + nephew_benjamin = tree[sister].right; + } + /* Case 4 */ + tree[sister].color = tree[mom].color; + tree[mom].color = RBTree_black; + tree[nephew_firstborn].color = RBTree_black; + RBTree_right_rotate(tree, root, mom); me = *root; + } + } + tree[me].color = RBTree_black; +} + +#endif diff --git a/src/l2/tests/t0.c b/src/l2/tests/t0.c new file mode 100644 index 0000000..a74e630 --- /dev/null +++ b/src/l2/tests/t0.c @@ -0,0 +1,3 @@ +int main(){ + +} \ No newline at end of file