No types, no fucking types. Just bugs

2025-01-07 22:22:05 +01:00
parent 1f134595de
commit 2a56ea9be2
6 changed files with 68 additions and 49 deletions

ast.c (52 changed lines)

@@ -9,10 +9,23 @@
 #define N 1024
+static void astTokenListEnsureCapacity(AstTokenList* list, uint32_t additional) {
+    const uint32_t new_len = list->len + additional;
+    if (new_len <= list->cap) {
+        return;
+    }
+    const uint32_t new_cap = new_len > list->cap * 2 ? new_len : list->cap * 2;
+    list->tags = realloc(list->tags, new_cap * sizeof(TokenizerTag));
+    list->starts = realloc(list->starts, new_cap * sizeof(AstIndex));
+    if (!list->tags || !list->starts)
+        exit(1);
+    list->cap = new_cap;
+}
+
 Ast astParse(const char* source, const uint32_t len) {
     uint32_t estimated_token_count = len / 8;
     // Initialize token list
     AstTokenList tokens = {
         .len = 0,
         .cap = estimated_token_count,
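
A note on the helper added above: it grows both parallel arrays geometrically, doubling the capacity unless the requested length exceeds even the doubled value, in which case it jumps straight to the request. That keeps repeated one-token appends amortized O(1). A minimal standalone sketch of the same pattern; the Vec type and names here are illustrative, not from this repo:

#include <stdint.h>
#include <stdlib.h>

/* Illustrative type, not from this repo. */
typedef struct { uint32_t len, cap; int* data; } Vec;

static void vecEnsureCapacity(Vec* v, uint32_t additional) {
    const uint32_t new_len = v->len + additional;
    if (new_len <= v->cap)
        return;                 /* enough room already */
    /* Double, unless the request overshoots even the doubled capacity. */
    const uint32_t new_cap = new_len > v->cap * 2 ? new_len : v->cap * 2;
    int* grown = realloc(v->data, new_cap * sizeof(int));
    if (!grown)
        exit(1);                /* the commit bails out the same way */
    v->data = grown;
    v->cap = new_cap;
}

Writing the realloc result straight back into list->tags, as the commit does, would leak the old block on failure in a long-lived process; it is harmless here only because failure exits immediately.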
@@ -20,31 +33,18 @@ Ast astParse(const char* source, const uint32_t len) {
         .starts = ARR_INIT(AstIndex, estimated_token_count)
     };
     // Tokenize
     Tokenizer tok = tokenizerInit(source, len);
     while (true) {
-        if (tokens.len >= tokens.cap) {
-            fprintf(stderr, "too many tokens, bump estimated_token_count\n");
-            exit(1);
-        }
+        astTokenListEnsureCapacity(&tokens, 1);
         TokenizerToken token = tokenizerNext(&tok);
-        tokens.tags[++tokens.len] = token.tag;
-        tokens.starts[tokens.len] = token.loc.start;
+        tokens.tags[tokens.len] = token.tag;
+        tokens.starts[tokens.len++] = token.loc.start;
         if (token.tag == TOKEN_EOF)
             break;
     }
     // Initialize node list
     uint32_t estimated_node_count = (tokens.len + 2) / 2;
-    AstNodeList nodes = {
-        .len = 0,
-        .cap = estimated_node_count,
-        .tags = ARR_INIT(AstNodeTag, estimated_node_count),
-        .main_tokens = ARR_INIT(AstTokenIndex, estimated_node_count),
-        .datas = ARR_INIT(AstData, estimated_node_count)
-    };
     // Initialize parser
     Parser p = {
         .source = source,
         .source_len = len,
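
The real fix in the loop above is the increment: tokens.tags[++tokens.len] pre-increments, so the index is the new length. The first token is written to index 1, slot 0 is never filled, and the write can land one past the capacity that was just reserved. The replacement writes both parallel arrays at the current len and advances it only on the second store. A self-contained sketch of the difference, using a single plain int array for illustration:

#include <stdint.h>
#include <stdio.h>

int main(void) {
    int tags[4] = {-1, -1, -1, -1};
    uint32_t len = 0;

    /* Old pattern: ++len yields the incremented value, so the write
       skips slot 0 and lands at index 1. */
    tags[++len] = 7;
    printf("buggy: tags[0]=%d tags[1]=%d len=%u\n", tags[0], tags[1], len);
    /* prints: buggy: tags[0]=-1 tags[1]=7 len=1 */

    /* Fixed pattern: write at the current len, then advance it
       (the commit advances on the second of its two parallel stores). */
    len = 0;
    tags[len++] = 7;
    printf("fixed: tags[0]=%d len=%u\n", tags[0], len);
    /* prints: fixed: tags[0]=7 len=1 */
    return 0;
}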
@@ -52,21 +52,19 @@ Ast astParse(const char* source, const uint32_t len) {
         .token_starts = tokens.starts,
         .tokens_len = tokens.len,
         .tok_i = 0,
-        .nodes = nodes,
+        .nodes = {
+            .len = 0,
+            .cap = estimated_node_count,
+            .tags = ARR_INIT(AstNodeTag, estimated_node_count),
+            .main_tokens = ARR_INIT(AstTokenIndex, estimated_node_count),
+            .datas = ARR_INIT(AstData, estimated_node_count),
+        },
         .extra_data = SLICE_INIT(AstNodeIndex, N),
-        .scratch = SLICE_INIT(AstNodeIndex, N)
+        .scratch = SLICE_INIT(AstNodeIndex, N),
     };
     parseRoot(&p);
     p.nodes.cap = p.nodes.len = 0;
     free(p.nodes.tags);
     free(p.nodes.main_tokens);
     free(p.nodes.datas);
     p.extra_data.cap = p.extra_data.len = 0;
     free(p.extra_data.arr);
     p.scratch.cap = p.scratch.len = 0;
     free(p.scratch.arr);
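
Two smaller points in the final hunk: initializing p.nodes in place uses C99 nested designated initializers, which is what lets the previous hunk delete the intermediate nodes variable, and the trailing comma now added after the last field is legal C that keeps future one-line additions from touching the previous line. A minimal sketch of the nesting and teardown idiom; the types here are illustrative, not from this repo:

#include <stdint.h>
#include <stdlib.h>

/* Illustrative types, not from this repo. */
typedef struct { uint32_t len, cap; int* tags; } NodeList;
typedef struct { const char* source; NodeList nodes; } Parser;

int main(void) {
    const uint32_t estimated_node_count = 16;
    Parser p = {
        .source = "",
        /* The inner struct is initialized in place; no named
           temporary is needed. */
        .nodes = {
            .len = 0,
            .cap = estimated_node_count,
            .tags = malloc(estimated_node_count * sizeof(int)),
        },
    };
    /* Teardown mirrors the commit: zero the bookkeeping so the stale
       capacity cannot be reused, then free the backing storage. */
    p.nodes.cap = p.nodes.len = 0;
    free(p.nodes.tags);
    return 0;
}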