Files
zig/stage0/ast.c
Motiejus Jakštys ccc454b3f2 sema: fix memory leaks, shift UB, cppcheck warnings, format
- Add owns_source flag to Ast; free source in astDeinit when owned.
  Fixes memory leaks from loadImportZirFromPath allocations.
- Guard comptime shift folding against exponents >= 64 (UB).
- Fix cppcheck warnings: redundant conditional assign, always-true
  condition, unused variable, redundant assignment.
- Use volatile for need_debug_scope to avoid cppcheck false positive.
- Use PID-based temp file paths to avoid races in parallel test runs.
- Reformat verbose_air.c (pre-existing clang-format violations).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-21 20:00:39 +00:00

118 lines
3.1 KiB
C

#include "common.h"
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include "ast.h"
#include "parser.h"
#define N 1024
/* Grow `list` so it can hold at least `additional` more tokens.
 * Growth policy: double the current capacity, or jump straight to the
 * required length when doubling is not enough. Follows the file's
 * exit-on-OOM policy: aborts the process if either realloc fails. */
static void astTokenListEnsureCapacity(
    AstTokenList* list, uint32_t additional) {
    const uint32_t new_len = list->len + additional;
    if (new_len <= list->cap) {
        return;
    }
    const uint32_t doubled = list->cap * 2;
    const uint32_t new_cap = new_len > doubled ? new_len : doubled;
    /* Keep realloc results in temporaries: overwriting list->tags /
     * list->starts directly leaks the old block on failure (cppcheck
     * memleakOnRealloc; we exit anyway, but the clean pattern costs
     * nothing). Cast to size_t before multiplying so the byte count
     * cannot wrap in 32-bit arithmetic. */
    TokenizerTag* new_tags =
        realloc(list->tags, (size_t)new_cap * sizeof(TokenizerTag));
    AstIndex* new_starts =
        realloc(list->starts, (size_t)new_cap * sizeof(AstIndex));
    if (!new_tags || !new_starts)
        exit(1);
    list->tags = new_tags;
    list->starts = new_starts;
    list->cap = new_cap;
}
/* Tokenize and parse `source` (exactly `len` bytes) into an Ast.
 *
 * Ownership: the returned Ast owns its token arrays, node arrays,
 * extra_data and err_msg; release them with astDeinit. `source` itself is
 * borrowed — owns_source is not set here, so the designated initializer
 * zero-fills it to false and astDeinit will not free the source buffer
 * unless the caller sets the flag after the fact.
 *
 * On parse failure has_error is set and, when the parser produced a
 * message, err_msg holds a heap-allocated copy of it (NULL otherwise). */
Ast astParse(const char* source, const uint32_t len) {
/* Heuristic pre-size: roughly one token per 8 source bytes. Too-small
 * estimates are fine; astTokenListEnsureCapacity grows the arrays. */
uint32_t estimated_token_count = len / 8;
/* NOTE(review): ARR_INIT presumably heap-allocates `count` elements and
 * exits on OOM like the rest of this file — confirm against common.h. */
AstTokenList tokens = {
.len = 0,
.cap = estimated_token_count,
.tags = ARR_INIT(TokenizerTag, estimated_token_count),
.starts = ARR_INIT(AstIndex, estimated_token_count),
};
Tokenizer tok = tokenizerInit(source, len);
/* Collect every token, including the trailing TOKEN_EOF sentinel, into
 * parallel tag/start arrays. */
while (true) {
astTokenListEnsureCapacity(&tokens, 1);
TokenizerToken token = tokenizerNext(&tok);
tokens.tags[tokens.len] = token.tag;
tokens.starts[tokens.len++] = token.loc.start;
if (token.tag == TOKEN_EOF)
break;
}
/* Heuristic pre-size: about one AST node per two tokens (rounded up). */
uint32_t estimated_node_count = (tokens.len + 2) / 2;
Parser p = {
.source = source,
.source_len = len,
.token_tags = tokens.tags,
.token_starts = tokens.starts,
.tokens_len = tokens.len,
.tok_i = 0,
.nodes = {
.len = 0,
.cap = estimated_node_count,
.tags = ARR_INIT(AstNodeTag, estimated_node_count),
.main_tokens = ARR_INIT(AstTokenIndex, estimated_node_count),
.datas = ARR_INIT(AstData, estimated_node_count),
},
.extra_data = SLICE_INIT(AstNodeIndex, N),
.scratch = SLICE_INIT(AstNodeIndex, N),
.has_warn = false,
.has_compile_errors = false,
.err_buf = {0},
};
parseRoot(&p);
/* scratch is parser working memory only; it is not part of the returned
 * Ast, so release it here rather than in astDeinit. */
p.scratch.cap = p.scratch.len = 0;
free(p.scratch.arr);
/* Copy any error text out of the parser's fixed-size buffer into an
 * owned heap string so it survives after `p` goes out of scope. */
char* err_msg = NULL;
if (p.has_compile_errors && p.err_buf[0] != '\0') {
const size_t len2 = strlen(p.err_buf);
err_msg = malloc(len2 + 1);
if (!err_msg)
exit(1);
memcpy(err_msg, p.err_buf, len2 + 1);
}
/* Transfer token and node array ownership into the returned Ast; fields
 * not named here (e.g. owns_source) are zero-initialized. */
return (Ast) {
.source = source,
.source_len = len,
.tokens = tokens,
.nodes = p.nodes,
.extra_data = {
.len = p.extra_data.len,
.cap = p.extra_data.cap,
.arr = p.extra_data.arr,
},
.has_error = p.has_compile_errors || p.has_warn,
.err_msg = err_msg,
};
}
/* Release all heap memory owned by `tree`.
 * Frees the source buffer only when the Ast owns it (owns_source), then
 * the error message, the token arrays, the node arrays and extra_data.
 * Every freed pointer is nulled and every count zeroed afterwards, so a
 * second astDeinit — or an accidental late access through a stale Ast —
 * fails safely instead of double-freeing (free(NULL) is a no-op). */
void astDeinit(Ast* tree) {
    if (tree->owns_source)
        free((char*)tree->source);
    tree->source = NULL;
    tree->source_len = 0;
    free(tree->err_msg);
    tree->err_msg = NULL;
    tree->tokens.cap = tree->tokens.len = 0;
    free(tree->tokens.tags);
    free(tree->tokens.starts);
    tree->tokens.tags = NULL;
    tree->tokens.starts = NULL;
    tree->nodes.cap = 0;
    tree->nodes.len = 0;
    free(tree->nodes.tags);
    free(tree->nodes.main_tokens);
    free(tree->nodes.datas);
    tree->nodes.tags = NULL;
    tree->nodes.main_tokens = NULL;
    tree->nodes.datas = NULL;
    tree->extra_data.cap = 0;
    tree->extra_data.len = 0;
    free(tree->extra_data.arr);
    tree->extra_data.arr = NULL;
}