Add 'stage0/' from commit 'b3d106ec971300a9c745f4681fab3df7518c4346'
git-subtree-dir: stage0
git-subtree-mainline: 3db960767d
git-subtree-split: b3d106ec97
stage0/ast.c | 122 (new file)
@@ -0,0 +1,122 @@
#include "common.h"

#include <setjmp.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>

#include "ast.h"
#include "parser.h"

#define N 1024

static void astTokenListEnsureCapacity(
    AstTokenList* list, uint32_t additional) {
    const uint32_t new_len = list->len + additional;
    if (new_len <= list->cap) {
        return;
    }

    const uint32_t new_cap = new_len > list->cap * 2 ? new_len : list->cap * 2;
    list->tags = realloc(list->tags, new_cap * sizeof(TokenizerTag));
    list->starts = realloc(list->starts, new_cap * sizeof(AstIndex));
    if (!list->tags || !list->starts)
        exit(1);
    list->cap = new_cap;
}

Ast astParse(const char* source, const uint32_t len) {
    uint32_t estimated_token_count = len / 8;

    AstTokenList tokens = {
        .len = 0,
        .cap = estimated_token_count,
        .tags = ARR_INIT(TokenizerTag, estimated_token_count),
        .starts = ARR_INIT(AstIndex, estimated_token_count),
    };

    Tokenizer tok = tokenizerInit(source, len);
    while (true) {
        astTokenListEnsureCapacity(&tokens, 1);
        TokenizerToken token = tokenizerNext(&tok);
        tokens.tags[tokens.len] = token.tag;
        tokens.starts[tokens.len++] = token.loc.start;
        if (token.tag == TOKEN_EOF)
            break;
    }

    uint32_t estimated_node_count = (tokens.len + 2) / 2;

    char err_buf[PARSE_ERR_BUF_SIZE];
    err_buf[0] = '\0';

    Parser p = {
        .source = source,
        .source_len = len,
        .token_tags = tokens.tags,
        .token_starts = tokens.starts,
        .tokens_len = tokens.len,
        .tok_i = 0,
        .nodes = {
            .len = 0,
            .cap = estimated_node_count,
            .tags = ARR_INIT(AstNodeTag, estimated_node_count),
            .main_tokens = ARR_INIT(AstTokenIndex, estimated_node_count),
            .datas = ARR_INIT(AstData, estimated_node_count),
        },
        .extra_data = SLICE_INIT(AstNodeIndex, N),
        .scratch = SLICE_INIT(AstNodeIndex, N),
        .err_buf = err_buf,
    };

    bool has_error = false;
    if (setjmp(p.error_jmp) != 0) {
        has_error = true;
    }
    if (!has_error)
        parseRoot(&p);

    p.scratch.cap = p.scratch.len = 0;
    free(p.scratch.arr);

    char* err_msg = NULL;
    if (has_error && err_buf[0] != '\0') {
        const size_t len2 = strlen(err_buf);
        err_msg = malloc(len2 + 1);
        if (!err_msg)
            exit(1);
        memcpy(err_msg, err_buf, len2 + 1);
    }

    return (Ast) {
        .source = source,
        .source_len = len,
        .tokens = tokens,
        .nodes = p.nodes,
        .extra_data = {
            .len = p.extra_data.len,
            .cap = p.extra_data.cap,
            .arr = p.extra_data.arr,
        },
        .has_error = has_error,
        .err_msg = err_msg,
    };
}

void astDeinit(Ast* tree) {
    free(tree->err_msg);

    tree->tokens.cap = tree->tokens.len = 0;
    free(tree->tokens.tags);
    free(tree->tokens.starts);

    tree->nodes.cap = 0;
    tree->nodes.len = 0;
    free(tree->nodes.tags);
    free(tree->nodes.main_tokens);
    free(tree->nodes.datas);

    tree->extra_data.cap = 0;
    tree->extra_data.len = 0;
    free(tree->extra_data.arr);
}
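
For reference, a minimal caller sketch; this is an illustration, not part of the commit. It relies only on what the diff above shows: astParse(source, len) returning an Ast with has_error and err_msg fields, and astDeinit() releasing the token, node, and extra_data storage plus err_msg. The example input string and exit-code handling are assumptions.

/* Hypothetical caller -- not part of stage0/ast.c. Assumes "ast.h" declares
 * Ast, astParse, and astDeinit exactly as used in the file above. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include "ast.h"

int main(void) {
    const char* src = "example input";  /* placeholder source text */
    Ast tree = astParse(src, (uint32_t)strlen(src));
    if (tree.has_error) {
        /* err_msg may be NULL if the parser recorded no message. */
        fprintf(stderr, "parse error: %s\n",
                tree.err_msg ? tree.err_msg : "(no message)");
    }
    int status = tree.has_error ? 1 : 0;
    astDeinit(&tree);  /* frees tokens, nodes, extra_data, err_msg */
    return status;
}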