diff --git a/ast.c b/ast.c
index 36a4d1c..719088a 100644
--- a/ast.c
+++ b/ast.c
@@ -9,6 +9,32 @@
 
 #define N 1024
 
+void astNodeListEnsureCapacity(AstNodeList* list, uint32_t additional)
+{
+    const uint32_t new_len = list->len + additional;
+    if (new_len <= list->cap) {
+        return;
+    }
+
+    const uint32_t new_cap = new_len > list->cap * 2 ? new_len : list->cap * 2;
+    list->tags = realloc(list->tags, new_cap * sizeof(AstNodeTag));
+    list->main_tokens = realloc(list->main_tokens, new_cap * sizeof(AstTokenIndex));
+    list->datas = realloc(list->datas, new_cap * sizeof(AstData));
+    if (!list->tags || !list->main_tokens || !list->datas)
+        exit(1);
+    list->cap = new_cap;
+}
+
+void astNodeListAppend(AstNodeList* list, AstNodeTag tag,
+    AstTokenIndex main_token, AstData data)
+{
+    astNodeListEnsureCapacity(list, 1);
+    list->tags[list->len] = tag;
+    list->main_tokens[list->len] = main_token;
+    list->datas[list->len] = data;
+    list->len++;
+}
+
 Ast astParse(const char* source, const uint32_t len)
 {
     uint32_t estimated_token_count = len / 8;
diff --git a/ast.h b/ast.h
index b62caa9..7a98e2f 100644
--- a/ast.h
+++ b/ast.h
@@ -481,7 +481,7 @@ typedef enum {
     AST_NODE_TAG_ERROR_UNION,
 } AstNodeTag;
 
-typedef int32_t AstTokenIndex;
+typedef uint32_t AstTokenIndex;
 typedef uint32_t AstNodeIndex;
 typedef uint32_t AstIndex;
 
diff --git a/common.h b/common.h
index e234b0d..ac7690f 100644
--- a/common.h
+++ b/common.h
@@ -21,10 +21,12 @@
         (slice)->cap = cap; \
     })
 
-#define SLICE_ENSURE_CAPACITY(slice, Type, additional) ({ \
-    if ((slice)->len + (additional) > (slice)->cap) { \
-        SLICE_RESIZE(slice, Type, ((slice)->cap * 2 > (slice)->len + (additional)) ? (slice)->cap * 2 : (slice)->len + (additional)); \
-    } \
+#define SLICE_ENSURE_CAPACITY(slice, Type, additional) ({      \
+    if ((slice)->len + (additional) > (slice)->cap) {          \
+        SLICE_RESIZE(slice,                                    \
+            Type,                                              \
+            ((slice)->cap * 2 > (slice)->len + (additional)) ? (slice)->cap * 2 : (slice)->len + (additional)); \
+    }                                                          \
 })
 
 #endif
diff --git a/parser.c b/parser.c
index 5672ded..5649d16 100644
--- a/parser.c
+++ b/parser.c
@@ -14,21 +14,25 @@ typedef struct {
     } payload;
 } FieldState;
 
-int parseRoot(Parser* p)
+void parseRoot(Parser* p)
 {
     p->nodes.tags[p->nodes.len++] = AST_NODE_TAG_ROOT;
     p->nodes.main_tokens[p->nodes.len] = 0;
 
     // members root_members = parseContainerMembers(p);
-
-    return 0;
 }
 
 static AstTokenIndex nextToken(Parser* p) { return p->tok_i++; }
 
-static AstTokenIndex eatToken(Parser* p, TokenizerTag tag)
+static AstTokenIndex eatToken(Parser* p, TokenizerTag tag, bool* ok)
 {
-    return (p->token_tags[p->tok_i] == tag) ? nextToken(p) : -1;
+    if (p->token_tags[p->tok_i] == tag) {
+        *ok = true;
+        return nextToken(p);
+    } else {
+        *ok = false;
+        return (AstTokenIndex) {};
+    }
 }
 
 static Members parseContainerMembers(Parser* p)
@@ -36,7 +40,8 @@ static Members parseContainerMembers(Parser* p)
     const uint32_t scratch_top = p->scratch.len;
     Members res = (Members) {};
    // ast_token_index last_field;
-    while (eatToken(p, TOKENIZER_TAG_CONTAINER_DOC_COMMENT) != -1)
-        ;
+    bool ok = true;
+    while (ok)
+        eatToken(p, TOKENIZER_TAG_CONTAINER_DOC_COMMENT, &ok);
 
     // bool trailing = false;
@@ -166,7 +171,7 @@ static Members parseContainerMembers(Parser* p)
    case TOKENIZER_TAG_KEYWORD_VAR:
    case TOKENIZER_TAG_KEYWORD_VOLATILE:
    case TOKENIZER_TAG_KEYWORD_WHILE:;
-        const char* str = tokenizerTagString[p->token_tags[p->tok_i]];
+        const char* str = tokenizerGetTagString(p->token_tags[p->tok_i]);
         fprintf(stderr, "keyword %s not implemented\n", str);
         exit(1);
    case TOKENIZER_TAG_KEYWORD_PUB:
diff --git a/parser.h b/parser.h
index 18cae29..3929062 100644
--- a/parser.h
+++ b/parser.h
@@ -37,6 +37,6 @@ typedef struct Parser {
 Parser* parserInit(const char* source, uint32_t len);
 void parserDeinit(Parser* parser);
 
-int parseRoot(Parser* parser);
+void parseRoot(Parser* parser);
 
 #endif
diff --git a/tokenizer.c b/tokenizer.c
index 0f9d7f3..74160bc 100644
--- a/tokenizer.c
+++ b/tokenizer.c
@@ -10,6 +10,15 @@ typedef struct {
     TokenizerTag tag;
 } KeywordMap;
 
+const char* tokenizerGetTagString(TokenizerTag tag)
+{
+    switch (tag) {
+    TOKENIZER_FOREACH_TAG_ENUM(TOKENIZER_GENERATE_CASE)
+    default:
+        return "UNKNOWN";
+    }
+}
+
 const KeywordMap keywords[] = {
     { "addrspace", TOKENIZER_TAG_KEYWORD_ADDRSPACE },
     { "align", TOKENIZER_TAG_KEYWORD_ALIGN },
diff --git a/tokenizer.h b/tokenizer.h
index 73604b5..dea9665 100644
--- a/tokenizer.h
+++ b/tokenizer.h
@@ -4,7 +4,7 @@
 #include
 #include
 
-#define FOREACH_TOKENIZER_TAG_ENUM(TAG) \
+#define TOKENIZER_FOREACH_TAG_ENUM(TAG) \
     TAG(TOKENIZER_TAG_INVALID) \
     TAG(TOKENIZER_TAG_INVALID_PERIODASTERISKS) \
     TAG(TOKENIZER_TAG_IDENTIFIER) \
@@ -128,16 +128,15 @@
     TAG(TOKENIZER_TAG_KEYWORD_VOLATILE) \
     TAG(TOKENIZER_TAG_KEYWORD_WHILE)
 
-#define GENERATE_ENUM(ENUM) ENUM,
-#define GENERATE_STRING(STRING) #STRING,
+#define TOKENIZER_GENERATE_ENUM(ENUM) ENUM,
+#define TOKENIZER_GENERATE_CASE(ENUM) case ENUM: return #ENUM;
 
+// First define the enum
 typedef enum {
-    FOREACH_TOKENIZER_TAG_ENUM(GENERATE_ENUM)
+    TOKENIZER_FOREACH_TAG_ENUM(TOKENIZER_GENERATE_ENUM)
 } TokenizerTag;
 
-static const char* tokenizerTagString[] = {
-    FOREACH_TOKENIZER_TAG_ENUM(GENERATE_STRING)
-};
+const char* tokenizerGetTagString(TokenizerTag tag);
 
 typedef enum {
     TOKENIZER_STATE_START,
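
Not part of the patch: a minimal usage sketch of the two APIs this change introduces, the struct-of-arrays append helpers in ast.c and the tokenizerGetTagString() lookup declared in tokenizer.h. It assumes the astNodeList* functions are made visible to callers (the patch defines them in ast.c without adding declarations to ast.h), that a zero-initialized AstNodeList is a valid empty list, and that the three arrays are released with plain free(), since no deinit helper exists in this diff. Field names, AST_NODE_TAG_ROOT, and TOKENIZER_TAG_INVALID are taken from the changes above.

#include <stdio.h>
#include <stdlib.h>

#include "ast.h"
#include "tokenizer.h"

int main(void)
{
    /* tokenizerGetTagString() replaces the old per-translation-unit
       tokenizerTagString[] array with a single switch-based lookup. */
    printf("tag 0 -> %s\n", tokenizerGetTagString(TOKENIZER_TAG_INVALID));

    /* Hypothetical direct use of the append helpers; a zero-initialized
       list is assumed to be empty (len == cap == 0, NULL arrays), so the
       first append grows cap from 0 to 1 via realloc. */
    AstNodeList nodes = { 0 };
    astNodeListAppend(&nodes, AST_NODE_TAG_ROOT, 0, (AstData) { 0 });

    printf("len=%u cap=%u\n", (unsigned)nodes.len, (unsigned)nodes.cap);

    /* No astNodeListDeinit exists in this patch; free the three arrays directly. */
    free(nodes.tags);
    free(nodes.main_tokens);
    free(nodes.datas);
    return 0;
}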