more macros
ast.c (26 changed lines)
@@ -9,6 +9,32 @@
 
 #define N 1024
 
+void astNodeListEnsureCapacity(AstNodeList* list, uint32_t additional)
+{
+    const uint32_t new_len = list->len + additional;
+    if (new_len <= list->cap) {
+        return;
+    }
+
+    const uint32_t new_cap = new_len > list->cap * 2 ? new_len : list->cap * 2;
+    list->tags = realloc(list->tags, new_cap * sizeof(AstNodeTag));
+    list->main_tokens = realloc(list->main_tokens, new_cap * sizeof(AstTokenIndex));
+    list->datas = realloc(list->datas, new_cap * sizeof(AstData));
+    if (!list->tags || !list->main_tokens || !list->datas)
+        exit(1);
+    list->cap = new_cap;
+}
+
+void astNodeListAppend(AstNodeList* list, AstNodeTag tag,
+    AstTokenIndex main_token, AstData data)
+{
+    astNodeListEnsureCapacity(list, 1);
+    list->tags[list->len] = tag;
+    list->main_tokens[list->len] = main_token;
+    list->datas[list->len] = data;
+    list->len++;
+}
+
 Ast astParse(const char* source, const uint32_t len)
 {
     uint32_t estimated_token_count = len / 8;
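For context, a usage sketch of the new helpers (not part of the commit). It assumes ast.h exposes AstNodeList, AstNodeTag, AstData, and the two functions above, and that a zero-initialized list is a valid starting state, which holds here because realloc(NULL, n) behaves like malloc(n):

#include "ast.h"      /* assumed to declare AstNodeList, AstNodeTag, AstData and the helpers */
#include <inttypes.h>
#include <stdio.h>

int main(void)
{
    AstNodeList list = { 0 };  /* len = 0, cap = 0, all three arrays start as NULL */
    astNodeListAppend(&list, AST_NODE_TAG_ROOT, 0, (AstData) { 0 });
    /* the first append grows cap from 0 to the requested length (1);
       later appends double it instead */
    printf("len=%" PRIu32 " cap=%" PRIu32 "\n", list.len, list.cap);
    return 0;
}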
ast.h (2 changed lines)
@@ -481,7 +481,7 @@ typedef enum {
     AST_NODE_TAG_ERROR_UNION,
 } AstNodeTag;
 
-typedef int32_t AstTokenIndex;
+typedef uint32_t AstTokenIndex;
 typedef uint32_t AstNodeIndex;
 typedef uint32_t AstIndex;
 
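AstTokenIndex becoming unsigned is presumably what motivates the eatToken change in parser.c below: every uint32_t value is now a representable index, so "no token" is reported through a bool* out-parameter instead of a -1 sentinel. A standalone illustration (not from the commit) of what the old sentinel turns into:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t AstTokenIndex;

int main(void)
{
    /* -1 silently wraps to UINT32_MAX, which is itself a valid index value */
    AstTokenIndex none = (AstTokenIndex)-1;
    printf("%" PRIu32 "\n", none); /* prints 4294967295 */
    return 0;
}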
common.h (4 changed lines)
@@ -23,7 +23,9 @@
 
 #define SLICE_ENSURE_CAPACITY(slice, Type, additional) ({ \
     if ((slice)->len + (additional) > (slice)->cap) { \
-        SLICE_RESIZE(slice, Type, ((slice)->cap * 2 > (slice)->len + (additional)) ? (slice)->cap * 2 : (slice)->len + (additional)); \
+        SLICE_RESIZE(slice, \
+            Type, \
+            ((slice)->cap * 2 > (slice)->len + (additional)) ? (slice)->cap * 2 : (slice)->len + (additional)); \
     } \
 })
 
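A minimal compilable sketch of how SLICE_ENSURE_CAPACITY might be used. The macro relies on the GCC/Clang ({ ... }) statement-expression extension; SLICE_RESIZE and the slice struct are not shown in this commit, so the versions below are assumptions made only to keep the sketch self-contained:

#include <stdint.h>
#include <stdlib.h>

/* assumed definition, not from common.h */
#define SLICE_RESIZE(slice, Type, new_cap) ({                             \
    (slice)->data = realloc((slice)->data, (new_cap) * sizeof(Type));     \
    if (!(slice)->data)                                                   \
        exit(1);                                                          \
    (slice)->cap = (new_cap);                                             \
})

/* copied from the hunk above */
#define SLICE_ENSURE_CAPACITY(slice, Type, additional) ({ \
    if ((slice)->len + (additional) > (slice)->cap) { \
        SLICE_RESIZE(slice, \
            Type, \
            ((slice)->cap * 2 > (slice)->len + (additional)) ? (slice)->cap * 2 : (slice)->len + (additional)); \
    } \
})

typedef struct {
    uint32_t* data; /* field names are assumptions */
    uint32_t len;
    uint32_t cap;
} U32Slice;

int main(void)
{
    U32Slice s = { 0 };
    SLICE_ENSURE_CAPACITY(&s, uint32_t, 16); /* grows cap to 16 */
    s.data[s.len++] = 42;
    free(s.data);
    return 0;
}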
parser.c (19 changed lines)
@@ -14,21 +14,25 @@ typedef struct {
     } payload;
 } FieldState;
 
-int parseRoot(Parser* p)
+void parseRoot(Parser* p)
 {
     p->nodes.tags[p->nodes.len++] = AST_NODE_TAG_ROOT;
     p->nodes.main_tokens[p->nodes.len] = 0;
 
     // members root_members = parseContainerMembers(p);
-
-    return 0;
 }
 
 static AstTokenIndex nextToken(Parser* p) { return p->tok_i++; }
 
-static AstTokenIndex eatToken(Parser* p, TokenizerTag tag)
+static AstTokenIndex eatToken(Parser* p, TokenizerTag tag, bool* ok)
 {
-    return (p->token_tags[p->tok_i] == tag) ? nextToken(p) : -1;
+    if (p->token_tags[p->tok_i] == tag) {
+        *ok = true;
+        return nextToken(p);
+    } else {
+        *ok = false;
+        return (AstTokenIndex) {};
+    }
 }
 
 static Members parseContainerMembers(Parser* p)
@@ -36,7 +40,8 @@ static Members parseContainerMembers(Parser* p)
     const uint32_t scratch_top = p->scratch.len;
     Members res = (Members) {};
     // ast_token_index last_field;
-    while (eatToken(p, TOKENIZER_TAG_CONTAINER_DOC_COMMENT) != -1)
+    bool ok;
+    while (eatToken(p, TOKENIZER_TAG_CONTAINER_DOC_COMMENT, &ok) && ok)
         ;
 
     // bool trailing = false;
@@ -166,7 +171,7 @@ static Members parseContainerMembers(Parser* p)
     case TOKENIZER_TAG_KEYWORD_VAR:
     case TOKENIZER_TAG_KEYWORD_VOLATILE:
     case TOKENIZER_TAG_KEYWORD_WHILE:;
-        const char* str = tokenizerTagString[p->token_tags[p->tok_i]];
+        const char* str = tokenizerGetTagString(p->token_tags[p->tok_i]);
        fprintf(stderr, "keyword %s not implemented\n", str);
        exit(1);
     case TOKENIZER_TAG_KEYWORD_PUB:
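With the new signature, the returned index is only meaningful when *ok comes back true (the doc-comment loop above uses exactly that). A hypothetical companion helper, not part of this commit, shows the intended calling pattern; it assumes it sits in parser.c next to eatToken, where stdio, stdlib, and stdbool are already in scope:

static AstTokenIndex expectToken(Parser* p, TokenizerTag tag)
{
    bool ok;
    const AstTokenIndex tok = eatToken(p, tag, &ok);
    if (!ok) {
        /* tokenizerGetTagString is the helper this commit adds below */
        fprintf(stderr, "expected %s\n", tokenizerGetTagString(tag));
        exit(1);
    }
    return tok;
}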
parser.h (2 changed lines)
@@ -37,6 +37,6 @@ typedef struct Parser {
 
 Parser* parserInit(const char* source, uint32_t len);
 void parserDeinit(Parser* parser);
-int parseRoot(Parser* parser);
+void parseRoot(Parser* parser);
 
 #endif
tokenizer.c
@@ -10,6 +10,15 @@ typedef struct {
     TokenizerTag tag;
 } KeywordMap;
 
+const char* tokenizerGetTagString(TokenizerTag tag)
+{
+    switch (tag) {
+        TOKENIZER_FOREACH_TAG_ENUM(TOKENIZER_GENERATE_CASE)
+    default:
+        return "UNKNOWN";
+    }
+}
+
 const KeywordMap keywords[] = {
     { "addrspace", TOKENIZER_TAG_KEYWORD_ADDRSPACE },
     { "align", TOKENIZER_TAG_KEYWORD_ALIGN },
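After preprocessing, the single TOKENIZER_FOREACH_TAG_ENUM(TOKENIZER_GENERATE_CASE) line inside that switch expands to one case per tag (the generator macros are defined in tokenizer.h below); roughly:

/* approximate preprocessor output for the switch body, first few tags only */
switch (tag) {
case TOKENIZER_TAG_INVALID: return "TOKENIZER_TAG_INVALID";
case TOKENIZER_TAG_INVALID_PERIODASTERISKS: return "TOKENIZER_TAG_INVALID_PERIODASTERISKS";
case TOKENIZER_TAG_IDENTIFIER: return "TOKENIZER_TAG_IDENTIFIER";
/* ... one case per TAG(...) entry in the list ... */
default:
    return "UNKNOWN";
}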
tokenizer.h (13 changed lines)
@@ -4,7 +4,7 @@
 #include <stdbool.h>
 #include <stdint.h>
 
-#define FOREACH_TOKENIZER_TAG_ENUM(TAG) \
+#define TOKENIZER_FOREACH_TAG_ENUM(TAG) \
     TAG(TOKENIZER_TAG_INVALID) \
     TAG(TOKENIZER_TAG_INVALID_PERIODASTERISKS) \
     TAG(TOKENIZER_TAG_IDENTIFIER) \
@@ -128,16 +128,15 @@
     TAG(TOKENIZER_TAG_KEYWORD_VOLATILE) \
     TAG(TOKENIZER_TAG_KEYWORD_WHILE)
 
-#define GENERATE_ENUM(ENUM) ENUM,
-#define GENERATE_STRING(STRING) #STRING,
+#define TOKENIZER_GENERATE_ENUM(ENUM) ENUM,
+#define TOKENIZER_GENERATE_CASE(ENUM) case ENUM: return #ENUM;
 
 // First define the enum
 typedef enum {
-    FOREACH_TOKENIZER_TAG_ENUM(GENERATE_ENUM)
+    TOKENIZER_FOREACH_TAG_ENUM(TOKENIZER_GENERATE_ENUM)
 } TokenizerTag;
 
-static const char* tokenizerTagString[] = {
-    FOREACH_TOKENIZER_TAG_ENUM(GENERATE_STRING)
-};
+const char* tokenizerGetTagString(TokenizerTag tag);
+
 typedef enum {
     TOKENIZER_STATE_START,
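Expanded, the renamed X-macro list drives both the enum (via TOKENIZER_GENERATE_ENUM) and the new name lookup (via TOKENIZER_GENERATE_CASE), which lets the header drop the static tokenizerTagString array (previously one copy per translation unit that included it) in favor of a single function. A standalone toy version of the same pattern, with made-up names rather than the project's tag list:

#include <stdio.h>

#define FOREACH_COLOR(X) \
    X(COLOR_RED)         \
    X(COLOR_GREEN)       \
    X(COLOR_BLUE)

#define GEN_ENUM(E) E,
#define GEN_CASE(E) case E: return #E;

typedef enum {
    FOREACH_COLOR(GEN_ENUM)     /* expands to COLOR_RED, COLOR_GREEN, COLOR_BLUE, */
} Color;

static const char* colorString(Color c)
{
    switch (c) {
        FOREACH_COLOR(GEN_CASE) /* expands to case COLOR_RED: return "COLOR_RED"; ... */
    default:
        return "UNKNOWN";
    }
}

int main(void)
{
    printf("%s\n", colorString(COLOR_GREEN)); /* prints COLOR_GREEN */
    return 0;
}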