/* zig0/parser.c — recursive-descent parser (875 lines, 26 KiB, C). */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "ast.h"
#include "common.h"
#include "parser.h"
const AstNodeIndex null_node = 0; // sentinel: node index 0 means "absent" throughout this parser
// Tracks container-field ordering while scanning members: once a top-level
// decl follows a field, any later field is an error (see parseContainerMembers).
typedef struct {
    enum {
        FIELD_STATE_NONE, // no container field seen yet
        FIELD_STATE_SEEN, // at least one field seen, no decl after it yet
        FIELD_STATE_END // sets "end": a decl appeared after the last field
    } tag;
    union {
        uint32_t end; // node index of the decl that ended the field section
    } payload;
} FieldState;
// Result of parsing a parameter list: either a single optional node
// (zero_or_one, 0 meaning "none") or a multi-element range.
typedef struct {
    enum {
        SMALL_SPAN_ZERO_OR_ONE,
        SMALL_SPAN_MULTI
    } tag;
    union {
        AstNodeIndex zero_or_one; // 0 = no param, else the single param node
        AstSubRange multi; // range of param nodes
    } payload;
} SmallSpan;
// Install the root node at the front of the node list.
// The original wrote the tag at nodes.len and then (because of the embedded
// post-increment) wrote the main token one slot PAST the root node, leaving
// the root's main token uninitialized. Write both fields to the same slot,
// then advance the length.
void parseRoot(Parser* p) {
    p->nodes.tags[p->nodes.len] = AST_NODE_TAG_ROOT;
    p->nodes.main_tokens[p->nodes.len] = 0;
    p->nodes.len++;
    // members root_members = parseContainerMembers(p);
}
// Return the current token's index and advance the cursor by one.
static AstTokenIndex nextToken(Parser* p) { return p->tok_i++; }
// Consume the current token iff its tag matches `tag`.
// On a match: advances the cursor, reports true through `ok` (when non-NULL)
// and returns the consumed token's index. On a mismatch: consumes nothing,
// reports false, returns 0.
static AstTokenIndex eatToken(Parser* p, TokenizerTag tag, bool* ok) {
    const bool matches = (p->token_tags[p->tok_i] == tag);
    if (ok != NULL)
        *ok = matches;
    return matches ? nextToken(p) : 0;
}
// Skip over any run of doc-comment tokens; their contents are ignored.
static void eatDocComments(Parser* p) {
    bool matched = true;
    while (matched)
        eatToken(p, TOKENIZER_TAG_DOC_COMMENT, &matched);
}
// Consume a required ';'; a missing semicolon is a fatal parse error.
static void expectSemicolon(Parser* p) {
    bool found;
    eatToken(p, TOKENIZER_TAG_SEMICOLON, &found);
    if (!found) {
        fprintf(stderr, "expected semicolon\n");
        exit(1);
    }
}
// Fill a previously reserved slot `i` with the finished node and return `i`.
static AstNodeIndex setNode(Parser* p, uint32_t i, AstNodeItem item) {
    AstNodeList* nodes = &p->nodes;
    nodes->tags[i] = item.tag;
    nodes->main_tokens[i] = item.main_token;
    nodes->datas[i] = item.data;
    return i;
}
// Grow the node list's three parallel arrays so `additional` more items fit.
// Growth policy: double the capacity, or jump straight to the required
// length when doubling is not enough. Aborts the process on OOM (so the
// leaked old blocks on realloc failure are irrelevant).
static void astNodeListEnsureCapacity(AstNodeList* list, uint32_t additional) {
    const uint32_t required = list->len + additional;
    if (required <= list->cap)
        return;
    uint32_t new_cap = list->cap * 2;
    if (new_cap < required)
        new_cap = required;
    list->tags = realloc(list->tags, new_cap * sizeof(AstNodeTag));
    list->main_tokens = realloc(list->main_tokens, new_cap * sizeof(AstTokenIndex));
    list->datas = realloc(list->datas, new_cap * sizeof(AstData));
    if (!list->tags || !list->main_tokens || !list->datas)
        exit(1);
    list->cap = new_cap;
}
// Append `item` as a new node, growing storage as needed; returns its index.
static AstNodeIndex addNode(AstNodeList* nodes, AstNodeItem item) {
    astNodeListEnsureCapacity(nodes, 1);
    const uint32_t index = nodes->len;
    nodes->tags[index] = item.tag;
    nodes->main_tokens[index] = item.main_token;
    nodes->datas[index] = item.data;
    nodes->len = index + 1;
    return index;
}
// Append `count` node indices to extra_data; returns the index of the first
// appended element. The original memcpy'd into `&p->extra_data.arr` — the
// address of the pointer FIELD, clobbering the slice header instead of
// writing into the array — and never advanced `len`, so successive calls
// would also overwrite each other. Copy into the array at the current
// length and bump it. (Assumes SLICE_ENSURE_CAPACITY only grows capacity,
// as parseBlock's manual `len +=` usage indicates.)
static AstNodeIndex addExtra(Parser* p, const AstNodeIndex* extra, uint32_t count) {
    const AstNodeIndex result = p->extra_data.len;
    SLICE_ENSURE_CAPACITY(AstNodeIndex, &p->extra_data, count);
    memcpy(&p->extra_data.arr[p->extra_data.len], extra, count * sizeof(AstNodeIndex));
    p->extra_data.len += count;
    return result;
}
static AstNodeIndex parseTypeExpr(Parser* p);
// Parse a required type expression; its absence is a fatal parse error.
// Every other fatal path in this file prints a diagnostic before exiting
// (expectSemicolon, parseFnProto, ...); the original exited silently here,
// leaving the user with no clue. Diagnose first for consistency.
static AstNodeIndex expectTypeExpr(Parser* p) {
    const AstNodeIndex node = parseTypeExpr(p);
    if (node == 0) {
        fprintf(stderr, "expected type expression\n");
        exit(1);
    }
    return node;
}
// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
// Returns null_node when no `align` keyword is present; actually parsing an
// alignment is unimplemented and fatal. Fixes the "alginment" typo in the
// diagnostic.
static AstNodeIndex parseByteAlign(Parser* p) {
    bool ok;
    eatToken(p, TOKENIZER_TAG_KEYWORD_ALIGN, &ok);
    if (!ok)
        return null_node;
    fprintf(stderr, "parseByteAlign cannot parse alignment\n");
    exit(1);
    return 0; // tcc
}
// AddrSpace <- KEYWORD_addrspace LPAREN Expr RPAREN
// Returns null_node when absent; an actual addrspace annotation is fatal.
static AstNodeIndex parseAddrSpace(Parser* p) {
    bool saw_keyword;
    eatToken(p, TOKENIZER_TAG_KEYWORD_ADDRSPACE, &saw_keyword);
    if (!saw_keyword)
        return null_node;
    fprintf(stderr, "parseAddrSpace cannot parse addrspace\n");
    exit(1);
    return 0; // tcc
}
// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
// Returns null_node when absent; an actual linksection annotation is fatal.
static AstNodeIndex parseLinkSection(Parser* p) {
    bool saw_keyword;
    eatToken(p, TOKENIZER_TAG_KEYWORD_LINKSECTION, &saw_keyword);
    if (!saw_keyword)
        return null_node;
    fprintf(stderr, "parseLinkSection cannot parse linksection\n");
    exit(1);
    return 0; // tcc
}
// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
// Returns null_node when absent; an actual callconv annotation is fatal.
static AstNodeIndex parseCallconv(Parser* p) {
    bool saw_keyword;
    eatToken(p, TOKENIZER_TAG_KEYWORD_CALLCONV, &saw_keyword);
    if (!saw_keyword)
        return null_node;
    fprintf(stderr, "parseCallconv cannot parse callconv\n");
    exit(1);
    return 0; // tcc
}
// Optional align/value sub-expressions of a container field.
// NOTE(review): not referenced anywhere in this file — presumably kept for a
// future expectContainerField extension; confirm before removing.
typedef struct {
    AstNodeIndex align_expr, value_expr;
} NodeContainerField;
// ContainerField <- KEYWORD_comptime? (IDENTIFIER COLON)? TypeExpr ByteAlign? (EQUAL Expr)?
// Parses one container field (e.g. `name: T` or `name: T align(N)`).
// Default values (`= expr`) are unsupported and fatal.
static AstNodeIndex expectContainerField(Parser* p) {
    eatToken(p, TOKENIZER_TAG_KEYWORD_COMPTIME, NULL); // optional `comptime`, ignored
    // main_token points at the field name (or at the type when unnamed).
    const AstTokenIndex main_token = p->tok_i;
    // The `name:` prefix is optional (tuple-like fields are just a type).
    if (p->token_tags[p->tok_i] == TOKENIZER_TAG_IDENTIFIER && p->token_tags[p->tok_i + 1] == TOKENIZER_TAG_COLON)
        p->tok_i += 2;
    const AstNodeIndex type_expr = expectTypeExpr(p);
    const AstNodeIndex align_expr = parseByteAlign(p);
    bool ok;
    eatToken(p, TOKENIZER_TAG_EQUAL, &ok);
    if (ok) {
        fprintf(stderr, "expectContainerField does not support expr\n");
        exit(1);
    }
    // Always 0 for now, so the AST_NODE_TAG_CONTAINER_FIELD branch below is
    // currently unreachable; kept for when default values get implemented.
    const AstNodeIndex value_expr = 0;
    if (align_expr == 0) {
        // Common case: `name: T` — type in lhs, (absent) value in rhs.
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = AST_NODE_TAG_CONTAINER_FIELD_INIT,
                .main_token = main_token,
                .data = {
                    .lhs = type_expr,
                    .rhs = value_expr,
                },
            });
    } else if (value_expr == 0) {
        // `name: T align(N)` — type in lhs, align expr in rhs.
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = AST_NODE_TAG_CONTAINER_FIELD_ALIGN,
                .main_token = main_token,
                .data = {
                    .lhs = type_expr,
                    .rhs = align_expr,
                },
            });
    } else {
        // Both align and value present: spill the pair into extra_data.
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = AST_NODE_TAG_CONTAINER_FIELD,
                .main_token = main_token,
                .data = {
                    .lhs = type_expr,
                    .rhs = addExtra(p, (AstNodeIndex[]) { align_expr, value_expr }, 2),
                },
            });
    }
}
// PrimaryTypeExpr — zig0 only supports bare identifiers. Any other
// recognized primary-type starter is fatal; an unrecognized token yields
// null_node so the caller can decide how to fail.
static AstNodeIndex parsePrimaryTypeExpr(Parser* p) {
    const TokenizerTag tok = p->token_tags[p->tok_i];
    switch (tok) {
    // Recognized but unimplemented primary type expressions: fatal.
    case TOKENIZER_TAG_CHAR_LITERAL:
    case TOKENIZER_TAG_NUMBER_LITERAL:
    case TOKENIZER_TAG_KEYWORD_UNREACHABLE:
    case TOKENIZER_TAG_KEYWORD_ANYFRAME:
    case TOKENIZER_TAG_STRING_LITERAL:
    case TOKENIZER_TAG_BUILTIN:
    case TOKENIZER_TAG_KEYWORD_FN:
    case TOKENIZER_TAG_KEYWORD_IF:
    case TOKENIZER_TAG_KEYWORD_SWITCH:
    case TOKENIZER_TAG_KEYWORD_EXTERN:
    case TOKENIZER_TAG_KEYWORD_PACKED:
    case TOKENIZER_TAG_KEYWORD_STRUCT:
    case TOKENIZER_TAG_KEYWORD_OPAQUE:
    case TOKENIZER_TAG_KEYWORD_ENUM:
    case TOKENIZER_TAG_KEYWORD_UNION:
    case TOKENIZER_TAG_KEYWORD_COMPTIME:
    case TOKENIZER_TAG_MULTILINE_STRING_LITERAL_LINE:
        fprintf(stderr, "parsePrimaryTypeExpr does not support %s\n", tokenizerGetTagString(tok));
        exit(1);
    case TOKENIZER_TAG_IDENTIFIER:
        // NOTE(review): `ident:` here presumably starts a labeled block/loop
        // rather than a type — confirm against the upstream grammar.
        if (p->token_tags[p->tok_i + 1] == TOKENIZER_TAG_COLON) {
            fprintf(stderr, "parsePrimaryTypeExpr does not support identifier followed by colon\n");
            exit(1);
        }
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = AST_NODE_TAG_IDENTIFIER,
                .main_token = nextToken(p),
                .data = {} });
    // More unimplemented starters (loops, anon literals, errors, grouping).
    case TOKENIZER_TAG_KEYWORD_INLINE:
    case TOKENIZER_TAG_KEYWORD_FOR:
    case TOKENIZER_TAG_KEYWORD_WHILE:
    case TOKENIZER_TAG_PERIOD:
    case TOKENIZER_TAG_KEYWORD_ERROR:
    case TOKENIZER_TAG_L_PAREN:
        fprintf(stderr, "parsePrimaryTypeExpr does not support %s\n", tokenizerGetTagString(tok));
        exit(1);
    default:
        return null_node;
    }
}
// SuffixOp — indexing, dereference and field access are all unimplemented:
// recognizing one is fatal; anything else means "no suffix op here".
static AstNodeIndex parseSuffixOp(Parser* p) {
    switch (p->token_tags[p->tok_i]) {
    case TOKENIZER_TAG_L_BRACKET:
    case TOKENIZER_TAG_PERIOD_ASTERISK:
    case TOKENIZER_TAG_INVALID_PERIODASTERISKS:
    case TOKENIZER_TAG_PERIOD:
        fprintf(stderr, "parseSuffixOp does not support %s\n", tokenizerGetTagString(p->token_tags[p->tok_i]));
        exit(1);
    default:
        return null_node;
    }
}
// SuffixExpr <- KEYWORD_async? PrimaryTypeExpr (SuffixOp / FnCallArguments)*
static AstNodeIndex parseSuffixExpr(Parser* p) {
    bool ok;
    eatToken(p, TOKENIZER_TAG_KEYWORD_ASYNC, &ok);
    if (ok) {
        fprintf(stderr, "async not supported\n");
        exit(1);
    }
    AstNodeIndex res = parsePrimaryTypeExpr(p);
    if (res == 0)
        return res;
    while (true) {
        const AstNodeIndex suffix_op = parseSuffixOp(p);
        // NOTE(review): parseSuffixOp either exits fatally or returns
        // null_node, so this branch is currently unreachable; it also
        // replaces `res` instead of chaining lhs into the suffix node —
        // revisit when suffix ops get implemented.
        if (suffix_op != 0) {
            res = suffix_op;
            continue;
        }
        // A '(' would begin a call's argument list, which is unsupported.
        eatToken(p, TOKENIZER_TAG_L_PAREN, &ok);
        if (ok) {
            fprintf(stderr, "parseSuffixExpr does not support expr with parens\n");
            exit(1);
        }
        return res;
    }
}
// Consume the current token, which the grammar REQUIRES to be `tag`.
// The original was a byte-for-byte duplicate of eatToken(): on a mismatch it
// silently returned 0, so callers passing ok == NULL (e.g. parseBlock's
// closing '}') kept parsing a desynchronized token stream. Consistent with
// the "expect" helpers in this file (expectSemicolon, expectTypeExpr), a
// failed expectation is a fatal parse error: diagnose and exit.
static AstNodeIndex expectToken(Parser* p, TokenizerTag tag, bool* ok) {
    if (p->token_tags[p->tok_i] == tag) {
        if (ok != NULL)
            *ok = true;
        return nextToken(p);
    }
    fprintf(stderr, "expected %s, got %s\n",
        tokenizerGetTagString(tag),
        tokenizerGetTagString(p->token_tags[p->tok_i]));
    exit(1);
    return 0; // tcc
}
// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
// Returns the plain suffix expression when no '!' follows it.
static AstNodeIndex parseErrorUnionExpr(Parser* p) {
    const AstNodeIndex lhs = parseSuffixExpr(p);
    if (lhs == 0)
        return null_node;
    bool has_bang;
    const AstNodeIndex bang_token = eatToken(p, TOKENIZER_TAG_BANG, &has_bang);
    if (!has_bang)
        return lhs;
    const AstNodeItem error_union = {
        .tag = AST_NODE_TAG_ERROR_UNION,
        .main_token = bang_token,
        .data = {
            .lhs = lhs,
            .rhs = expectTypeExpr(p),
        },
    };
    return addNode(&p->nodes, error_union);
}
// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
// Prefix type operators (?, anyframe, *, **, []) are unsupported and fatal.
static AstNodeIndex parseTypeExpr(Parser* p) {
    // Holds a token TAG: the original mis-declared it as AstNodeIndex (a
    // node index), which only happened to work because both are 32-bit.
    // Cf. parsePrimaryTypeExpr, which declares it correctly.
    const TokenizerTag tok = p->token_tags[p->tok_i];
    switch (tok) {
    case TOKENIZER_TAG_QUESTION_MARK:
    case TOKENIZER_TAG_KEYWORD_ANYFRAME:
    case TOKENIZER_TAG_ASTERISK:
    case TOKENIZER_TAG_ASTERISK_ASTERISK:
    case TOKENIZER_TAG_L_BRACKET:
        fprintf(stderr, "parseTypeExpr not supported for %s\n", tokenizerGetTagString(tok));
        exit(1);
    default:
        return parseErrorUnionExpr(p);
    }
}
// ParamDeclList — zig0 only accepts an empty parameter list `()`.
// Bug fix: on failure the original passed the value returned by eatToken —
// a token INDEX, which is 0 when the match fails — to tokenizerGetTagString,
// so the diagnostic printed the tag of token #0 instead of the offending
// token. Print the tag of the current (unconsumed) token instead.
static SmallSpan parseParamDeclList(Parser* p) {
    bool ok;
    eatToken(p, TOKENIZER_TAG_L_PAREN, &ok);
    if (!ok) {
        fprintf(stderr, "expected (, got %s\n", tokenizerGetTagString(p->token_tags[p->tok_i]));
        exit(1);
    }
    eatToken(p, TOKENIZER_TAG_R_PAREN, &ok);
    if (!ok) {
        fprintf(stderr, "expected ), got %s\n", tokenizerGetTagString(p->token_tags[p->tok_i]));
        exit(1);
    }
    // Zero params: payload.zero_or_one is zero-initialized (no param node).
    return (SmallSpan) {
        .tag = SMALL_SPAN_ZERO_OR_ONE,
    };
}
// Reserve a fresh node slot (to be completed later via setNode) and return
// its index. The original wrote the tag at nodes.len - 1 and never advanced
// len: it clobbered the most recently added node, and the "reserved" slot
// was immediately reused by the next addNode(). Claim a new slot exactly
// like addNode() does. (It also passed len + 1 as the *additional* count to
// ensure-capacity, over-reserving; 1 is what's needed.)
static uint32_t reserveNode(Parser* p, AstNodeTag tag) {
    astNodeListEnsureCapacity(&p->nodes, 1);
    p->nodes.tags[p->nodes.len] = tag;
    return p->nodes.len++;
}
// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign?
//            AddrSpace? LinkSection? CallConv? EXCLAMATIONMARK? TypeExpr
// Returns null_node when the next token is not `fn`. Only "simple" protos
// (no align/addrspace/linksection/callconv, <= 1 param) are representable.
static AstNodeIndex parseFnProto(Parser* p) {
    bool ok;
    AstNodeIndex fn_token = eatToken(p, TOKENIZER_TAG_KEYWORD_FN, &ok);
    if (!ok)
        return null_node;
    // Reserve the proto's slot up front so its index precedes its children.
    AstNodeIndex fn_proto_index = reserveNode(p, AST_NODE_TAG_FN_PROTO);
    eatToken(p, TOKENIZER_TAG_IDENTIFIER, NULL); // optional function name
    SmallSpan params = parseParamDeclList(p);
    const AstNodeIndex align_expr = parseByteAlign(p);
    const AstNodeIndex addrspace_expr = parseAddrSpace(p);
    const AstNodeIndex section_expr = parseLinkSection(p);
    const AstNodeIndex callconv_expr = parseCallconv(p);
    eatToken(p, TOKENIZER_TAG_BANG, NULL); // optional `!` (inferred error set)
    const AstNodeIndex return_type_expr = parseTypeExpr(p);
    if (align_expr == 0 && section_expr == 0 && callconv_expr == 0 && addrspace_expr == 0) {
        if (params.tag == SMALL_SPAN_ZERO_OR_ONE)
            return setNode(
                p,
                fn_proto_index,
                (AstNodeItem) {
                    .tag = AST_NODE_TAG_FN_PROTO_SIMPLE,
                    .main_token = fn_token,
                    .data = {
                        // 0 here: parseParamDeclList's compound literal
                        // zero-initializes the union payload.
                        .lhs = params.payload.zero_or_one,
                        .rhs = return_type_expr,
                    },
                });
    }
    fprintf(stderr, "parseFnProto does not support complex function decls\n");
    exit(1);
    return 0; // tcc
}
// BlockLabel <- IDENTIFIER COLON
// Returns the label's identifier token, or null_node when no label is here.
static AstTokenIndex parseBlockLabel(Parser* p) {
    const bool labeled = p->token_tags[p->tok_i] == TOKENIZER_TAG_IDENTIFIER
        && p->token_tags[p->tok_i + 1] == TOKENIZER_TAG_COLON;
    if (!labeled)
        return null_node;
    const AstTokenIndex identifier = p->tok_i;
    p->tok_i += 2; // skip IDENTIFIER COLON
    return identifier;
}
// ForStatement — unimplemented. Returns null_node when the next token is
// not `for`. Bug fix: the original printed the diagnostic but then FELL
// THROUGH and returned 0 with the `for` token already consumed; the caller
// (parseLoopStatement) treats 0 as "no for statement" and keeps parsing a
// desynchronized stream. Abort instead, like every other unsupported path.
static AstNodeIndex parseForStatement(Parser* p) {
    bool ok;
    const AstNodeIndex for_token = eatToken(p, TOKENIZER_TAG_KEYWORD_FOR, &ok);
    if (!ok)
        return null_node;
    (void)for_token;
    fprintf(stderr, "parseForStatement cannot parse for statements\n");
    exit(1);
    return 0; // tcc
}
// WhileStatement — unimplemented. Returns null_node when the next token is
// not `while`. Bug fix: like parseForStatement, the original printed the
// diagnostic but returned 0 with the `while` token consumed, which the
// caller misreads as "no while statement here". Abort instead.
static AstNodeIndex parseWhileStatement(Parser* p) {
    bool ok;
    const AstNodeIndex while_token = eatToken(p, TOKENIZER_TAG_KEYWORD_WHILE, &ok);
    if (!ok)
        return null_node;
    (void)while_token;
    fprintf(stderr, "parseWhileStatement cannot parse while statements\n");
    exit(1);
    return 0; // tcc
}
// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
// A lone `inline` with no following loop keyword is a fatal error.
static AstNodeIndex parseLoopStatement(Parser* p) {
    bool saw_inline;
    eatToken(p, TOKENIZER_TAG_KEYWORD_INLINE, &saw_inline);
    AstNodeIndex stmt = parseForStatement(p);
    if (stmt == 0)
        stmt = parseWhileStatement(p);
    if (stmt != 0)
        return stmt;
    if (!saw_inline)
        return null_node;
    fprintf(stderr, "If we've seen 'inline', there should have been a 'for' or 'while'\n");
    exit(1);
    return 0; // tcc
}
// AssignExpr — not implemented yet; reaching this is always fatal.
static AstNodeIndex parseAssignExpr(Parser* p) {
    (void)p; // unused until implemented
    fprintf(stderr, "parseAssignExpr not implemented\n");
    exit(1);
    return 0; // tcc
}
// VarDeclExprStatement — not implemented yet; reaching this is always fatal.
static AstNodeIndex expectVarDeclExprStatement(Parser* p) {
    (void)p; // unused until implemented
    fprintf(stderr, "expectVarDeclExprStatement not implemented\n");
    exit(1);
    return 0; // tcc
}
static AstNodeIndex parseLabeledStatement(Parser*);
// Statement — dispatches on the current token. Keyword statements are all
// unsupported (fatal); otherwise tries a labeled statement, then falls back
// to a var-decl/expr statement (inside blocks) or an assignment expression.
static AstNodeIndex expectStatement(Parser* p, bool allow_defer_var) {
    bool ok;
    if (eatToken(p, TOKENIZER_TAG_KEYWORD_COMPTIME, &ok), ok) {
        fprintf(stderr, "expectStatement: comptime keyword not supported\n");
        exit(1);
    }
    // Holds a token TAG: the original mis-declared it as AstNodeIndex,
    // which only worked because both are 32-bit integers.
    const TokenizerTag tok = p->token_tags[p->tok_i];
    switch (tok) {
    case TOKENIZER_TAG_KEYWORD_NOSUSPEND:
    case TOKENIZER_TAG_KEYWORD_SUSPEND:
    case TOKENIZER_TAG_KEYWORD_DEFER:
    case TOKENIZER_TAG_KEYWORD_ERRDEFER:
    case TOKENIZER_TAG_KEYWORD_IF:
    case TOKENIZER_TAG_KEYWORD_ENUM:
    case TOKENIZER_TAG_KEYWORD_STRUCT:
    case TOKENIZER_TAG_KEYWORD_UNION:;
        const char* tok_str = tokenizerGetTagString(tok);
        fprintf(stderr, "expectStatement does not support keyword %s\n", tok_str);
        exit(1);
    default:;
    }
    const AstNodeIndex labeled_statement = parseLabeledStatement(p);
    if (labeled_statement != 0)
        return labeled_statement;
    if (allow_defer_var) {
        return expectVarDeclExprStatement(p);
    } else {
        return parseAssignExpr(p);
    }
}
// Scope guard for the shared scratch buffer: records the length on entry so
// cleanupScratch() can truncate back to it when the guard variable goes out
// of scope (via __attribute__((cleanup)); see parseBlock).
typedef struct {
    AstNodeIndexSlice* scratch;
    uint32_t old_len;
} CleanupScratch;
// Restore the scratch buffer to the length captured in `c`.
static void cleanupScratch(CleanupScratch* c) { c->scratch->len = c->old_len; }
// Block <- LBRACE Statement* RBRACE
// Collects statement nodes in the scratch buffer (restored on every exit by
// the cleanup attribute), then encodes them as a BLOCK_TWO* node (<= 2
// statements stored inline) or a BLOCK node whose statements are spilled
// into extra_data with data = {span start, span end}.
static AstNodeIndex parseBlock(Parser* p) {
    bool ok;
    const AstNodeIndex lbrace = eatToken(p, TOKENIZER_TAG_L_BRACE, &ok);
    if (!ok)
        return null_node;
    CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch))) = {
        .scratch = &p->scratch,
        .old_len = p->scratch.len,
    };
    while (1) {
        if (p->token_tags[p->tok_i] == TOKENIZER_TAG_R_BRACE)
            break;
        // "const AstNodeIndex statement" once tinycc supports typeof_unqual (C23)
        AstNodeIndex statement = expectStatement(p, true);
        if (statement == 0)
            break;
        SLICE_APPEND(AstNodeIndex, &p->scratch, statement);
    }
    expectToken(p, TOKENIZER_TAG_R_BRACE, NULL);
    // Whether the last statement ended with ';' selects the *_SEMICOLON tag.
    // tok_i is now one past the '}', so the token preceding the '}' sits at
    // tok_i - 2. (The original computed `token_tags[tok_i] - 2`, subtracting
    // from the TAG instead of the INDEX.)
    const bool semicolon = (p->token_tags[p->tok_i - 2] == TOKENIZER_TAG_SEMICOLON);
    switch (p->scratch.len - scratch_top.old_len) {
    case 0:
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = AST_NODE_TAG_BLOCK_TWO,
                .main_token = lbrace,
                .data = {
                    .lhs = 0,
                    .rhs = 0,
                },
            });
    case 1:
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = semicolon ? AST_NODE_TAG_BLOCK_TWO_SEMICOLON : AST_NODE_TAG_BLOCK_TWO,
                .main_token = lbrace,
                .data = {
                    .lhs = p->scratch.arr[scratch_top.old_len],
                    .rhs = 0,
                },
            });
    case 2:
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = semicolon ? AST_NODE_TAG_BLOCK_TWO_SEMICOLON : AST_NODE_TAG_BLOCK_TWO,
                .main_token = lbrace,
                .data = {
                    .lhs = p->scratch.arr[scratch_top.old_len],
                    .rhs = p->scratch.arr[scratch_top.old_len + 1],
                },
            });
    default:;
        // Spill the statement list into extra_data and store the span.
        // The original ensured capacity on the WRONG list (scratch), copied
        // into extra_data unchecked, and stored scratch CONTENTS as lhs/rhs
        // — including an out-of-bounds read at scratch.arr[scratch.len].
        const uint32_t count = p->scratch.len - scratch_top.old_len;
        const uint32_t span_start = p->extra_data.len;
        SLICE_ENSURE_CAPACITY(AstNodeIndex, &p->extra_data, count);
        memcpy(
            &p->extra_data.arr[p->extra_data.len],
            &p->scratch.arr[scratch_top.old_len],
            sizeof(AstNodeIndex) * count);
        p->extra_data.len += count;
        return addNode(
            &p->nodes,
            (AstNodeItem) {
                .tag = semicolon ? AST_NODE_TAG_BLOCK_SEMICOLON : AST_NODE_TAG_BLOCK,
                .main_token = lbrace,
                .data = {
                    .lhs = span_start,
                    .rhs = p->extra_data.len,
                },
            });
    }
    return 0;
}
// LabeledStatement <- BlockLabel? (Block / LoopStatement)
// A label that precedes neither a block nor a loop is a fatal error; a
// label attached to one is currently dropped (the statement node wins).
static AstNodeIndex parseLabeledStatement(Parser* p) {
    const AstNodeIndex label = parseBlockLabel(p);
    AstNodeIndex stmt = parseBlock(p);
    if (stmt == 0)
        stmt = parseLoopStatement(p);
    if (stmt != 0)
        return stmt;
    if (label != 0) {
        fprintf(stderr, "parseLabeledStatement does not support labels\n");
        exit(1);
    }
    return null_node;
}
// VarDeclProto <- (KEYWORD_const / KEYWORD_var) ...
// Returns null_node when neither keyword is present; actually parsing a
// variable declaration is unimplemented and fatal.
static AstNodeIndex parseVarDeclProto(Parser* p) {
    bool is_const;
    eatToken(p, TOKENIZER_TAG_KEYWORD_CONST, &is_const);
    if (!is_const) {
        bool is_var;
        eatToken(p, TOKENIZER_TAG_KEYWORD_VAR, &is_var);
        if (!is_var)
            return null_node;
    }
    fprintf(stderr, "parseVarDeclProto: parsing vars is not supported\n");
    exit(1);
    return 0; // tcc
}
// GlobalVarDecl — delegates to parseVarDeclProto; any actual declaration is
// fatal since var decls are unsupported. Returns null_node when absent.
static AstNodeIndex parseGlobalVarDecl(Parser* p) {
    if (parseVarDeclProto(p) == 0)
        return null_node;
    fprintf(stderr, "parseGlobalVarDecl does not support parsing var decls\n");
    exit(1);
    return 0; // tcc
}
// TopLevelDecl — optional extern/export/inline/noinline prefix, then a
// function prototype (followed by ';' or a body block) or a global var decl.
static AstNodeIndex expectTopLevelDecl(Parser* p) {
    // Tentatively consume a decl-modifier keyword; put it back otherwise.
    AstTokenIndex extern_export_inline_token = nextToken(p);
    switch (p->token_tags[extern_export_inline_token]) {
    case TOKENIZER_TAG_KEYWORD_EXTERN:
        eatToken(p, TOKENIZER_TAG_STRING_LITERAL, NULL); // optional lib name
        break;
    case TOKENIZER_TAG_KEYWORD_EXPORT:
    case TOKENIZER_TAG_KEYWORD_INLINE:
    case TOKENIZER_TAG_KEYWORD_NOINLINE:
        break;
    default:
        p->tok_i--; // not a modifier: un-consume
    }
    AstNodeIndex fn_proto = parseFnProto(p);
    if (fn_proto != 0) {
        switch (p->token_tags[p->tok_i]) {
        case TOKENIZER_TAG_SEMICOLON:
            // Prototype only (e.g. extern fn): consume ';' and finish.
            p->tok_i++;
            return fn_proto;
        case TOKENIZER_TAG_L_BRACE:;
            // Function definition: FN_DECL wraps the proto and its body.
            AstNodeIndex fn_decl_index = reserveNode(p, AST_NODE_TAG_FN_DECL);
            AstNodeIndex body_block = parseBlock(p);
            return setNode(
                p,
                fn_decl_index,
                (AstNodeItem) {
                    .tag = AST_NODE_TAG_FN_DECL,
                    .main_token = p->nodes.main_tokens[fn_proto],
                    .data = { .lhs = fn_proto, .rhs = body_block },
                });
        default:
            exit(1); // Expected semicolon or left brace
        }
    }
    eatToken(p, TOKENIZER_TAG_KEYWORD_THREADLOCAL, NULL); // optional, ignored
    AstNodeIndex var_decl = parseGlobalVarDecl(p);
    if (var_decl != 0) {
        return var_decl;
    }
    // assuming the program is correct...
    fprintf(stderr, "the next token should be usingnamespace, which is not supported\n");
    exit(1);
    return 0; // make tcc happy
}
// Error recovery: skip tokens until something that can plausibly start the
// next container member, tracking (, [, { nesting so member-like tokens
// inside bodies are ignored. Leaves tok_i positioned ON the recovery token
// (or just after a ','/';' terminator at nesting level 0).
void findNextContainerMember(Parser* p) {
    uint32_t level = 0;
    while (true) {
        AstTokenIndex tok = nextToken(p);
        switch (p->token_tags[tok]) {
        // Any of these can start a new top level declaration
        case TOKENIZER_TAG_KEYWORD_TEST:
        case TOKENIZER_TAG_KEYWORD_COMPTIME:
        case TOKENIZER_TAG_KEYWORD_PUB:
        case TOKENIZER_TAG_KEYWORD_EXPORT:
        case TOKENIZER_TAG_KEYWORD_EXTERN:
        case TOKENIZER_TAG_KEYWORD_INLINE:
        case TOKENIZER_TAG_KEYWORD_NOINLINE:
        case TOKENIZER_TAG_KEYWORD_USINGNAMESPACE:
        case TOKENIZER_TAG_KEYWORD_THREADLOCAL:
        case TOKENIZER_TAG_KEYWORD_CONST:
        case TOKENIZER_TAG_KEYWORD_VAR:
        case TOKENIZER_TAG_KEYWORD_FN:
            if (level == 0) {
                p->tok_i--; // leave the starter for the caller
                return;
            }
            break;
        case TOKENIZER_TAG_IDENTIFIER:
            // `ident,` at top level looks like an enum-style field.
            if (p->token_tags[tok + 1] == TOKENIZER_TAG_COMMA && level == 0) {
                p->tok_i--;
                return;
            }
            break;
        case TOKENIZER_TAG_COMMA:
        case TOKENIZER_TAG_SEMICOLON:
            // This decl was likely meant to end here
            if (level == 0)
                return;
            break;
        case TOKENIZER_TAG_L_PAREN:
        case TOKENIZER_TAG_L_BRACKET:
        case TOKENIZER_TAG_L_BRACE:
            level++;
            break;
        case TOKENIZER_TAG_R_PAREN:
        case TOKENIZER_TAG_R_BRACKET:
            if (level != 0) // tolerate unbalanced closers
                level--;
            break;
        case TOKENIZER_TAG_R_BRACE:
            if (level == 0) {
                // end of container, exit
                p->tok_i--;
                return;
            }
            level--;
            break;
        case TOKENIZER_TAG_EOF:
            p->tok_i--; // keep EOF for the caller
            return;
        default:
            break;
        }
    }
}
// ContainerMembers — alternates container fields and top-level decls until
// '}' or EOF. Member node indices accumulate in the scratch buffer; up to
// two are returned inline in Members.lhs/rhs, larger lists are spilled into
// extra_data with lhs/rhs holding the span (matching upstream Zig's Ast).
// Fixes vs. the original:
//  * the container-doc-comment loop tested `eatToken(...) && ok` — eatToken
//    returns the token INDEX, which is 0 for the very first token, so a doc
//    comment at token 0 terminated the loop early; use the comma operator
//    like eatDocComments does.
//  * the default (unexpected token after a field) case `continue`d directly,
//    making the findNextContainerMember recovery call dead code; fall through
//    to it instead.
//  * the n_items > 2 case read p->scratch.arr[scratch_len] — one past the
//    last member (out of bounds) — and returned scratch CONTENTS instead of
//    an extra_data span.
static Members parseContainerMembers(Parser* p) {
    const uint32_t scratch_top = p->scratch.len;
    // ast_token_index last_field;
    bool ok;
    while (eatToken(p, TOKENIZER_TAG_CONTAINER_DOC_COMMENT, &ok), ok)
        ;
    FieldState field_state = { .tag = FIELD_STATE_NONE };
    bool trailing = false;
    AstNodeIndex top_level_decl;
    while (1) {
        eatDocComments(p);
        switch (p->token_tags[p->tok_i]) {
        case TOKENIZER_TAG_KEYWORD_TEST:
        case TOKENIZER_TAG_KEYWORD_COMPTIME:
        case TOKENIZER_TAG_KEYWORD_USINGNAMESPACE:;
            const char* str = tokenizerGetTagString(p->token_tags[p->tok_i]);
            fprintf(stderr, "%s not implemented in parseContainerMembers\n", str);
            exit(1);
        case TOKENIZER_TAG_KEYWORD_PUB:
            p->tok_i++; // consume `pub`, then parse the decl it modifies
            top_level_decl = expectTopLevelDecl(p);
            if (top_level_decl != 0) {
                if (field_state.tag == FIELD_STATE_SEEN) {
                    field_state.tag = FIELD_STATE_END;
                    field_state.payload.end = top_level_decl;
                }
                SLICE_APPEND(AstNodeIndex, &p->scratch, top_level_decl);
            }
            trailing = (p->token_tags[p->tok_i - 1] == TOKENIZER_TAG_SEMICOLON);
            break;
        case TOKENIZER_TAG_KEYWORD_CONST:
        case TOKENIZER_TAG_KEYWORD_VAR:
        case TOKENIZER_TAG_KEYWORD_THREADLOCAL:
        case TOKENIZER_TAG_KEYWORD_EXPORT:
        case TOKENIZER_TAG_KEYWORD_EXTERN:
        case TOKENIZER_TAG_KEYWORD_INLINE:
        case TOKENIZER_TAG_KEYWORD_NOINLINE:
        case TOKENIZER_TAG_KEYWORD_FN:;
            top_level_decl = expectTopLevelDecl(p);
            if (top_level_decl != 0) {
                if (field_state.tag == FIELD_STATE_SEEN) {
                    field_state.tag = FIELD_STATE_END;
                    field_state.payload.end = top_level_decl;
                }
                SLICE_APPEND(AstNodeIndex, &p->scratch, top_level_decl);
            }
            trailing = (p->token_tags[p->tok_i - 1] == TOKENIZER_TAG_SEMICOLON);
            break;
        case TOKENIZER_TAG_EOF:
        case TOKENIZER_TAG_R_BRACE:
            goto break_loop;
        default:;
            // skip parseCStyleContainer
            const AstNodeIndex container_field = expectContainerField(p);
            switch (field_state.tag) {
            case FIELD_STATE_NONE:
                field_state.tag = FIELD_STATE_SEEN;
                break;
            case FIELD_STATE_SEEN:
                break;
            case FIELD_STATE_END:
                // A field after a decl that followed fields: ordering error.
                fprintf(stderr, "parseContainerMembers error condition\n");
                exit(1);
            }
            SLICE_APPEND(AstNodeIndex, &p->scratch, container_field);
            switch (p->token_tags[p->tok_i]) {
            case TOKENIZER_TAG_COMMA:
                p->tok_i++;
                trailing = true;
                continue;
            case TOKENIZER_TAG_R_BRACE:
            case TOKENIZER_TAG_EOF:
                trailing = false;
                goto break_loop;
            default:
                break; // unexpected token after a field: try to recover
            }
            findNextContainerMember(p);
            continue;
        }
    }
break_loop:;
    const uint32_t scratch_len = p->scratch.len;
    p->scratch.len = scratch_top; // release our scratch region (data intact)
    const uint32_t n_items = scratch_len - scratch_top;
    switch (n_items) {
    case 0:
        return (Members) {
            .len = 0,
            .lhs = 0,
            .rhs = 0,
            .trailing = trailing,
        };
    case 1:
        return (Members) {
            .len = 1,
            .lhs = p->scratch.arr[scratch_top],
            .rhs = 0,
            .trailing = trailing,
        };
    case 2:
        return (Members) {
            .len = 2,
            .lhs = p->scratch.arr[scratch_top],
            .rhs = p->scratch.arr[scratch_top + 1],
            .trailing = trailing,
        };
    default:;
        // Spill the member list into extra_data; lhs/rhs = span [start, end).
        const uint32_t span_start = p->extra_data.len;
        SLICE_ENSURE_CAPACITY(AstNodeIndex, &p->extra_data, n_items);
        memcpy(
            &p->extra_data.arr[p->extra_data.len],
            &p->scratch.arr[scratch_top],
            sizeof(AstNodeIndex) * n_items);
        p->extra_data.len += n_items;
        return (Members) {
            .len = n_items,
            .lhs = span_start,
            .rhs = p->extra_data.len,
            .trailing = trailing,
        };
    }
}