more parser

This commit is contained in:
2025-02-05 16:29:10 +00:00
parent 5c65136bf5
commit 15abfb585f
2 changed files with 109 additions and 10 deletions

View File

@@ -15,6 +15,7 @@ static AstNodeIndex parseTypeExpr(Parser*);
static AstNodeIndex parseBlock(Parser* p);
static AstNodeIndex parseLabeledStatement(Parser*);
static AstNodeIndex parseExpr(Parser*);
static AstNodeIndex expectExpr(Parser*);
typedef struct {
enum { FIELD_STATE_NONE, FIELD_STATE_SEEN, FIELD_STATE_END } tag;
@@ -219,8 +220,8 @@ static AstNodeIndex expectContainerField(Parser* p) {
}
static AstNodeIndex parseBuiltinCall(Parser* p) {
const AstNodeIndex builtin_token = assertToken(p, TOKEN_BUILTIN);
eatToken(p, TOKEN_L_PAREN);
const AstTokenIndex builtin_token = assertToken(p, TOKEN_BUILTIN);
assertToken(p, TOKEN_L_PAREN);
CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
= initCleanupScratch(p);
@@ -229,16 +230,75 @@ static AstNodeIndex parseBuiltinCall(Parser* p) {
if (eatToken(p, TOKEN_R_PAREN) != null_token)
break;
const AstNodeIndex param = parseExpr(p);
const AstNodeIndex param = expectExpr(p);
SLICE_APPEND(AstNodeIndex, &p->scratch, param);
// TODO finish
switch (p->token_tags[p->tok_i]) {
case TOKEN_COMMA:
p->tok_i++;
break;
case TOKEN_R_PAREN:
p->tok_i++;
goto next;
default:
fprintf(stderr, "expected comma after arg\n");
exit(1);
}
next:;
}
(void)builtin_token;
fprintf(stderr, "parseBuiltinCall not implemented\n");
exit(1);
return 0; // tcc
const bool comma = (p->token_tags[p->tok_i - 2] == TOKEN_COMMA);
const uint32_t params_len = p->scratch.len - scratch_top.old_len;
switch (params_len) {
case 0:
return addNode(&p->nodes,
(AstNodeItem) {
.tag = AST_NODE_BUILTIN_CALL_TWO,
.main_token = builtin_token,
.data = {
.lhs = 0,
.rhs = 0,
},
});
case 1:
return addNode(&p->nodes,
(AstNodeItem) {
.tag = comma ?
AST_NODE_BUILTIN_CALL_TWO_COMMA :
AST_NODE_BUILTIN_CALL_TWO,
.main_token = builtin_token,
.data = {
.lhs = p->scratch.arr[scratch_top.old_len],
.rhs = 0,
},
});
case 2:
return addNode(&p->nodes,
(AstNodeItem) {
.tag = comma ?
AST_NODE_BUILTIN_CALL_TWO_COMMA :
AST_NODE_BUILTIN_CALL_TWO,
.main_token = builtin_token,
.data = {
.lhs = p->scratch.arr[scratch_top.old_len],
.rhs = p->scratch.arr[scratch_top.old_len+1],
},
});
default:;
const AstSubRange span
= listToSpan(p, &p->scratch.arr[scratch_top.old_len], params_len);
return addNode(
&p->nodes,
(AstNodeItem) {
.tag = comma ?
AST_NODE_BUILTIN_CALL_COMMA :
AST_NODE_BUILTIN_CALL,
.main_token = builtin_token,
.data = {
.lhs = span.start,
.rhs = span.end,
},
});
}
}
static AstNodeIndex parsePrimaryTypeExpr(Parser* p) {
@@ -248,7 +308,16 @@ static AstNodeIndex parsePrimaryTypeExpr(Parser* p) {
case TOKEN_NUMBER_LITERAL:
case TOKEN_KEYWORD_UNREACHABLE:
case TOKEN_KEYWORD_ANYFRAME:
fprintf(stderr, "parsePrimaryTypeExpr does not support %s\n",
tokenizerGetTagString(tok));
exit(1);
case TOKEN_STRING_LITERAL:
return addNode(&p->nodes,
(AstNodeItem) {
.tag = AST_NODE_STRING_LITERAL,
.main_token = nextToken(p),
.data = {},
});
case TOKEN_BUILTIN:
return parseBuiltinCall(p);
case TOKEN_KEYWORD_FN:
@@ -700,7 +769,6 @@ static OperInfo operTable(TokenizerTag tok_tag) {
}
static AstNodeIndex parseExprPrecedence(Parser* p, int32_t min_prec) {
(void)p;
assert(min_prec >= 0);
AstNodeIndex node = parsePrefixExpr(p);
@@ -746,6 +814,12 @@ static AstNodeIndex parseExprPrecedence(Parser* p, int32_t min_prec) {
/* Entry point for expression parsing: delegate to the precedence-climbing
 * parser with the lowest minimum precedence (0). Returns whatever node
 * index parseExprPrecedence produces — 0 appears to mean "no expression
 * here" (see expectExpr, which rejects 0). */
static AstNodeIndex parseExpr(Parser* p)
{
    const AstNodeIndex result = parseExprPrecedence(p, 0);
    return result;
}
/*
 * Parse an expression and require that one is present.
 *
 * parseExpr() yields node index 0 when it cannot parse an expression;
 * expectExpr is used where an expression is mandatory (e.g. builtin-call
 * arguments). Report the failure through the file's usual
 * fprintf(stderr, ...) + exit(1) error path — the original assert()
 * would be compiled out under NDEBUG, silently letting a null node
 * index propagate, and is inconsistent with how every other parse
 * error in this file is reported.
 */
static AstNodeIndex expectExpr(Parser* p) {
    const AstNodeIndex node = parseExpr(p);
    if (node == 0) {
        fprintf(stderr, "expected expression\n");
        exit(1);
    }
    return node;
}
static AstNodeIndex parsePrimaryExpr(Parser* p) {
const char* tok = tokenizerGetTagString(p->token_tags[p->tok_i]);
switch (p->token_tags[p->tok_i]) {
@@ -812,6 +886,8 @@ static AstNodeIndex parsePrimaryExpr(Parser* p) {
default:
return parseCurlySuffixExpr(p);
}
return 0; // tcc
}
static AstNodeIndex parsePrefixExpr(Parser* p) {

View File

@@ -162,6 +162,29 @@ fn testTokenize(source: [:0]const u8, expected_token_tags: []const Token.Tag) !v
try std.testing.expectEqual(source.len, last_token.loc.end);
}
// Tokenizer regression test: a minimal program exercising keyword,
// identifier, punctuation, builtin, and string-literal tokens.
// testTokenize (defined above) checks both the tag sequence and that the
// final token's end offset equals source.len.
test "my function" {
try testTokenize(
\\pub fn main() void {
\\ @panic("hello");
\\}
\\
// Expected token tags, in source order.
, &.{
.keyword_pub,
.keyword_fn,
.identifier,
.l_paren,
.r_paren,
.identifier,
.l_brace,
.builtin,
.l_paren,
.string_literal,
.r_paren,
.semicolon,
.r_brace,
});
}
test "keywords" {
try testTokenize("test const else", &.{ .keyword_test, .keyword_const, .keyword_else });
}