remove usage of __attribute__((__cleanup__(fn)))
It is problematic with tcc.
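For context, here is a minimal standalone sketch of the two idioms involved (illustrative only, not code from this repository: Scratch, old_idiom, and new_idiom are made-up names, while CleanupScratch and cleanupScratch mirror the helpers this commit deletes). GCC and Clang call the cleanup function when the annotated variable goes out of scope; per the commit message this attribute is problematic under tcc, so the parser now saves and restores the scratch length by hand at each exit point.

    #include <stdint.h>
    #include <stdio.h>

    typedef struct { uint32_t len; } Scratch; /* stand-in for the parser's scratch list */

    typedef struct {
        Scratch* scratch;
        uint32_t old_len;
    } CleanupScratch;

    static void cleanupScratch(CleanupScratch* c) { c->scratch->len = c->old_len; }

    /* Old idiom: the compiler calls cleanupScratch(&scratch_top) on every path
     * out of the function. This relies on a GCC/Clang extension. */
    static void old_idiom(Scratch* s) {
        CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
            = (CleanupScratch) { .scratch = s, .old_len = s->len };
        s->len += 3; /* push temporary entries */
    }   /* length restored here automatically, but only where the extension works */

    /* New idiom: remember the old length and restore it explicitly before
     * returning; plain C, no compiler extension needed. */
    static void new_idiom(Scratch* s) {
        const uint32_t scratch_top = s->len;
        s->len += 3; /* push temporary entries */
        s->len = scratch_top; /* explicit restore */
    }

    int main(void) {
        Scratch s = { .len = 0 };
        old_idiom(&s); /* restore may silently not happen without cleanup support */
        new_idiom(&s);
        printf("len = %u\n", s.len); /* 0 under GCC/Clang */
        return 0;
    }

The parser.c hunks below apply the second pattern at every site that previously declared a CleanupScratch variable, adding an explicit p->scratch.len = scratch_top; restore where the cleanup attribute used to do it implicitly.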
common.h: 10 lines changed

@@ -2,16 +2,6 @@
 #ifndef _ZIG0_COMMON_H__
 #define _ZIG0_COMMON_H__
 
-// tcc doesn't define __GNUC__, causing glibc to #define __attribute__ to
-// nothing.
-#if defined(__TINYC__) && !defined(__GNUC__)
-#define __GNUC__ 2
-#define __GNUC_MINOR__ 0
-#undef __REDIRECT
-#undef __REDIRECT_NTH
-#undef __REDIRECT_NTHNL
-#endif
-
 #include <stdint.h>
 #include <stdlib.h>
 
parser.c: 162 lines changed

@@ -36,11 +36,6 @@ typedef struct {
 } payload;
 } SmallSpan;
 
-typedef struct {
-AstNodeIndexSlice* scratch;
-uint32_t old_len;
-} CleanupScratch;
-
 typedef struct {
 AstNodeIndex align_node;
 AstNodeIndex addrspace_node;
@@ -66,7 +61,6 @@ static AstNodeIndex addNode(AstNodeList*, AstNodeItem);
 static AstNodeTag assignOpNode(TokenizerTag);
 static AstTokenIndex assertToken(Parser*, TokenizerTag);
 static void astNodeListEnsureCapacity(AstNodeList*, uint32_t);
-static void cleanupScratch(CleanupScratch*);
 static AstTokenIndex eatDocComments(Parser*);
 static AstTokenIndex eatToken(Parser*, TokenizerTag);
 static AstNodeIndex expectBlockExprStatement(Parser*);
@@ -83,7 +77,6 @@ static AstNodeIndex expectVarDeclExprStatement(Parser*, AstTokenIndex);
 static void findNextContainerMember(Parser*);
 static AstNodeIndex finishAssignExpr(Parser*, AstNodeIndex);
 static uint32_t forPrefix(Parser*);
-static CleanupScratch initCleanupScratch(Parser*);
 static AstSubRange listToSpan(Parser*, const AstNodeIndex*, uint32_t);
 static AstNodeIndex makePtrTypeNode(
 Parser*, AstTokenIndex, AstNodeIndex, PtrModifiers, AstNodeIndex);
@@ -142,15 +135,6 @@ static AstNodeIndex setNode(Parser*, uint32_t, AstNodeItem);
 static uint32_t tokenTagLexemeLen(TokenizerTag);
 static bool tokensOnSameLine(Parser*, AstTokenIndex, AstTokenIndex);
 
-static CleanupScratch initCleanupScratch(Parser* p) {
-return (CleanupScratch) {
-.scratch = &p->scratch,
-.old_len = p->scratch.len,
-};
-}
-
-static void cleanupScratch(CleanupScratch* c) { c->scratch->len = c->old_len; }
-
 static AstSubRange membersToSpan(const Members self, Parser* p) {
 if (self.len <= 2) {
 const AstNodeIndex nodes[] = { self.lhs, self.rhs };
@@ -236,8 +220,7 @@ void parseRoot(Parser* p) {
 }
 
 static Members parseContainerMembers(Parser* p) {
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 while (eatToken(p, TOKEN_CONTAINER_DOC_COMMENT) != null_token)
 ;
 
@@ -349,7 +332,8 @@ static Members parseContainerMembers(Parser* p) {
 
 break_loop:;
 
-const uint32_t items_len = p->scratch.len - scratch_top.old_len;
+const uint32_t items_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 switch (items_len) {
 case 0:
 return (Members) {
@@ -361,20 +345,20 @@ break_loop:;
 case 1:
 return (Members) {
 .len = 1,
-.lhs = p->scratch.arr[scratch_top.old_len],
+.lhs = p->scratch.arr[scratch_top],
 .rhs = 0,
 .trailing = trailing,
 };
 case 2:
 return (Members) {
 .len = 2,
-.lhs = p->scratch.arr[scratch_top.old_len],
-.rhs = p->scratch.arr[scratch_top.old_len + 1],
+.lhs = p->scratch.arr[scratch_top],
+.rhs = p->scratch.arr[scratch_top + 1],
 .trailing = trailing,
 };
 default:;
 const AstSubRange span
-= listToSpan(p, &p->scratch.arr[scratch_top.old_len], items_len);
+= listToSpan(p, &p->scratch.arr[scratch_top], items_len);
 return (Members) {
 .len = items_len,
 .lhs = span.start,
@@ -834,8 +818,7 @@ static AstNodeIndex expectStatement(Parser* p, bool allow_defer_var) {
 
 static AstNodeIndex expectVarDeclExprStatement(
 Parser* p, AstTokenIndex comptime_token) {
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 
 while (true) {
 const AstNodeIndex var_decl_proto = parseVarDeclProto(p);
@@ -849,7 +832,7 @@ static AstNodeIndex expectVarDeclExprStatement(
 break;
 }
 
-const uint32_t lhs_count = p->scratch.len - scratch_top.old_len;
+const uint32_t lhs_count = p->scratch.len - scratch_top;
 assert(lhs_count > 0);
 
 // Try to eat '=' for assignment/initialization
@@ -860,7 +843,8 @@ static AstNodeIndex expectVarDeclExprStatement(
 // Destructure requires '='
 fail(p, "expected '='");
 }
-const AstNodeIndex lhs = p->scratch.arr[scratch_top.old_len];
+const AstNodeIndex lhs = p->scratch.arr[scratch_top];
+p->scratch.len = scratch_top;
 const AstNodeTag lhs_tag = p->nodes.tags[lhs];
 if (lhs_tag == AST_NODE_SIMPLE_VAR_DECL
 || lhs_tag == AST_NODE_ALIGNED_VAR_DECL
@@ -889,7 +873,8 @@ static AstNodeIndex expectVarDeclExprStatement(
 expectSemicolon(p);
 
 if (lhs_count == 1) {
-const AstNodeIndex lhs = p->scratch.arr[scratch_top.old_len];
+const AstNodeIndex lhs = p->scratch.arr[scratch_top];
+p->scratch.len = scratch_top;
 const AstNodeTag lhs_tag = p->nodes.tags[lhs];
 if (lhs_tag == AST_NODE_SIMPLE_VAR_DECL
 || lhs_tag == AST_NODE_ALIGNED_VAR_DECL
@@ -924,10 +909,10 @@ static AstNodeIndex expectVarDeclExprStatement(
 const AstNodeIndex extra_start = p->extra_data.len;
 SLICE_ENSURE_CAPACITY(AstNodeIndex, &p->extra_data, lhs_count + 1);
 p->extra_data.arr[p->extra_data.len++] = lhs_count;
-memcpy(p->extra_data.arr + p->extra_data.len,
-&p->scratch.arr[scratch_top.old_len],
+memcpy(p->extra_data.arr + p->extra_data.len, &p->scratch.arr[scratch_top],
 lhs_count * sizeof(AstNodeIndex));
 p->extra_data.len += lhs_count;
+p->scratch.len = scratch_top;
 
 return addNode(&p->nodes,
 (AstNodeItem) {
@@ -1975,8 +1960,7 @@ static AstNodeIndex parseBlock(Parser* p) {
 if (lbrace == null_token)
 return null_node;
 
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 
 while (1) {
 if (p->token_tags[p->tok_i] == TOKEN_R_BRACE)
@@ -1990,7 +1974,8 @@ static AstNodeIndex parseBlock(Parser* p) {
 SLICE_APPEND(AstNodeIndex, &p->scratch, statement);
 }
 expectToken(p, TOKEN_R_BRACE);
-const uint32_t statements_len = p->scratch.len - scratch_top.old_len;
+const uint32_t statements_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 const bool semicolon = statements_len != 0
 && (p->token_tags[p->tok_i - 2] == TOKEN_SEMICOLON);
 switch (statements_len) {
@@ -2012,7 +1997,7 @@ static AstNodeIndex parseBlock(Parser* p) {
 .tag = semicolon ? AST_NODE_BLOCK_TWO_SEMICOLON : AST_NODE_BLOCK_TWO,
 .main_token = lbrace,
 .data = {
-.lhs = p->scratch.arr[scratch_top.old_len],
+.lhs = p->scratch.arr[scratch_top],
 .rhs = 0,
 },
 });
@@ -2023,13 +2008,13 @@ static AstNodeIndex parseBlock(Parser* p) {
 .tag = semicolon ? AST_NODE_BLOCK_TWO_SEMICOLON : AST_NODE_BLOCK_TWO,
 .main_token = lbrace,
 .data = {
-.lhs = p->scratch.arr[scratch_top.old_len],
-.rhs = p->scratch.arr[scratch_top.old_len + 1],
+.lhs = p->scratch.arr[scratch_top],
+.rhs = p->scratch.arr[scratch_top + 1],
 },
 });
 default:;
-const AstSubRange span = listToSpan(
-p, &p->scratch.arr[scratch_top.old_len], statements_len);
+const AstSubRange span
+= listToSpan(p, &p->scratch.arr[scratch_top], statements_len);
 return addNode(
 &p->nodes,
 (AstNodeItem) {
@@ -2173,8 +2158,7 @@ static AstNodeIndex parseCurlySuffixExpr(Parser* p) {
 // lbrace is the lbrace token index.
 static AstNodeIndex parseInitList(
 Parser* p, AstNodeIndex lhs, AstTokenIndex lbrace) {
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 
 const AstNodeIndex field_init = parseFieldInit(p);
 if (field_init != 0) {
@@ -2196,7 +2180,8 @@ static AstNodeIndex parseInitList(
 SLICE_APPEND(AstNodeIndex, &p->scratch, next);
 }
 const bool comma = p->token_tags[p->tok_i - 2] == TOKEN_COMMA;
-const uint32_t inits_len = p->scratch.len - scratch_top.old_len;
+const uint32_t inits_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 if (lhs == 0) {
 // Anonymous struct init: .{...}
 switch (inits_len) {
@@ -2211,16 +2196,16 @@ static AstNodeIndex parseInitList(
 .main_token = lbrace,
 .data = {
 .lhs = inits_len >= 1
-? p->scratch.arr[scratch_top.old_len]
+? p->scratch.arr[scratch_top]
 : 0,
 .rhs = inits_len >= 2
-? p->scratch.arr[scratch_top.old_len + 1]
+? p->scratch.arr[scratch_top + 1]
 : 0,
 },
 });
 default:;
-const AstSubRange span = listToSpan(
-p, &p->scratch.arr[scratch_top.old_len], inits_len);
+const AstSubRange span
+= listToSpan(p, &p->scratch.arr[scratch_top], inits_len);
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = comma ? AST_NODE_STRUCT_INIT_DOT_COMMA
@@ -2242,13 +2227,13 @@ static AstNodeIndex parseInitList(
 .data = {
 .lhs = lhs,
 .rhs = inits_len >= 1
-? p->scratch.arr[scratch_top.old_len]
+? p->scratch.arr[scratch_top]
 : 0,
 },
 });
 default:;
-const AstSubRange span = listToSpan(
-p, &p->scratch.arr[scratch_top.old_len], inits_len);
+const AstSubRange span
+= listToSpan(p, &p->scratch.arr[scratch_top], inits_len);
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = comma ? AST_NODE_STRUCT_INIT_COMMA
@@ -2280,7 +2265,8 @@ static AstNodeIndex parseInitList(
 }
 
 const bool comma = p->token_tags[p->tok_i - 2] == TOKEN_COMMA;
-const uint32_t elems_len = p->scratch.len - scratch_top.old_len;
+const uint32_t elems_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 if (lhs == 0) {
 // Anonymous array init: .{a, b, ...}
 switch (elems_len) {
@@ -2294,16 +2280,16 @@ static AstNodeIndex parseInitList(
 .main_token = lbrace,
 .data = {
 .lhs = elems_len >= 1
-? p->scratch.arr[scratch_top.old_len]
+? p->scratch.arr[scratch_top]
 : 0,
 .rhs = elems_len >= 2
-? p->scratch.arr[scratch_top.old_len + 1]
+? p->scratch.arr[scratch_top + 1]
 : 0,
 },
 });
 default:;
-const AstSubRange span = listToSpan(
-p, &p->scratch.arr[scratch_top.old_len], elems_len);
+const AstSubRange span
+= listToSpan(p, &p->scratch.arr[scratch_top], elems_len);
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = comma ? AST_NODE_ARRAY_INIT_DOT_COMMA
@@ -2331,12 +2317,12 @@ static AstNodeIndex parseInitList(
 .main_token = lbrace,
 .data = {
 .lhs = lhs,
-.rhs = p->scratch.arr[scratch_top.old_len],
+.rhs = p->scratch.arr[scratch_top],
 },
 });
 default:;
 const AstSubRange span
-= listToSpan(p, &p->scratch.arr[scratch_top.old_len], elems_len);
+= listToSpan(p, &p->scratch.arr[scratch_top], elems_len);
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = comma ? AST_NODE_ARRAY_INIT_COMMA
@@ -2391,8 +2377,7 @@ static AstNodeIndex parseSuffixExpr(Parser* p) {
 if (lparen == null_token)
 return res;
 
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 while (true) {
 if (eatToken(p, TOKEN_R_PAREN) != null_token)
 break;
@@ -2407,7 +2392,8 @@ static AstNodeIndex parseSuffixExpr(Parser* p) {
 }
 
 const bool comma = p->token_tags[p->tok_i - 2] == TOKEN_COMMA;
-const uint32_t params_len = p->scratch.len - scratch_top.old_len;
+const uint32_t params_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 switch (params_len) {
 case 0:
 res = addNode(
@@ -2429,13 +2415,13 @@ static AstNodeIndex parseSuffixExpr(Parser* p) {
 .main_token = lparen,
 .data = {
 .lhs = res,
-.rhs = p->scratch.arr[scratch_top.old_len],
+.rhs = p->scratch.arr[scratch_top],
 },
 });
 break;
 default:;
-const AstSubRange span = listToSpan(
-p, &p->scratch.arr[scratch_top.old_len], params_len);
+const AstSubRange span
+= listToSpan(p, &p->scratch.arr[scratch_top], params_len);
 res = addNode(
 &p->nodes,
 (AstNodeItem) {
@@ -2684,8 +2670,7 @@ static AstNodeIndex parseAsmExpr(Parser* p) {
 // Complex asm with outputs, inputs, clobbers
 expectToken(p, TOKEN_COLON);
 
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 
 // Parse outputs
 while (true) {
@@ -2720,9 +2705,10 @@ static AstNodeIndex parseAsmExpr(Parser* p) {
 break;
 }
 const AstTokenIndex rparen = expectToken(p, TOKEN_R_PAREN);
-const uint32_t items_len = p->scratch.len - scratch_top.old_len;
-const AstSubRange items_span = listToSpan(
-p, &p->scratch.arr[scratch_top.old_len], items_len);
+const uint32_t items_len = p->scratch.len - scratch_top;
+const AstSubRange items_span
+= listToSpan(p, &p->scratch.arr[scratch_top], items_len);
+p->scratch.len = scratch_top;
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = AST_NODE_ASM_LEGACY,
@@ -2742,9 +2728,10 @@ static AstNodeIndex parseAsmExpr(Parser* p) {
 clobbers = expectExpr(p);
 
 const AstTokenIndex rparen = expectToken(p, TOKEN_R_PAREN);
-const uint32_t items_len = p->scratch.len - scratch_top.old_len;
+const uint32_t items_len = p->scratch.len - scratch_top;
 const AstSubRange items_span
-= listToSpan(p, &p->scratch.arr[scratch_top.old_len], items_len);
+= listToSpan(p, &p->scratch.arr[scratch_top], items_len);
+p->scratch.len = scratch_top;
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = AST_NODE_ASM,
@@ -2761,9 +2748,10 @@ static AstNodeIndex parseAsmExpr(Parser* p) {
 
 // No clobbers
 const AstTokenIndex rparen = expectToken(p, TOKEN_R_PAREN);
-const uint32_t items_len = p->scratch.len - scratch_top.old_len;
+const uint32_t items_len = p->scratch.len - scratch_top;
 const AstSubRange items_span
-= listToSpan(p, &p->scratch.arr[scratch_top.old_len], items_len);
+= listToSpan(p, &p->scratch.arr[scratch_top], items_len);
+p->scratch.len = scratch_top;
 return addNode(&p->nodes,
 (AstNodeItem) {
 .tag = AST_NODE_ASM,
@@ -3252,8 +3240,7 @@ static AstNodeIndex parseByteAlign(Parser* p) {
 }
 
 static AstSubRange parseSwitchProngList(Parser* p) {
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 while (true) {
 if (eatToken(p, TOKEN_R_BRACE) != null_token)
 break;
@@ -3265,15 +3252,17 @@ static AstSubRange parseSwitchProngList(Parser* p) {
 if (p->token_tags[p->tok_i] == TOKEN_COMMA)
 p->tok_i++;
 }
-const uint32_t cases_len = p->scratch.len - scratch_top.old_len;
-return listToSpan(p, &p->scratch.arr[scratch_top.old_len], cases_len);
+const uint32_t cases_len = p->scratch.len - scratch_top;
+const AstSubRange span
+= listToSpan(p, &p->scratch.arr[scratch_top], cases_len);
+p->scratch.len = scratch_top;
+return span;
 }
 
 static SmallSpan parseParamDeclList(Parser* p) {
 expectToken(p, TOKEN_L_PAREN);
 
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 
 // 0 = none, 1 = seen, 2 = nonfinal
 int varargs = 0;
@@ -3310,7 +3299,8 @@ static SmallSpan parseParamDeclList(Parser* p) {
 fail(p, "varargs_nonfinal");
 }
 
-const uint32_t params_len = p->scratch.len - scratch_top.old_len;
+const uint32_t params_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 switch (params_len) {
 case 0:
 return (SmallSpan) {
@@ -3320,11 +3310,11 @@ static SmallSpan parseParamDeclList(Parser* p) {
 case 1:
 return (SmallSpan) {
 .tag = SMALL_SPAN_ZERO_OR_ONE,
-.payload = { .zero_or_one = p->scratch.arr[scratch_top.old_len] },
+.payload = { .zero_or_one = p->scratch.arr[scratch_top] },
 };
 default:;
 const AstSubRange span
-= listToSpan(p, &p->scratch.arr[scratch_top.old_len], params_len);
+= listToSpan(p, &p->scratch.arr[scratch_top], params_len);
 return (SmallSpan) {
 .tag = SMALL_SPAN_MULTI,
 .payload = { .multi = span },
@@ -3336,8 +3326,7 @@ static AstNodeIndex parseBuiltinCall(Parser* p) {
 const AstTokenIndex builtin_token = assertToken(p, TOKEN_BUILTIN);
 assertToken(p, TOKEN_L_PAREN);
 
-CleanupScratch scratch_top __attribute__((__cleanup__(cleanupScratch)))
-= initCleanupScratch(p);
+const uint32_t scratch_top = p->scratch.len;
 
 while (true) {
 if (eatToken(p, TOKEN_R_PAREN) != null_token)
@@ -3359,7 +3348,8 @@ static AstNodeIndex parseBuiltinCall(Parser* p) {
 end_loop:;
 
 const bool comma = (p->token_tags[p->tok_i - 2] == TOKEN_COMMA);
-const uint32_t params_len = p->scratch.len - scratch_top.old_len;
+const uint32_t params_len = p->scratch.len - scratch_top;
+p->scratch.len = scratch_top;
 switch (params_len) {
 case 0:
 return addNode(&p->nodes,
@@ -3379,7 +3369,7 @@ end_loop:;
 AST_NODE_BUILTIN_CALL_TWO,
 .main_token = builtin_token,
 .data = {
-.lhs = p->scratch.arr[scratch_top.old_len],
+.lhs = p->scratch.arr[scratch_top],
 .rhs = 0,
 },
 });
@@ -3391,13 +3381,13 @@ end_loop:;
 AST_NODE_BUILTIN_CALL_TWO,
 .main_token = builtin_token,
 .data = {
-.lhs = p->scratch.arr[scratch_top.old_len],
-.rhs = p->scratch.arr[scratch_top.old_len+1],
+.lhs = p->scratch.arr[scratch_top],
+.rhs = p->scratch.arr[scratch_top+1],
 },
 });
 default:;
 const AstSubRange span
-= listToSpan(p, &p->scratch.arr[scratch_top.old_len], params_len);
+= listToSpan(p, &p->scratch.arr[scratch_top], params_len);
 return addNode(
 &p->nodes,
 (AstNodeItem) {