astgen: assign_destructure, u0 int type, slice_length optimization

- Implement assignDestructure() and assignDestructureMaybeDecls() with the
  RL_DESTRUCTURE result location, DestructureComponent types, rvalue
  handling for validate_destructure/elem_val_imm/store_node, and an array
  init fast path that writes .{ ... } elements directly into the
  destructure targets (see the sketch after this list).
- Fix tryResolvePrimitiveIdent to allow bit_count==0 (u0/i0 types) and to
  reject leading zeros (u01, i007); see the sketch below.
- Add nodeIsTriviallyZero and a slice_length optimization that lowers
  arr[start..][0..len] patterns to a single slice_length instruction in
  the AST_NODE_SLICE and AST_NODE_SLICE_SENTINEL cases (sketch below).
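
A minimal sketch of driving the new RL_DESTRUCTURE result location.
Everything below that is not defined in this diff (gz, scope, the *_ref
and *_node values) is a placeholder, not real code from the tree:

    /* Hypothetical `x, y = rhs` where x is an existing lvalue and y is an
     * inferred-alloc local; x_ptr_ref, y_alloc_ref, x_node, assign_node
     * and rhs_node are assumed to exist. */
    DestructureComponent comps[2] = {
        { .tag = DC_TYPED_PTR,    .inst = x_ptr_ref,   .src_node = x_node },
        { .tag = DC_INFERRED_PTR, .inst = y_alloc_ref, .src_node = 0 },
    };
    ResultLoc ds_rl = {
        .tag = RL_DESTRUCTURE,
        .data = 0,
        .src_node = assign_node, /* the assign_destructure AST node */
        .ctx = RI_CTX_NONE,
        .components = comps,
        .components_len = 2,
    };
    /* rvalue() then emits validate_destructure, one elem_val_imm per
     * non-discard component, and store_node / store_to_inferred_ptr. */
    (void)exprRl(gz, scope, ds_rl, rhs_node);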
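
The u0/i0 rule in isolation, as a standalone sketch; parseIntTypeName is
an illustrative helper, not a function in this change:

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* True (and *bits set) iff name is a valid uN/iN type name:
     * u0 and i0 are accepted, leading zeros (u01, i007) are not. */
    static bool parseIntTypeName(const char* name, uint16_t* bits) {
        size_t len = strlen(name);
        if (len < 2 || (name[0] != 'u' && name[0] != 'i'))
            return false;
        if (len >= 3 && name[1] == '0')
            return false; /* leading zero */
        uint32_t value = 0;
        for (size_t i = 1; i < len; i++) {
            if (name[i] < '0' || name[i] > '9')
                return false;
            value = value * 10 + (uint32_t)(name[i] - '0');
            if (value > 65535)
                return false; /* does not fit uint16_t */
        }
        *bits = (uint16_t)value;
        return true; /* bit_count == 0 is now allowed */
    }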
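
And a standalone sketch of the trivially-zero check that gates the
slice_length fast path; tokenIsTriviallyZero is illustrative, while the
real nodeIsTriviallyZero reads the Ast node and token tables:

    #include <ctype.h>
    #include <stdbool.h>

    /* True only for a number-literal token that is exactly "0".
     * "0.." counts (the token is "0", followed by a range); "0.5",
     * "0x0", "00" and "0_0" do not. `src` points at the token start in
     * a NUL-terminated source buffer. */
    static bool tokenIsTriviallyZero(const char* src) {
        if (src[0] != '0')
            return false;
        char c = src[1];
        if (c == '.')
            return src[2] == '.'; /* "0.." yes, "0.5" no */
        return !(isalnum((unsigned char)c) || c == '_');
    }

When the check passes and the inner node is a slice_open, the two slices
collapse into one slice_length instruction whose payload is the five
extra words written in this diff: lhs, start, len, sentinel (none for
plain slices), and the source-node offset of the inner slice.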

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-14 14:04:06 +00:00
parent 84c578d31b
commit 9bc6ac6679
2 changed files with 592 additions and 20 deletions


@@ -185,6 +185,19 @@ typedef enum {
RI_CTX_ASSIGNMENT,
} ResultCtx;
// DestructureComponent: mirrors Zig's ResultInfo.Loc.DestructureComponent.
typedef enum {
DC_DISCARD,
DC_TYPED_PTR,
DC_INFERRED_PTR,
} DestructureComponentTag;
typedef struct {
DestructureComponentTag tag;
uint32_t inst; // ZIR inst ref (for DC_TYPED_PTR and DC_INFERRED_PTR).
uint32_t src_node; // Only for DC_TYPED_PTR.
} DestructureComponent;
typedef enum {
RL_NONE, // Just compute the value.
RL_REF, // Compute a pointer to the value.
@@ -194,25 +207,40 @@ typedef enum {
RL_PTR, // Store result to typed pointer. data=alloc inst, src_node=node.
RL_INFERRED_PTR, // Store result to inferred pointer. data=alloc inst.
RL_REF_COERCED_TY, // Ref with pointer type. data=ptr_ty_inst.
RL_DESTRUCTURE, // Destructure into multiple pointers.
} ResultLocTag;
typedef struct {
ResultLocTag tag;
uint32_t data; // ZirInstRef: ty_inst for TY/COERCED_TY, alloc inst for
// PTR/INFERRED_PTR.
uint32_t src_node; // Only used for RL_PTR.
uint32_t src_node; // Used for RL_PTR and RL_DESTRUCTURE.
ResultCtx ctx; // ResultInfo.Context (AstGen.zig:371).
DestructureComponent* components; // Only for RL_DESTRUCTURE.
uint32_t components_len; // Only for RL_DESTRUCTURE.
} ResultLoc;
#define RL_NONE_VAL \
((ResultLoc) { \
.tag = RL_NONE, .data = 0, .src_node = 0, .ctx = RI_CTX_NONE })
((ResultLoc) { .tag = RL_NONE, \
.data = 0, \
.src_node = 0, \
.ctx = RI_CTX_NONE, \
.components = NULL, \
.components_len = 0 })
#define RL_REF_VAL \
((ResultLoc) { \
.tag = RL_REF, .data = 0, .src_node = 0, .ctx = RI_CTX_NONE })
((ResultLoc) { .tag = RL_REF, \
.data = 0, \
.src_node = 0, \
.ctx = RI_CTX_NONE, \
.components = NULL, \
.components_len = 0 })
#define RL_DISCARD_VAL \
((ResultLoc) { \
.tag = RL_DISCARD, .data = 0, .src_node = 0, .ctx = RI_CTX_NONE })
((ResultLoc) { .tag = RL_DISCARD, \
.data = 0, \
.src_node = 0, \
.ctx = RI_CTX_NONE, \
.components = NULL, \
.components_len = 0 })
#define RL_IS_REF(rl) ((rl).tag == RL_REF || (rl).tag == RL_REF_COERCED_TY)
// --- Scope types (AstGen.zig:11621-11768) ---
@@ -661,6 +689,13 @@ static uint32_t addUnNode(
return addInstruction(gz, tag, data);
}
// Mirrors GenZir.addNode (AstGen.zig:12414).
static uint32_t addNode(GenZir* gz, ZirInstTag tag, uint32_t node) {
ZirInstData data;
data.node = (int32_t)node - (int32_t)gz->decl_node_index;
return addInstruction(gz, tag, data);
}
// Mirrors GenZir.addUnTok (AstGen.zig:12497).
static uint32_t addUnTok(
GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) {
@@ -2437,6 +2472,51 @@ static uint32_t rvalue(
addPlNodeBin(
gz, ZIR_INST_STORE_TO_INFERRED_PTR, node, rl.data, result);
return ZIR_REF_VOID_VALUE;
case RL_DESTRUCTURE: {
// validate_destructure (AstGen.zig:11225-11258).
uint32_t ds_node = rl.src_node;
uint32_t comp_len = rl.components_len;
// Emit validate_destructure: pl_node with ValidateDestructure payload.
// Payload: { operand, destructure_node (relative), expect_len }
{
uint32_t payload_idx = gz->astgen->extra_len;
ensureExtraCapacity(gz->astgen, 3);
gz->astgen->extra[gz->astgen->extra_len++] = result; // operand
gz->astgen->extra[gz->astgen->extra_len++]
= (uint32_t)((int32_t)ds_node
- (int32_t)gz->decl_node_index); // destructure_node
gz->astgen->extra[gz->astgen->extra_len++]
= comp_len; // expect_len
addPlNodePayloadIndex(
gz, ZIR_INST_VALIDATE_DESTRUCTURE, node, payload_idx);
}
for (uint32_t i = 0; i < comp_len; i++) {
const DestructureComponent* comp = &rl.components[i];
if (comp->tag == DC_DISCARD)
continue;
// elem_val_imm: operand=result, idx=i.
uint32_t elem_inst = reserveInstructionIndex(gz->astgen);
gz->astgen->inst_tags[elem_inst] = ZIR_INST_ELEM_VAL_IMM;
gz->astgen->inst_datas[elem_inst].elem_val_imm.operand = result;
gz->astgen->inst_datas[elem_inst].elem_val_imm.idx = i;
gzAppendInstruction(gz, elem_inst);
uint32_t elem_ref = elem_inst + ZIR_REF_START_INDEX;
switch (comp->tag) {
case DC_TYPED_PTR:
addPlNodeBin(gz, ZIR_INST_STORE_NODE,
comp->src_node != 0 ? comp->src_node : node, comp->inst,
elem_ref);
break;
case DC_INFERRED_PTR:
addPlNodeBin(gz, ZIR_INST_STORE_TO_INFERRED_PTR, node,
comp->inst, elem_ref);
break;
case DC_DISCARD:
break; // unreachable
}
}
return ZIR_REF_VOID_VALUE;
}
}
return result;
}
@@ -2472,6 +2552,10 @@ static DeferCounts countDefers(const Scope* outer_scope, Scope* inner_scope);
static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node);
static void assignDestructure(GenZir* gz, Scope* scope, uint32_t node);
static Scope* assignDestructureMaybeDecls(GenZir* gz, Scope* scope,
uint32_t node, ScopeLocalVal* val_scopes, uint32_t* val_idx,
ScopeLocalPtr* ptr_scopes, uint32_t* ptr_idx, uint32_t max_scopes);
static void assignOp(
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag);
static void assignShift(
@@ -3616,6 +3700,10 @@ static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node) {
&& (source[tok_start] == 'u' || source[tok_start] == 'i')) {
// Zig Signedness enum: unsigned=1, signed=0
uint8_t signedness = (source[tok_start] == 'u') ? 1 : 0;
// Reject leading zeros (e.g. u01, i007) but allow u0/i0.
if (tok_len >= 3 && source[tok_start + 1] == '0') {
return ZIR_REF_NONE;
}
uint16_t bit_count = 0;
bool valid = true;
for (uint32_t k = tok_start + 1; k < tok_end; k++) {
@@ -3627,7 +3715,7 @@ static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node) {
break;
}
}
if (valid && bit_count > 0) {
if (valid) {
ZirInstData data;
data.int_type.src_node
= (int32_t)node - (int32_t)gz->decl_node_index;
@@ -5072,6 +5160,10 @@ static uint32_t structInitExpr(
= structInitExprAnon(gz, scope, node, fields, fields_len);
return rvalue(gz, rl, struct_inst, node);
}
case RL_DESTRUCTURE:
// Struct value cannot be destructured (AstGen.zig:1854-1859).
SET_ERROR(ag);
return ZIR_REF_VOID_VALUE;
}
SET_ERROR(ag);
return ZIR_REF_VOID_VALUE;
@@ -5256,6 +5348,31 @@ static uint32_t boolBinOp(
return bool_br + ZIR_REF_START_INDEX;
}
// Mirrors nodeIsTriviallyZero (AstGen.zig:10299-10313).
static bool nodeIsTriviallyZero(const Ast* tree, uint32_t node) {
if (tree->nodes.tags[node] != AST_NODE_NUMBER_LITERAL)
return false;
uint32_t tok = tree->nodes.main_tokens[node];
uint32_t tok_start = tree->tokens.starts[tok];
const char* source = (const char*)tree->source;
if (source[tok_start] != '0')
return false;
// Distinguish "0.." (range, token is "0") from "0.5" (float literal).
char c = source[tok_start + 1];
if (c == '.')
return source[tok_start + 2] == '.';
// Any alphanumeric or underscore means the token is longer than "0".
if (c >= '0' && c <= '9')
return false;
if (c >= 'a' && c <= 'z')
return false;
if (c >= 'A' && c <= 'Z')
return false;
if (c == '_')
return false;
return true;
}
// Mirrors expr (AstGen.zig:634) — main expression dispatcher.
static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
AstGenCtx* ag = gz->astgen;
@@ -5650,18 +5767,54 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
addPlNodeBin(gz, ZIR_INST_SLICE_START, node, lhs, start), node);
}
case AST_NODE_SLICE: {
// Slice[rhs]: { start, end } (AstGen.zig:908-937).
// Slice[rhs]: { start, end } (AstGen.zig:882-937).
const Ast* stree = ag->tree;
uint32_t start_node = stree->extra_data.arr[nd.rhs];
uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
// slice_length optimization (AstGen.zig:887-906).
if (stree->nodes.tags[nd.lhs] == AST_NODE_SLICE_OPEN
&& nodeIsTriviallyZero(stree, start_node)) {
AstData inner_nd = stree->nodes.datas[nd.lhs];
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, inner_nd.lhs);
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
.data = ZIR_REF_USIZE_TYPE,
.src_node = 0,
.ctx = RI_CTX_NONE,
.components = NULL,
.components_len = 0 };
uint32_t start_ref = exprRl(gz, scope, usize_rl, inner_nd.rhs);
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t len_ref = exprRl(gz, scope, usize_rl, end_node);
emitDbgStmt(gz, saved_line, saved_col);
ensureExtraCapacity(ag, 5);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++] = lhs;
ag->extra[ag->extra_len++] = start_ref;
ag->extra[ag->extra_len++] = len_ref;
ag->extra[ag->extra_len++] = ZIR_REF_NONE; // no sentinel
int32_t src_off = (int32_t)nd.lhs - (int32_t)gz->decl_node_index;
memcpy(&ag->extra[ag->extra_len], &src_off, sizeof(uint32_t));
ag->extra_len++;
ZirInstData data;
data.pl_node.src_node
= (int32_t)node - (int32_t)gz->decl_node_index;
data.pl_node.payload_index = payload_index;
return rvalue(
gz, rl, addInstruction(gz, ZIR_INST_SLICE_LENGTH, data), node);
}
// Normal path.
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t start_node = stree->extra_data.arr[nd.rhs];
uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
.data = ZIR_REF_USIZE_TYPE,
.src_node = 0,
.ctx = RI_CTX_NONE };
.ctx = RI_CTX_NONE,
.components = NULL,
.components_len = 0 };
uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
uint32_t end_ref = exprRl(gz, scope, usize_rl, end_node);
emitDbgStmt(gz, saved_line, saved_col);
@@ -5678,20 +5831,58 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
}
case AST_NODE_SLICE_SENTINEL: {
// SliceSentinel[rhs]: { start, end, sentinel }
// (AstGen.zig:908-925).
// (AstGen.zig:882-937).
const Ast* stree = ag->tree;
uint32_t start_node = stree->extra_data.arr[nd.rhs];
uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
uint32_t sentinel_node = stree->extra_data.arr[nd.rhs + 2];
// slice_length optimization (AstGen.zig:887-906).
if (end_node != 0 && stree->nodes.tags[nd.lhs] == AST_NODE_SLICE_OPEN
&& nodeIsTriviallyZero(stree, start_node)) {
AstData inner_nd = stree->nodes.datas[nd.lhs];
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, inner_nd.lhs);
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
.data = ZIR_REF_USIZE_TYPE,
.src_node = 0,
.ctx = RI_CTX_NONE,
.components = NULL,
.components_len = 0 };
uint32_t start_ref = exprRl(gz, scope, usize_rl, inner_nd.rhs);
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t len_ref = exprRl(gz, scope, usize_rl, end_node);
uint32_t sentinel_ref
= exprRl(gz, scope, RL_NONE_VAL, sentinel_node);
emitDbgStmt(gz, saved_line, saved_col);
ensureExtraCapacity(ag, 5);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++] = lhs;
ag->extra[ag->extra_len++] = start_ref;
ag->extra[ag->extra_len++] = len_ref;
ag->extra[ag->extra_len++] = sentinel_ref;
int32_t src_off = (int32_t)nd.lhs - (int32_t)gz->decl_node_index;
memcpy(&ag->extra[ag->extra_len], &src_off, sizeof(uint32_t));
ag->extra_len++;
ZirInstData data;
data.pl_node.src_node
= (int32_t)node - (int32_t)gz->decl_node_index;
data.pl_node.payload_index = payload_index;
return rvalue(
gz, rl, addInstruction(gz, ZIR_INST_SLICE_LENGTH, data), node);
}
// Normal path.
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t start_node = stree->extra_data.arr[nd.rhs];
uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
uint32_t sentinel_node = stree->extra_data.arr[nd.rhs + 2];
// start/end coerced to usize (AstGen.zig:911-912).
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
.data = ZIR_REF_USIZE_TYPE,
.src_node = 0,
.ctx = RI_CTX_NONE };
.ctx = RI_CTX_NONE,
.components = NULL,
.components_len = 0 };
uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
uint32_t end_ref = (end_node != 0)
? exprRl(gz, scope, usize_rl, end_node)
@@ -5702,7 +5893,9 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
ResultLoc sent_rl = { .tag = RL_COERCED_TY,
.data = sentinel_ty,
.src_node = 0,
.ctx = RI_CTX_NONE };
.ctx = RI_CTX_NONE,
.components = NULL,
.components_len = 0 };
uint32_t sentinel_ref = exprRl(gz, scope, sent_rl, sentinel_node);
emitDbgStmt(gz, saved_line, saved_col);
ensureExtraCapacity(ag, 4);
@@ -6123,6 +6316,10 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
case AST_NODE_ASSIGN_SHL_SAT:
assignShiftSat(gz, scope, node);
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
// assign_destructure (AstGen.zig:669-674).
case AST_NODE_ASSIGN_DESTRUCTURE:
assignDestructure(gz, scope, node);
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
default:
SET_ERROR(ag);
return ZIR_REF_VOID_VALUE;
@@ -6448,6 +6645,36 @@ static uint32_t arrayInitDotExpr(
gz, ZIR_INST_VALIDATE_PTR_ARRAY_INIT, node, payload_index);
return ZIR_REF_VOID_VALUE;
}
case RL_DESTRUCTURE: {
// Destructure directly into result pointers (AstGen.zig:1552-1569).
if (elem_count != rl.components_len) {
SET_ERROR(ag);
return ZIR_REF_VOID_VALUE;
}
for (uint32_t i = 0; i < elem_count; i++) {
const DestructureComponent* comp = &rl.components[i];
ResultLoc elem_rl;
switch (comp->tag) {
case DC_TYPED_PTR:
elem_rl = (ResultLoc) { .tag = RL_PTR,
.data = comp->inst,
.src_node = comp->src_node,
.ctx = RI_CTX_NONE };
break;
case DC_INFERRED_PTR:
elem_rl = (ResultLoc) { .tag = RL_INFERRED_PTR,
.data = comp->inst,
.src_node = 0,
.ctx = RI_CTX_NONE };
break;
case DC_DISCARD:
elem_rl = RL_DISCARD_VAL;
break;
}
exprRl(gz, scope, elem_rl, elements[i]);
}
return ZIR_REF_VOID_VALUE;
}
}
// Fallback: anon init + rvalue.
@@ -8845,6 +9072,341 @@ static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node) {
}
}
// --- assignDestructure (AstGen.zig:3456-3504) ---
// Handles destructure assignments where LHS is only lvalue expressions (no
// new var/const declarations). Called from exprRl and blockExprStmts.
static void assignDestructure(GenZir* gz, Scope* scope, uint32_t node) {
emitDbgNode(gz, node);
AstGenCtx* ag = gz->astgen;
const Ast* tree = ag->tree;
// Parse assign_destructure node: lhs=extra_index, rhs=value_expr.
AstData nd = tree->nodes.datas[node];
uint32_t extra_start = nd.lhs;
uint32_t value_expr = nd.rhs;
uint32_t variable_count = tree->extra_data.arr[extra_start];
const uint32_t* variables = tree->extra_data.arr + extra_start + 1;
// Detect comptime token (AstGen.zig:3462-3464).
// Check if the first variable's first token (or the token before it)
// is keyword_comptime.
bool has_comptime = false;
if (variable_count > 0) {
uint32_t first_var = variables[0];
AstNodeTag first_tag = tree->nodes.tags[first_var];
uint32_t first_tok;
if (first_tag == AST_NODE_GLOBAL_VAR_DECL
|| first_tag == AST_NODE_LOCAL_VAR_DECL
|| first_tag == AST_NODE_ALIGNED_VAR_DECL
|| first_tag == AST_NODE_SIMPLE_VAR_DECL) {
first_tok = firstToken(tree, first_var);
} else {
first_tok = firstToken(tree, first_var) - 1;
}
if (first_tok < tree->tokens.len
&& tree->tokens.tags[first_tok] == TOKEN_KEYWORD_COMPTIME) {
has_comptime = true;
}
}
if (has_comptime && gz->is_comptime) {
// Redundant comptime in already comptime scope (AstGen.zig:3466-3468).
SET_ERROR(ag);
return;
}
// If comptime, wrap in sub-block (AstGen.zig:3471-3477).
GenZir gz_buf;
GenZir* inner_gz = gz;
if (has_comptime) {
gz_buf = makeSubBlock(gz, scope);
gz_buf.is_comptime = true;
inner_gz = &gz_buf;
}
// Build rl_components (AstGen.zig:3479-3492).
DestructureComponent* rl_components
= malloc(variable_count * sizeof(DestructureComponent));
if (!rl_components)
exit(1);
for (uint32_t i = 0; i < variable_count; i++) {
uint32_t variable_node = variables[i];
// Check for `_` identifier (AstGen.zig:3481-3487).
if (tree->nodes.tags[variable_node] == AST_NODE_IDENTIFIER) {
uint32_t ident_tok = tree->nodes.main_tokens[variable_node];
uint32_t tok_start = tree->tokens.starts[ident_tok];
if (tree->source[tok_start] == '_'
&& (tok_start + 1 >= tree->source_len
|| !((tree->source[tok_start + 1] >= 'a'
&& tree->source[tok_start + 1] <= 'z')
|| (tree->source[tok_start + 1] >= 'A'
&& tree->source[tok_start + 1] <= 'Z')
|| tree->source[tok_start + 1] == '_'
|| (tree->source[tok_start + 1] >= '0'
&& tree->source[tok_start + 1] <= '9')))) {
rl_components[i].tag = DC_DISCARD;
rl_components[i].inst = 0;
rl_components[i].src_node = 0;
continue;
}
}
// lvalExpr: evaluate as ref (AstGen.zig:3488-3491).
rl_components[i].tag = DC_TYPED_PTR;
rl_components[i].inst
= exprRl(inner_gz, scope, RL_REF_VAL, variable_node);
rl_components[i].src_node = variable_node;
}
// Build destructure result location and evaluate RHS
// (AstGen.zig:3494-3499).
ResultLoc ds_rl;
memset(&ds_rl, 0, sizeof(ds_rl));
ds_rl.tag = RL_DESTRUCTURE;
ds_rl.src_node = node;
ds_rl.components = rl_components;
ds_rl.components_len = variable_count;
(void)exprRl(inner_gz, scope, ds_rl, value_expr);
// If comptime, finish block_comptime (AstGen.zig:3501-3505).
if (has_comptime) {
uint32_t comptime_block_inst
= makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
addBreak(inner_gz, ZIR_INST_BREAK_INLINE, comptime_block_inst,
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
setBlockComptimeBody(ag, inner_gz, comptime_block_inst,
COMPTIME_REASON_COMPTIME_KEYWORD);
gzAppendInstruction(gz, comptime_block_inst);
gzUnstack(inner_gz);
}
free(rl_components);
}
// --- assignDestructureMaybeDecls (AstGen.zig:3507-3729) ---
// Handles destructure assignments that may contain const/var declarations.
// Returns new scope containing any declared variables.
static Scope* assignDestructureMaybeDecls(GenZir* gz, Scope* scope,
uint32_t node, ScopeLocalVal* val_scopes, uint32_t* val_idx,
ScopeLocalPtr* ptr_scopes, uint32_t* ptr_idx, uint32_t max_scopes) {
(void)val_scopes;
(void)val_idx;
emitDbgNode(gz, node);
AstGenCtx* ag = gz->astgen;
const Ast* tree = ag->tree;
// Parse assign_destructure node.
AstData nd = tree->nodes.datas[node];
uint32_t extra_start = nd.lhs;
uint32_t value_expr = nd.rhs;
uint32_t variable_count = tree->extra_data.arr[extra_start];
const uint32_t* variables = tree->extra_data.arr + extra_start + 1;
// Detect comptime token.
bool has_comptime = false;
if (variable_count > 0) {
uint32_t first_var = variables[0];
AstNodeTag first_tag = tree->nodes.tags[first_var];
uint32_t first_tok;
if (first_tag == AST_NODE_GLOBAL_VAR_DECL
|| first_tag == AST_NODE_LOCAL_VAR_DECL
|| first_tag == AST_NODE_ALIGNED_VAR_DECL
|| first_tag == AST_NODE_SIMPLE_VAR_DECL) {
first_tok = firstToken(tree, first_var);
} else {
first_tok = firstToken(tree, first_var) - 1;
}
if (first_tok < tree->tokens.len
&& tree->tokens.tags[first_tok] == TOKEN_KEYWORD_COMPTIME) {
has_comptime = true;
}
}
bool is_comptime = has_comptime || gz->is_comptime;
bool value_is_comptime = tree->nodes.tags[value_expr] == AST_NODE_COMPTIME;
if (has_comptime && gz->is_comptime) {
SET_ERROR(ag); // redundant comptime
}
// First pass: build rl_components (AstGen.zig:3535-3620).
DestructureComponent* rl_components
= malloc(variable_count * sizeof(DestructureComponent));
if (!rl_components)
exit(1);
bool any_lvalue_expr = false;
for (uint32_t i = 0; i < variable_count; i++) {
uint32_t variable_node = variables[i];
AstNodeTag vtag = tree->nodes.tags[variable_node];
// Check for `_` identifier (AstGen.zig:3537-3544).
if (vtag == AST_NODE_IDENTIFIER) {
uint32_t ident_tok = tree->nodes.main_tokens[variable_node];
uint32_t tok_start = tree->tokens.starts[ident_tok];
if (tree->source[tok_start] == '_'
&& (tok_start + 1 >= tree->source_len
|| !((tree->source[tok_start + 1] >= 'a'
&& tree->source[tok_start + 1] <= 'z')
|| (tree->source[tok_start + 1] >= 'A'
&& tree->source[tok_start + 1] <= 'Z')
|| tree->source[tok_start + 1] == '_'
|| (tree->source[tok_start + 1] >= '0'
&& tree->source[tok_start + 1] <= '9')))) {
rl_components[i].tag = DC_DISCARD;
rl_components[i].inst = 0;
rl_components[i].src_node = 0;
continue;
}
}
// var/const declarations (AstGen.zig:3545-3607).
if (vtag == AST_NODE_GLOBAL_VAR_DECL || vtag == AST_NODE_LOCAL_VAR_DECL
|| vtag == AST_NODE_SIMPLE_VAR_DECL
|| vtag == AST_NODE_ALIGNED_VAR_DECL) {
AstData vnd = tree->nodes.datas[variable_node];
uint32_t mut_token = tree->nodes.main_tokens[variable_node];
bool var_is_const
= (tree->tokens.tags[mut_token] == TOKEN_KEYWORD_CONST);
bool this_comptime
= is_comptime || (var_is_const && value_is_comptime);
uint32_t type_node = vnd.lhs;
// AstGen.zig:3576-3607: typed vs inferred alloc.
if (type_node != 0) {
uint32_t type_inst = typeExpr(gz, scope, type_node);
ZirInstTag alloc_tag;
if (var_is_const)
alloc_tag = ZIR_INST_ALLOC;
else if (this_comptime)
alloc_tag = ZIR_INST_ALLOC_COMPTIME_MUT;
else
alloc_tag = ZIR_INST_ALLOC_MUT;
uint32_t ptr = addUnNode(gz, alloc_tag, type_inst, node);
rl_components[i].tag = DC_TYPED_PTR;
rl_components[i].inst = ptr;
rl_components[i].src_node = 0;
} else {
// Inferred alloc.
ZirInstTag alloc_tag;
if (var_is_const) {
alloc_tag = this_comptime
? ZIR_INST_ALLOC_INFERRED_COMPTIME
: ZIR_INST_ALLOC_INFERRED;
} else {
alloc_tag = this_comptime
? ZIR_INST_ALLOC_INFERRED_COMPTIME_MUT
: ZIR_INST_ALLOC_INFERRED_MUT;
}
uint32_t ptr = addNode(gz, alloc_tag, node);
rl_components[i].tag = DC_INFERRED_PTR;
rl_components[i].inst = ptr;
rl_components[i].src_node = 0;
}
continue;
}
// Lvalue expression (AstGen.zig:3609-3618).
any_lvalue_expr = true;
rl_components[i].tag = DC_TYPED_PTR;
rl_components[i].inst = 0; // will be filled in second pass
rl_components[i].src_node = variable_node;
}
// If comptime, wrap in sub-block (AstGen.zig:3627-3632).
GenZir gz_buf;
GenZir* inner_gz = gz;
if (has_comptime) {
gz_buf = makeSubBlock(gz, scope);
gz_buf.is_comptime = true;
inner_gz = &gz_buf;
}
// Second pass for lvalue expressions (AstGen.zig:3634-3642).
if (any_lvalue_expr) {
for (uint32_t i = 0; i < variable_count; i++) {
if (rl_components[i].tag != DC_TYPED_PTR)
continue;
AstNodeTag vtag = tree->nodes.tags[variables[i]];
if (vtag == AST_NODE_GLOBAL_VAR_DECL
|| vtag == AST_NODE_LOCAL_VAR_DECL
|| vtag == AST_NODE_SIMPLE_VAR_DECL
|| vtag == AST_NODE_ALIGNED_VAR_DECL)
continue;
rl_components[i].inst
= exprRl(inner_gz, scope, RL_REF_VAL, variables[i]);
}
}
// Evaluate RHS with destructure RL (AstGen.zig:3647-3652).
ResultLoc ds_rl;
memset(&ds_rl, 0, sizeof(ds_rl));
ds_rl.tag = RL_DESTRUCTURE;
ds_rl.src_node = node;
ds_rl.components = rl_components;
ds_rl.components_len = variable_count;
(void)exprRl(inner_gz, scope, ds_rl, value_expr);
// If comptime, finish block_comptime (AstGen.zig:3654-3660).
if (has_comptime) {
uint32_t comptime_block_inst
= makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
addBreak(inner_gz, ZIR_INST_BREAK_INLINE, comptime_block_inst,
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
setBlockComptimeBody(ag, inner_gz, comptime_block_inst,
COMPTIME_REASON_COMPTIME_KEYWORD);
gzAppendInstruction(gz, comptime_block_inst);
gzUnstack(inner_gz);
}
// Third pass: create scopes for declared variables (AstGen.zig:3664-3729).
Scope* cur_scope = scope;
for (uint32_t i = 0; i < variable_count; i++) {
uint32_t variable_node = variables[i];
AstNodeTag vtag = tree->nodes.tags[variable_node];
if (vtag != AST_NODE_LOCAL_VAR_DECL && vtag != AST_NODE_SIMPLE_VAR_DECL
&& vtag != AST_NODE_ALIGNED_VAR_DECL)
continue;
uint32_t mut_token = tree->nodes.main_tokens[variable_node];
bool var_is_const
= (tree->tokens.tags[mut_token] == TOKEN_KEYWORD_CONST);
uint32_t raw_ptr = rl_components[i].inst;
bool resolve_inferred = (rl_components[i].tag == DC_INFERRED_PTR);
// Resolve inferred alloc or make ptr const (AstGen.zig:3694-3700).
uint32_t final_ptr;
if (resolve_inferred)
final_ptr = addUnNode(
gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, raw_ptr, variable_node);
else if (var_is_const)
final_ptr = addUnNode(gz, ZIR_INST_MAKE_PTR_CONST, raw_ptr, node);
else
final_ptr = raw_ptr;
// Create dbg_var_ptr (AstGen.zig:3710).
uint32_t name_token = mut_token + 1;
uint32_t ident_name = identAsString(ag, name_token);
addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, final_ptr);
// Create scope (AstGen.zig:3712-3722).
if (*ptr_idx < max_scopes) {
ptr_scopes[*ptr_idx] = (ScopeLocalPtr) {
.base = { .tag = SCOPE_LOCAL_PTR },
.parent = cur_scope,
.name = ident_name,
.ptr = final_ptr,
};
cur_scope = &ptr_scopes[*ptr_idx].base;
(*ptr_idx)++;
} else {
SET_ERROR(ag);
}
}
free(rl_components);
return cur_scope;
}
// --- assignOp (AstGen.zig:3731) ---
// Handles compound assignment operators (+=, -=, *=, etc.).
@@ -9289,6 +9851,8 @@ static void varDecl(GenZir* gz, Scope* scope, uint32_t node,
// Evaluate init with RL pointing to alloc (AstGen.zig:3313-3316).
ResultLoc init_rl;
init_rl.components = NULL;
init_rl.components_len = 0;
if (type_node != 0) {
init_rl.tag = RL_PTR;
init_rl.data = var_ptr;
@@ -9361,6 +9925,8 @@ static void varDecl(GenZir* gz, Scope* scope, uint32_t node,
// Evaluate init with RL pointing to alloc (AstGen.zig:3395-3402).
ResultLoc var_init_rl;
var_init_rl.components = NULL;
var_init_rl.components_len = 0;
if (type_node != 0) {
var_init_rl.tag = RL_PTR;
var_init_rl.data = alloc_ref;
@@ -9645,6 +10211,12 @@ static void blockExprStmts(GenZir* gz, Scope* scope,
case AST_NODE_ASSIGN:
assignStmt(gz, cur_scope, inner_node);
break;
// assign_destructure (AstGen.zig:2578).
case AST_NODE_ASSIGN_DESTRUCTURE:
cur_scope
= assignDestructureMaybeDecls(gz, cur_scope, inner_node,
val_scopes, &val_idx, ptr_scopes, &ptr_idx, 128);
break;
// Shift assignment operators (AstGen.zig:2585-2586).
case AST_NODE_ASSIGN_SHL:
assignShift(gz, cur_scope, inner_node, ZIR_INST_SHL);


@@ -853,13 +853,13 @@ test "astgen: corpus astgen_test.zig" {
}
test "astgen: corpus array_list.zig" {
if (true) return error.SkipZigTest; // TODO: missing assign_destructure handler
if (true) return error.SkipZigTest; // TODO: +2 ALLOC_MUT / -2 EXTENDED tag mismatch at [6639]
const gpa = std.testing.allocator;
try corpusCheck(gpa, @embedFile("../lib/std/array_list.zig"));
}
test "astgen: corpus multi_array_list.zig" {
if (true) return error.SkipZigTest; // TODO: identifier resolution across namespace scopes
if (true) return error.SkipZigTest; // TODO: parser bug - C parser produces nodes_len=1
const gpa = std.testing.allocator;
try corpusCheck(gpa, @embedFile("../lib/std/multi_array_list.zig"));
}
} }