astgen: add shift assignment operators and grouped_expression unwrapping

Port assignShift (AstGen.zig:3786) and assignShiftSat (AstGen.zig:3812)
from upstream, handling <<=, >>=, and <<|= operators as both statements
in blockExprStmts and expressions in exprRl. Previously these fell
through to SET_ERROR.

Add grouped_expression unwrapping loop in blockExprStmts (matching
AstGen.zig:2569-2630) so that parenthesized statements like `(x += 1)`
are correctly dispatched to assignment handlers instead of going through
the default unusedResultExpr path.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-13 22:23:04 +00:00
parent f54d3f94a3
commit 2fc7e81d49

358
astgen.c
View File

@@ -2185,6 +2185,9 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node); static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node);
static void assignOp( static void assignOp(
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag); GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag);
static void assignShift(
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag);
static void assignShiftSat(GenZir* gz, Scope* scope, uint32_t infix_node);
static uint32_t shiftOp( static uint32_t shiftOp(
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag); GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag);
static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column); static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column);
@@ -4799,6 +4802,16 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
case AST_NODE_ASSIGN_MUL_SAT: case AST_NODE_ASSIGN_MUL_SAT:
assignOp(gz, scope, node, ZIR_INST_MUL_SAT); assignOp(gz, scope, node, ZIR_INST_MUL_SAT);
return ZIR_REF_VOID_VALUE; return ZIR_REF_VOID_VALUE;
// Shift assignment operators (AstGen.zig:676-687).
case AST_NODE_ASSIGN_SHL:
assignShift(gz, scope, node, ZIR_INST_SHL);
return ZIR_REF_VOID_VALUE;
case AST_NODE_ASSIGN_SHR:
assignShift(gz, scope, node, ZIR_INST_SHR);
return ZIR_REF_VOID_VALUE;
case AST_NODE_ASSIGN_SHL_SAT:
assignShiftSat(gz, scope, node);
return ZIR_REF_VOID_VALUE;
default: default:
SET_ERROR(ag); SET_ERROR(ag);
return ZIR_REF_VOID_VALUE; return ZIR_REF_VOID_VALUE;
@@ -6264,6 +6277,63 @@ static void assignOp(
addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result); addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result);
} }
// --- assignShift (AstGen.zig:3786) ---
// Implements the <<= and >>= compound shift assignments: evaluate the
// LHS as an lvalue pointer, load the current value, evaluate the RHS
// with the log2 integer type of the LHS as its result type, apply the
// shift, and store the result back through the pointer.
static void assignShift(
    GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag) {
    emitDbgNode(gz, infix_node);
    const Ast* tree = gz->astgen->tree;
    AstData data = tree->nodes.datas[infix_node];
    // LHS lvalue pointer, then the loaded value (AstGen.zig:3797-3798).
    uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, data.lhs);
    uint32_t lhs_val = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);
    // Shift amounts are typed as typeof_log2_int_type(lhs)
    // (AstGen.zig:3799).
    uint32_t shift_ty
        = addUnNode(gz, ZIR_INST_TYPEOF_LOG2_INT_TYPE, lhs_val, infix_node);
    ResultLoc rhs_rl = {
        .tag = RL_TY, .data = shift_ty, .src_node = 0, .ctx = RI_CTX_NONE
    };
    uint32_t rhs_val = exprRl(gz, scope, rhs_rl, data.rhs);
    // Perform the shift and write it back (AstGen.zig:3802-3809).
    uint32_t shifted = addPlNodeBin(gz, op_tag, infix_node, lhs_val, rhs_val);
    addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, shifted);
}
// --- assignShiftSat (AstGen.zig:3812) ---
// Implements <<|=, the saturating shift-left compound assignment.
// Unlike assignShift, the RHS gets no result type: saturating shifts
// accept any integer type for both operands (AstGen.zig:3820-3821).
static void assignShiftSat(GenZir* gz, Scope* scope, uint32_t infix_node) {
    emitDbgNode(gz, infix_node);
    const Ast* tree = gz->astgen->tree;
    AstData data = tree->nodes.datas[infix_node];
    // LHS lvalue pointer, then the loaded value (AstGen.zig:3818-3819).
    uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, data.lhs);
    uint32_t lhs_val = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);
    // RHS evaluated without a result type (AstGen.zig:3820-3821).
    uint32_t rhs_val = expr(gz, scope, data.rhs);
    // Emit shl_sat and store the result back (AstGen.zig:3823-3830).
    uint32_t saturated
        = addPlNodeBin(gz, ZIR_INST_SHL_SAT, infix_node, lhs_val, rhs_val);
    addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, saturated);
}
// --- builtinEvalToError (BuiltinFn.zig) --- // --- builtinEvalToError (BuiltinFn.zig) ---
// Returns per-builtin eval_to_error. Default is .never; only a few are // Returns per-builtin eval_to_error. Default is .never; only a few are
// .maybe or .always. Mirrors BuiltinFn.list lookup in AstGen.zig:10539. // .maybe or .always. Mirrors BuiltinFn.list lookup in AstGen.zig:10539.
@@ -6911,142 +6981,164 @@ static void blockExprStmts(GenZir* gz, Scope* scope,
if (ag->has_compile_errors) if (ag->has_compile_errors)
return; return;
uint32_t stmt = statements[i]; uint32_t stmt = statements[i];
AstNodeTag tag = ag->tree->nodes.tags[stmt]; // Unwrap grouped_expression (parentheses) before dispatching
switch (tag) { // (AstGen.zig:2569-2630).
case AST_NODE_ASSIGN: uint32_t inner_node = stmt;
assignStmt(gz, cur_scope, stmt); for (;;) {
break; AstNodeTag tag = ag->tree->nodes.tags[inner_node];
// Compound assignment operators (AstGen.zig:2588-2607). switch (tag) {
case AST_NODE_ASSIGN_ADD: case AST_NODE_ASSIGN:
assignOp(gz, cur_scope, stmt, ZIR_INST_ADD); assignStmt(gz, cur_scope, inner_node);
break; break;
case AST_NODE_ASSIGN_SUB: // Shift assignment operators (AstGen.zig:2585-2586).
assignOp(gz, cur_scope, stmt, ZIR_INST_SUB); case AST_NODE_ASSIGN_SHL:
break; assignShift(gz, cur_scope, inner_node, ZIR_INST_SHL);
case AST_NODE_ASSIGN_MUL: break;
assignOp(gz, cur_scope, stmt, ZIR_INST_MUL); case AST_NODE_ASSIGN_SHR:
break; assignShift(gz, cur_scope, inner_node, ZIR_INST_SHR);
case AST_NODE_ASSIGN_DIV: break;
assignOp(gz, cur_scope, stmt, ZIR_INST_DIV); // Saturating shift-left assignment (AstGen.zig:680-682 via expr).
break; case AST_NODE_ASSIGN_SHL_SAT:
case AST_NODE_ASSIGN_MOD: assignShiftSat(gz, cur_scope, inner_node);
assignOp(gz, cur_scope, stmt, ZIR_INST_MOD_REM); break;
break; // Compound assignment operators (AstGen.zig:2588-2607).
case AST_NODE_ASSIGN_BIT_AND: case AST_NODE_ASSIGN_ADD:
assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_AND); assignOp(gz, cur_scope, inner_node, ZIR_INST_ADD);
break; break;
case AST_NODE_ASSIGN_BIT_OR: case AST_NODE_ASSIGN_SUB:
assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_OR); assignOp(gz, cur_scope, inner_node, ZIR_INST_SUB);
break; break;
case AST_NODE_ASSIGN_BIT_XOR: case AST_NODE_ASSIGN_MUL:
assignOp(gz, cur_scope, stmt, ZIR_INST_XOR); assignOp(gz, cur_scope, inner_node, ZIR_INST_MUL);
break; break;
case AST_NODE_ASSIGN_ADD_WRAP: case AST_NODE_ASSIGN_DIV:
assignOp(gz, cur_scope, stmt, ZIR_INST_ADDWRAP); assignOp(gz, cur_scope, inner_node, ZIR_INST_DIV);
break; break;
case AST_NODE_ASSIGN_SUB_WRAP: case AST_NODE_ASSIGN_MOD:
assignOp(gz, cur_scope, stmt, ZIR_INST_SUBWRAP); assignOp(gz, cur_scope, inner_node, ZIR_INST_MOD_REM);
break; break;
case AST_NODE_ASSIGN_MUL_WRAP: case AST_NODE_ASSIGN_BIT_AND:
assignOp(gz, cur_scope, stmt, ZIR_INST_MULWRAP); assignOp(gz, cur_scope, inner_node, ZIR_INST_BIT_AND);
break; break;
case AST_NODE_ASSIGN_ADD_SAT: case AST_NODE_ASSIGN_BIT_OR:
assignOp(gz, cur_scope, stmt, ZIR_INST_ADD_SAT); assignOp(gz, cur_scope, inner_node, ZIR_INST_BIT_OR);
break; break;
case AST_NODE_ASSIGN_SUB_SAT: case AST_NODE_ASSIGN_BIT_XOR:
assignOp(gz, cur_scope, stmt, ZIR_INST_SUB_SAT); assignOp(gz, cur_scope, inner_node, ZIR_INST_XOR);
break; break;
case AST_NODE_ASSIGN_MUL_SAT: case AST_NODE_ASSIGN_ADD_WRAP:
assignOp(gz, cur_scope, stmt, ZIR_INST_MUL_SAT); assignOp(gz, cur_scope, inner_node, ZIR_INST_ADDWRAP);
break; break;
case AST_NODE_SIMPLE_VAR_DECL: case AST_NODE_ASSIGN_SUB_WRAP:
case AST_NODE_LOCAL_VAR_DECL: assignOp(gz, cur_scope, inner_node, ZIR_INST_SUBWRAP);
case AST_NODE_ALIGNED_VAR_DECL: break;
if (val_idx < 64 && ptr_idx < 64) { case AST_NODE_ASSIGN_MUL_WRAP:
varDecl(gz, cur_scope, stmt, &val_scopes[val_idx], assignOp(gz, cur_scope, inner_node, ZIR_INST_MULWRAP);
&ptr_scopes[ptr_idx], &cur_scope); break;
// Check which one was used: if scope now points to case AST_NODE_ASSIGN_ADD_SAT:
// val_scopes[val_idx], advance val_idx; same for ptr. assignOp(gz, cur_scope, inner_node, ZIR_INST_ADD_SAT);
if (cur_scope == &val_scopes[val_idx].base) break;
val_idx++; case AST_NODE_ASSIGN_SUB_SAT:
else if (cur_scope == &ptr_scopes[ptr_idx].base) assignOp(gz, cur_scope, inner_node, ZIR_INST_SUB_SAT);
ptr_idx++; break;
} else { case AST_NODE_ASSIGN_MUL_SAT:
SET_ERROR(ag); assignOp(gz, cur_scope, inner_node, ZIR_INST_MUL_SAT);
} break;
break; case AST_NODE_SIMPLE_VAR_DECL:
// defer/errdefer (AstGen.zig:2580-2581). case AST_NODE_LOCAL_VAR_DECL:
case AST_NODE_DEFER: case AST_NODE_ALIGNED_VAR_DECL:
case AST_NODE_ERRDEFER: { if (val_idx < 64 && ptr_idx < 64) {
if (defer_idx >= 64) { varDecl(gz, cur_scope, stmt, &val_scopes[val_idx],
SET_ERROR(ag); &ptr_scopes[ptr_idx], &cur_scope);
// Check which one was used: if scope now points to
// val_scopes[val_idx], advance val_idx; same for ptr.
if (cur_scope == &val_scopes[val_idx].base)
val_idx++;
else if (cur_scope == &ptr_scopes[ptr_idx].base)
ptr_idx++;
} else {
SET_ERROR(ag);
}
break;
// defer/errdefer (AstGen.zig:2580-2581).
case AST_NODE_DEFER:
case AST_NODE_ERRDEFER: {
if (defer_idx >= 64) {
SET_ERROR(ag);
break;
}
ScopeTag scope_tag = (tag == AST_NODE_DEFER)
? SCOPE_DEFER_NORMAL
: SCOPE_DEFER_ERROR;
// Create sub-block for defer body (AstGen.zig:3123-3126).
GenZir defer_gen = makeSubBlock(gz, cur_scope);
// Evaluate deferred expression (AstGen.zig:3165).
// DEFER: lhs is the deferred expression, rhs = 0.
// ERRDEFER: lhs is optional error capture token, rhs is expr.
AstData dnd = ag->tree->nodes.datas[stmt];
uint32_t expr_node;
if (tag == AST_NODE_DEFER) {
expr_node = dnd.lhs;
} else {
expr_node = dnd.rhs;
}
// unusedResultExpr pattern (AstGen.zig:3165, 2641-2646).
emitDbgNode(&defer_gen, expr_node);
uint32_t defer_result
= expr(&defer_gen, &defer_gen.base, expr_node);
addEnsureResult(&defer_gen, defer_result, expr_node);
// Add break_inline at end (AstGen.zig:3167).
addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0,
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
// Write body to extra (AstGen.zig:3173-3175).
uint32_t raw_body_len = gzInstructionsLen(&defer_gen);
const uint32_t* body = gzInstructionsSlice(&defer_gen);
uint32_t extra_index = ag->extra_len;
uint32_t fixup_len
= countBodyLenAfterFixups(ag, body, raw_body_len);
ensureExtraCapacity(ag, fixup_len);
for (uint32_t b = 0; b < raw_body_len; b++)
appendPossiblyRefdBodyInst(ag, body[b]);
gzUnstack(&defer_gen);
// Create scope (AstGen.zig:3179-3185).
defer_scopes[defer_idx] = (ScopeDefer) {
.base = { .tag = scope_tag },
.parent = cur_scope,
.index = extra_index,
.len = fixup_len,
};
cur_scope = &defer_scopes[defer_idx].base;
defer_idx++;
break; break;
} }
ScopeTag scope_tag = (tag == AST_NODE_DEFER) ? SCOPE_DEFER_NORMAL // Grouped expression: unwrap parentheses (AstGen.zig:2600-2602).
: SCOPE_DEFER_ERROR; case AST_NODE_GROUPED_EXPRESSION:
// Create sub-block for defer body (AstGen.zig:3123-3126). inner_node = ag->tree->nodes.datas[inner_node].lhs;
GenZir defer_gen = makeSubBlock(gz, cur_scope); continue;
// while/for as statements (AstGen.zig:2605-2610).
// Evaluate deferred expression (AstGen.zig:3165). // These do NOT get emitDbgNode; they emit their own dbg_stmt.
// DEFER: lhs is the deferred expression, rhs = 0. case AST_NODE_WHILE_SIMPLE:
// ERRDEFER: lhs is optional error capture token, rhs is expr. case AST_NODE_WHILE_CONT:
AstData dnd = ag->tree->nodes.datas[stmt]; case AST_NODE_WHILE:
uint32_t expr_node; (void)whileExpr(gz, cur_scope, inner_node, true);
if (tag == AST_NODE_DEFER) { break;
expr_node = dnd.lhs; case AST_NODE_FOR_SIMPLE:
} else { case AST_NODE_FOR:
expr_node = dnd.rhs; (void)forExpr(gz, cur_scope, inner_node, true);
break;
default: {
// Expression statement (AstGen.zig:2627 unusedResultExpr).
emitDbgNode(gz, inner_node);
uint32_t result = expr(gz, cur_scope, inner_node);
noreturn_stmt = addEnsureResult(gz, result, inner_node);
break;
} }
// unusedResultExpr pattern (AstGen.zig:3165, 2641-2646). }
emitDbgNode(&defer_gen, expr_node); break; // Break out of the for(;;) unwrapping loop.
uint32_t defer_result
= expr(&defer_gen, &defer_gen.base, expr_node);
addEnsureResult(&defer_gen, defer_result, expr_node);
// Add break_inline at end (AstGen.zig:3167).
addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0, ZIR_REF_VOID_VALUE,
AST_NODE_OFFSET_NONE);
// Write body to extra (AstGen.zig:3173-3175).
uint32_t raw_body_len = gzInstructionsLen(&defer_gen);
const uint32_t* body = gzInstructionsSlice(&defer_gen);
uint32_t extra_index = ag->extra_len;
uint32_t fixup_len
= countBodyLenAfterFixups(ag, body, raw_body_len);
ensureExtraCapacity(ag, fixup_len);
for (uint32_t b = 0; b < raw_body_len; b++)
appendPossiblyRefdBodyInst(ag, body[b]);
gzUnstack(&defer_gen);
// Create scope (AstGen.zig:3179-3185).
defer_scopes[defer_idx] = (ScopeDefer) {
.base = { .tag = scope_tag },
.parent = cur_scope,
.index = extra_index,
.len = fixup_len,
};
cur_scope = &defer_scopes[defer_idx].base;
defer_idx++;
break;
}
// while/for as statements (AstGen.zig:2605-2610).
// These do NOT get emitDbgNode; they emit their own dbg_stmt.
case AST_NODE_WHILE_SIMPLE:
case AST_NODE_WHILE_CONT:
case AST_NODE_WHILE:
(void)whileExpr(gz, cur_scope, stmt, true);
break;
case AST_NODE_FOR_SIMPLE:
case AST_NODE_FOR:
(void)forExpr(gz, cur_scope, stmt, true);
break;
default: {
// Expression statement (AstGen.zig:2627 unusedResultExpr).
emitDbgNode(gz, stmt);
uint32_t result = expr(gz, cur_scope, stmt);
noreturn_stmt = addEnsureResult(gz, result, stmt);
break;
}
} }
} }
// Emit normal defers at block exit (AstGen.zig:2633-2634). // Emit normal defers at block exit (AstGen.zig:2633-2634).