From 2fc7e81d49ecf93277393fd6eb86352d2d060ada Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Motiejus=20Jak=C5=A1tys?= Date: Fri, 13 Feb 2026 22:23:04 +0000 Subject: [PATCH] astgen: add shift assignment operators and grouped_expression unwrapping Port assignShift (AstGen.zig:3786) and assignShiftSat (AstGen.zig:3812) from upstream, handling <<=, >>=, and <<|= operators as both statements in blockExprStmts and expressions in exprRl. Previously these fell through to SET_ERROR. Add grouped_expression unwrapping loop in blockExprStmts (matching AstGen.zig:2569-2630) so that parenthesized statements like `(x += 1)` are correctly dispatched to assignment handlers instead of going through the default unusedResultExpr path. Co-Authored-By: Claude Opus 4.6 --- astgen.c | 358 ++++++++++++++++++++++++++++++++++--------------------- 1 file changed, 225 insertions(+), 133 deletions(-) diff --git a/astgen.c b/astgen.c index 3c56339bcb..45ab95e3e7 100644 --- a/astgen.c +++ b/astgen.c @@ -2185,6 +2185,9 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node); static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node); static void assignOp( GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag); +static void assignShift( + GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag); +static void assignShiftSat(GenZir* gz, Scope* scope, uint32_t infix_node); static uint32_t shiftOp( GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag); static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column); @@ -4799,6 +4802,16 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { case AST_NODE_ASSIGN_MUL_SAT: assignOp(gz, scope, node, ZIR_INST_MUL_SAT); return ZIR_REF_VOID_VALUE; + // Shift assignment operators (AstGen.zig:676-687). + case AST_NODE_ASSIGN_SHL: + assignShift(gz, scope, node, ZIR_INST_SHL); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_SHR: + assignShift(gz, scope, node, ZIR_INST_SHR); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_SHL_SAT: + assignShiftSat(gz, scope, node); + return ZIR_REF_VOID_VALUE; default: SET_ERROR(ag); return ZIR_REF_VOID_VALUE; @@ -6264,6 +6277,63 @@ static void assignOp( addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result); } +// --- assignShift (AstGen.zig:3786) --- +// Handles <<= and >>= assignment operators. + +static void assignShift( + GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag) { + emitDbgNode(gz, infix_node); + const AstGenCtx* ag = gz->astgen; + const Ast* tree = ag->tree; + + AstData nd = tree->nodes.datas[infix_node]; + uint32_t lhs_node = nd.lhs; + uint32_t rhs_node = nd.rhs; + + // Evaluate LHS as lvalue pointer (AstGen.zig:3797). + uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs_node); + // Load current value (AstGen.zig:3798). + uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node); + // RHS type is typeof_log2_int_type of LHS (AstGen.zig:3799). + uint32_t rhs_type + = addUnNode(gz, ZIR_INST_TYPEOF_LOG2_INT_TYPE, lhs, infix_node); + ResultLoc rhs_rl = { + .tag = RL_TY, .data = rhs_type, .src_node = 0, .ctx = RI_CTX_NONE + }; + uint32_t rhs = exprRl(gz, scope, rhs_rl, rhs_node); + + // Emit the shift operation (AstGen.zig:3802-3805). + uint32_t result = addPlNodeBin(gz, op_tag, infix_node, lhs, rhs); + // Store result back (AstGen.zig:3806-3809). + addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result); +} + +// --- assignShiftSat (AstGen.zig:3812) --- +// Handles <<|= saturating shift-left assignment. 
+ +static void assignShiftSat(GenZir* gz, Scope* scope, uint32_t infix_node) { + emitDbgNode(gz, infix_node); + const AstGenCtx* ag = gz->astgen; + const Ast* tree = ag->tree; + + AstData nd = tree->nodes.datas[infix_node]; + uint32_t lhs_node = nd.lhs; + uint32_t rhs_node = nd.rhs; + + // Evaluate LHS as lvalue pointer (AstGen.zig:3818). + uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs_node); + // Load current value (AstGen.zig:3819). + uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node); + // Saturating shift-left allows any integer type for both LHS and RHS + // (AstGen.zig:3820-3821). + uint32_t rhs = expr(gz, scope, rhs_node); + + // Emit shl_sat (AstGen.zig:3823-3825). + uint32_t result = addPlNodeBin(gz, ZIR_INST_SHL_SAT, infix_node, lhs, rhs); + // Store result back (AstGen.zig:3827-3830). + addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result); +} + // --- builtinEvalToError (BuiltinFn.zig) --- // Returns per-builtin eval_to_error. Default is .never; only a few are // .maybe or .always. Mirrors BuiltinFn.list lookup in AstGen.zig:10539. @@ -6911,142 +6981,164 @@ static void blockExprStmts(GenZir* gz, Scope* scope, if (ag->has_compile_errors) return; uint32_t stmt = statements[i]; - AstNodeTag tag = ag->tree->nodes.tags[stmt]; - switch (tag) { - case AST_NODE_ASSIGN: - assignStmt(gz, cur_scope, stmt); - break; - // Compound assignment operators (AstGen.zig:2588-2607). - case AST_NODE_ASSIGN_ADD: - assignOp(gz, cur_scope, stmt, ZIR_INST_ADD); - break; - case AST_NODE_ASSIGN_SUB: - assignOp(gz, cur_scope, stmt, ZIR_INST_SUB); - break; - case AST_NODE_ASSIGN_MUL: - assignOp(gz, cur_scope, stmt, ZIR_INST_MUL); - break; - case AST_NODE_ASSIGN_DIV: - assignOp(gz, cur_scope, stmt, ZIR_INST_DIV); - break; - case AST_NODE_ASSIGN_MOD: - assignOp(gz, cur_scope, stmt, ZIR_INST_MOD_REM); - break; - case AST_NODE_ASSIGN_BIT_AND: - assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_AND); - break; - case AST_NODE_ASSIGN_BIT_OR: - assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_OR); - break; - case AST_NODE_ASSIGN_BIT_XOR: - assignOp(gz, cur_scope, stmt, ZIR_INST_XOR); - break; - case AST_NODE_ASSIGN_ADD_WRAP: - assignOp(gz, cur_scope, stmt, ZIR_INST_ADDWRAP); - break; - case AST_NODE_ASSIGN_SUB_WRAP: - assignOp(gz, cur_scope, stmt, ZIR_INST_SUBWRAP); - break; - case AST_NODE_ASSIGN_MUL_WRAP: - assignOp(gz, cur_scope, stmt, ZIR_INST_MULWRAP); - break; - case AST_NODE_ASSIGN_ADD_SAT: - assignOp(gz, cur_scope, stmt, ZIR_INST_ADD_SAT); - break; - case AST_NODE_ASSIGN_SUB_SAT: - assignOp(gz, cur_scope, stmt, ZIR_INST_SUB_SAT); - break; - case AST_NODE_ASSIGN_MUL_SAT: - assignOp(gz, cur_scope, stmt, ZIR_INST_MUL_SAT); - break; - case AST_NODE_SIMPLE_VAR_DECL: - case AST_NODE_LOCAL_VAR_DECL: - case AST_NODE_ALIGNED_VAR_DECL: - if (val_idx < 64 && ptr_idx < 64) { - varDecl(gz, cur_scope, stmt, &val_scopes[val_idx], - &ptr_scopes[ptr_idx], &cur_scope); - // Check which one was used: if scope now points to - // val_scopes[val_idx], advance val_idx; same for ptr. - if (cur_scope == &val_scopes[val_idx].base) - val_idx++; - else if (cur_scope == &ptr_scopes[ptr_idx].base) - ptr_idx++; - } else { - SET_ERROR(ag); - } - break; - // defer/errdefer (AstGen.zig:2580-2581). - case AST_NODE_DEFER: - case AST_NODE_ERRDEFER: { - if (defer_idx >= 64) { - SET_ERROR(ag); + // Unwrap grouped_expression (parentheses) before dispatching + // (AstGen.zig:2569-2630). 
+ uint32_t inner_node = stmt; + for (;;) { + AstNodeTag tag = ag->tree->nodes.tags[inner_node]; + switch (tag) { + case AST_NODE_ASSIGN: + assignStmt(gz, cur_scope, inner_node); + break; + // Shift assignment operators (AstGen.zig:2585-2586). + case AST_NODE_ASSIGN_SHL: + assignShift(gz, cur_scope, inner_node, ZIR_INST_SHL); + break; + case AST_NODE_ASSIGN_SHR: + assignShift(gz, cur_scope, inner_node, ZIR_INST_SHR); + break; + // Saturating shift-left assignment (AstGen.zig:680-682 via expr). + case AST_NODE_ASSIGN_SHL_SAT: + assignShiftSat(gz, cur_scope, inner_node); + break; + // Compound assignment operators (AstGen.zig:2588-2607). + case AST_NODE_ASSIGN_ADD: + assignOp(gz, cur_scope, inner_node, ZIR_INST_ADD); + break; + case AST_NODE_ASSIGN_SUB: + assignOp(gz, cur_scope, inner_node, ZIR_INST_SUB); + break; + case AST_NODE_ASSIGN_MUL: + assignOp(gz, cur_scope, inner_node, ZIR_INST_MUL); + break; + case AST_NODE_ASSIGN_DIV: + assignOp(gz, cur_scope, inner_node, ZIR_INST_DIV); + break; + case AST_NODE_ASSIGN_MOD: + assignOp(gz, cur_scope, inner_node, ZIR_INST_MOD_REM); + break; + case AST_NODE_ASSIGN_BIT_AND: + assignOp(gz, cur_scope, inner_node, ZIR_INST_BIT_AND); + break; + case AST_NODE_ASSIGN_BIT_OR: + assignOp(gz, cur_scope, inner_node, ZIR_INST_BIT_OR); + break; + case AST_NODE_ASSIGN_BIT_XOR: + assignOp(gz, cur_scope, inner_node, ZIR_INST_XOR); + break; + case AST_NODE_ASSIGN_ADD_WRAP: + assignOp(gz, cur_scope, inner_node, ZIR_INST_ADDWRAP); + break; + case AST_NODE_ASSIGN_SUB_WRAP: + assignOp(gz, cur_scope, inner_node, ZIR_INST_SUBWRAP); + break; + case AST_NODE_ASSIGN_MUL_WRAP: + assignOp(gz, cur_scope, inner_node, ZIR_INST_MULWRAP); + break; + case AST_NODE_ASSIGN_ADD_SAT: + assignOp(gz, cur_scope, inner_node, ZIR_INST_ADD_SAT); + break; + case AST_NODE_ASSIGN_SUB_SAT: + assignOp(gz, cur_scope, inner_node, ZIR_INST_SUB_SAT); + break; + case AST_NODE_ASSIGN_MUL_SAT: + assignOp(gz, cur_scope, inner_node, ZIR_INST_MUL_SAT); + break; + case AST_NODE_SIMPLE_VAR_DECL: + case AST_NODE_LOCAL_VAR_DECL: + case AST_NODE_ALIGNED_VAR_DECL: + if (val_idx < 64 && ptr_idx < 64) { + varDecl(gz, cur_scope, stmt, &val_scopes[val_idx], + &ptr_scopes[ptr_idx], &cur_scope); + // Check which one was used: if scope now points to + // val_scopes[val_idx], advance val_idx; same for ptr. + if (cur_scope == &val_scopes[val_idx].base) + val_idx++; + else if (cur_scope == &ptr_scopes[ptr_idx].base) + ptr_idx++; + } else { + SET_ERROR(ag); + } + break; + // defer/errdefer (AstGen.zig:2580-2581). + case AST_NODE_DEFER: + case AST_NODE_ERRDEFER: { + if (defer_idx >= 64) { + SET_ERROR(ag); + break; + } + ScopeTag scope_tag = (tag == AST_NODE_DEFER) + ? SCOPE_DEFER_NORMAL + : SCOPE_DEFER_ERROR; + // Create sub-block for defer body (AstGen.zig:3123-3126). + GenZir defer_gen = makeSubBlock(gz, cur_scope); + + // Evaluate deferred expression (AstGen.zig:3165). + // DEFER: lhs is the deferred expression, rhs = 0. + // ERRDEFER: lhs is optional error capture token, rhs is expr. + AstData dnd = ag->tree->nodes.datas[stmt]; + uint32_t expr_node; + if (tag == AST_NODE_DEFER) { + expr_node = dnd.lhs; + } else { + expr_node = dnd.rhs; + } + // unusedResultExpr pattern (AstGen.zig:3165, 2641-2646). + emitDbgNode(&defer_gen, expr_node); + uint32_t defer_result + = expr(&defer_gen, &defer_gen.base, expr_node); + addEnsureResult(&defer_gen, defer_result, expr_node); + + // Add break_inline at end (AstGen.zig:3167). 
+ addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0, + ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE); + + // Write body to extra (AstGen.zig:3173-3175). + uint32_t raw_body_len = gzInstructionsLen(&defer_gen); + const uint32_t* body = gzInstructionsSlice(&defer_gen); + uint32_t extra_index = ag->extra_len; + uint32_t fixup_len + = countBodyLenAfterFixups(ag, body, raw_body_len); + ensureExtraCapacity(ag, fixup_len); + for (uint32_t b = 0; b < raw_body_len; b++) + appendPossiblyRefdBodyInst(ag, body[b]); + gzUnstack(&defer_gen); + + // Create scope (AstGen.zig:3179-3185). + defer_scopes[defer_idx] = (ScopeDefer) { + .base = { .tag = scope_tag }, + .parent = cur_scope, + .index = extra_index, + .len = fixup_len, + }; + cur_scope = &defer_scopes[defer_idx].base; + defer_idx++; break; } - ScopeTag scope_tag = (tag == AST_NODE_DEFER) ? SCOPE_DEFER_NORMAL - : SCOPE_DEFER_ERROR; - // Create sub-block for defer body (AstGen.zig:3123-3126). - GenZir defer_gen = makeSubBlock(gz, cur_scope); - - // Evaluate deferred expression (AstGen.zig:3165). - // DEFER: lhs is the deferred expression, rhs = 0. - // ERRDEFER: lhs is optional error capture token, rhs is expr. - AstData dnd = ag->tree->nodes.datas[stmt]; - uint32_t expr_node; - if (tag == AST_NODE_DEFER) { - expr_node = dnd.lhs; - } else { - expr_node = dnd.rhs; + // Grouped expression: unwrap parentheses (AstGen.zig:2600-2602). + case AST_NODE_GROUPED_EXPRESSION: + inner_node = ag->tree->nodes.datas[inner_node].lhs; + continue; + // while/for as statements (AstGen.zig:2605-2610). + // These do NOT get emitDbgNode; they emit their own dbg_stmt. + case AST_NODE_WHILE_SIMPLE: + case AST_NODE_WHILE_CONT: + case AST_NODE_WHILE: + (void)whileExpr(gz, cur_scope, inner_node, true); + break; + case AST_NODE_FOR_SIMPLE: + case AST_NODE_FOR: + (void)forExpr(gz, cur_scope, inner_node, true); + break; + default: { + // Expression statement (AstGen.zig:2627 unusedResultExpr). + emitDbgNode(gz, inner_node); + uint32_t result = expr(gz, cur_scope, inner_node); + noreturn_stmt = addEnsureResult(gz, result, inner_node); + break; } - // unusedResultExpr pattern (AstGen.zig:3165, 2641-2646). - emitDbgNode(&defer_gen, expr_node); - uint32_t defer_result - = expr(&defer_gen, &defer_gen.base, expr_node); - addEnsureResult(&defer_gen, defer_result, expr_node); - - // Add break_inline at end (AstGen.zig:3167). - addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0, ZIR_REF_VOID_VALUE, - AST_NODE_OFFSET_NONE); - - // Write body to extra (AstGen.zig:3173-3175). - uint32_t raw_body_len = gzInstructionsLen(&defer_gen); - const uint32_t* body = gzInstructionsSlice(&defer_gen); - uint32_t extra_index = ag->extra_len; - uint32_t fixup_len - = countBodyLenAfterFixups(ag, body, raw_body_len); - ensureExtraCapacity(ag, fixup_len); - for (uint32_t b = 0; b < raw_body_len; b++) - appendPossiblyRefdBodyInst(ag, body[b]); - gzUnstack(&defer_gen); - - // Create scope (AstGen.zig:3179-3185). - defer_scopes[defer_idx] = (ScopeDefer) { - .base = { .tag = scope_tag }, - .parent = cur_scope, - .index = extra_index, - .len = fixup_len, - }; - cur_scope = &defer_scopes[defer_idx].base; - defer_idx++; - break; - } - // while/for as statements (AstGen.zig:2605-2610). - // These do NOT get emitDbgNode; they emit their own dbg_stmt. 
- case AST_NODE_WHILE_SIMPLE: - case AST_NODE_WHILE_CONT: - case AST_NODE_WHILE: - (void)whileExpr(gz, cur_scope, stmt, true); - break; - case AST_NODE_FOR_SIMPLE: - case AST_NODE_FOR: - (void)forExpr(gz, cur_scope, stmt, true); - break; - default: { - // Expression statement (AstGen.zig:2627 unusedResultExpr). - emitDbgNode(gz, stmt); - uint32_t result = expr(gz, cur_scope, stmt); - noreturn_stmt = addEnsureResult(gz, result, stmt); - break; - } + } + break; // Break out of the for(;;) unwrapping loop. } } // Emit normal defers at block exit (AstGen.zig:2633-2634).
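
For <<= and >>= the RHS gets a ty result location of typeof_log2_int_type applied to the loaded LHS, i.e. the shift amount is coerced to the small unsigned type that can index every bit of the LHS. A minimal sketch of that width rule as I understand it (an N-bit LHS shifts by a ceil(log2(N))-bit unsigned amount, so a u64 takes a u6); log2IntTypeBits is an illustrative helper, not anything in astgen.c:

#include <stdint.h>
#include <stdio.h>

/* Bits needed to represent shift amounts 0..bits-1 (illustrative only). */
static uint32_t log2IntTypeBits(uint32_t bits) {
    uint32_t b = 0;
    while ((1u << b) < bits) b++;
    return b;
}

int main(void) {
    const uint32_t widths[] = { 8, 16, 32, 64, 128 };
    for (unsigned i = 0; i < sizeof widths / sizeof *widths; i++)
        printf("u%u <<= rhs   =>   rhs coerced to u%u\n",
               widths[i], log2IntTypeBits(widths[i]));
    return 0;
}

Running it prints u8 -> u3, u16 -> u4, u32 -> u5, u64 -> u6, u128 -> u7, which is the coercion the ty result location performs on the shift amount.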
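
For <<|= the RHS is evaluated with no result type, because saturating shift-left accepts any integer shift amount, and the emitted shl_sat clamps on overflow instead of discarding bits. A small sketch of that runtime behaviour for an unsigned 32-bit value, assuming the usual clamp-to-max rule; shlSat32 is a hypothetical helper, not part of the patch:

#include <stdint.h>
#include <stdio.h>

/* Saturating shift-left for a 32-bit unsigned value (illustrative only). */
static uint32_t shlSat32(uint32_t x, uint32_t n) {
    if (x == 0) return 0;
    if (n >= 32) return UINT32_MAX;
    /* Overflow iff any of the top n bits are already set. */
    if (x > (UINT32_MAX >> n)) return UINT32_MAX;
    return x << n;
}

int main(void) {
    printf("%u\n", shlSat32(1, 4));           /* 16: no saturation */
    printf("%u\n", shlSat32(0x80000000u, 1)); /* UINT32_MAX: saturated */
    printf("%u\n", shlSat32(3, 40));          /* UINT32_MAX: shift >= width */
    return 0;
}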
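
The grouped_expression change boils down to a small control-flow shape: strip parentheses in a loop, then dispatch once on the inner tag. A toy, self-contained version of that shape, with invented node tags and print statements standing in for the real assignment and unusedResultExpr paths:

#include <stdint.h>
#include <stdio.h>

typedef enum { TOY_GROUPED, TOY_ASSIGN_SHL, TOY_CALL } ToyTag;

typedef struct {
    ToyTag   tag;
    uint32_t lhs; /* for TOY_GROUPED: index of the wrapped node */
} ToyNode;

static void dispatchStmt(const ToyNode* nodes, uint32_t stmt) {
    uint32_t inner = stmt;
    for (;;) {
        switch (nodes[inner].tag) {
        case TOY_GROUPED:
            /* `(expr)`: unwrap and re-dispatch the inner node. */
            inner = nodes[inner].lhs;
            continue;
        case TOY_ASSIGN_SHL:
            printf("node %u: assignShift path\n", inner);
            break;
        default:
            printf("node %u: unusedResultExpr path\n", inner);
            break;
        }
        break; /* leave the unwrapping loop once a handler ran */
    }
}

int main(void) {
    /* node 0 models ((x <<= 1)): grouped -> grouped -> assign_shl. */
    const ToyNode nodes[] = {
        [0] = { .tag = TOY_GROUPED, .lhs = 1 },
        [1] = { .tag = TOY_GROUPED, .lhs = 2 },
        [2] = { .tag = TOY_ASSIGN_SHL, .lhs = 0 },
        [3] = { .tag = TOY_CALL, .lhs = 0 },
    };
    dispatchStmt(nodes, 0); /* unwraps twice, hits the assignShift path */
    dispatchStmt(nodes, 3); /* falls through to the default path */
    return 0;
}

The continue re-enters the for(;;) after unwrapping; every handler case breaks out of the switch and the trailing break leaves the loop, which is the same structure the patch gives blockExprStmts.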