diff --git a/stage0/astgen.c b/stage0/astgen.c
index c3e09c2a16..df82cb13e5 100644
--- a/stage0/astgen.c
+++ b/stage0/astgen.c
@@ -2351,6 +2351,9 @@ static uint32_t arrayInitDotExpr(
     GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
 static uint32_t switchExpr(
     GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
+static uint32_t switchExprErrUnion(GenZir* gz, Scope* scope, ResultLoc rl,
+    uint32_t catch_or_if_node,
+    int node_ty); // 0=catch, 1=if
 static uint32_t whileExpr(
     GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_statement);
 #define EVAL_TO_ERROR_NEVER 0
@@ -3128,12 +3131,15 @@ decl_table:
     // Decl table lookup (AstGen.zig:8462-8520).
     for (uint32_t i = 0; i < ag->decl_table_len; i++) {
         if (ag->decl_names[i] == name_str) {
-            ZirInstTag itag
-                = (RL_IS_REF(rl)) ? ZIR_INST_DECL_REF : ZIR_INST_DECL_VAL;
             ZirInstData data;
             data.str_tok.start = name_str;
             data.str_tok.src_tok = tokenIndexToRelative(gz, ident_token);
-            return addInstruction(gz, itag, data);
+            if (RL_IS_REF(rl)) {
+                return addInstruction(gz, ZIR_INST_DECL_REF, data);
+            } else {
+                uint32_t result = addInstruction(gz, ZIR_INST_DECL_VAL, data);
+                return rvalueNoCoercePreRef(gz, rl, result, node);
+            }
         }
     }
 
@@ -4973,6 +4979,29 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
         uint32_t pt = (ag->tree->tokens.tags[catch_token + 1] == TOKEN_PIPE)
             ? catch_token + 2
             : UINT32_MAX;
+        // Detect catch |err| switch(err) { ... } pattern
+        // (AstGen.zig:1023-1029).
+        if (pt != UINT32_MAX) {
+            AstData catch_data = ag->tree->nodes.datas[node];
+            uint32_t rhs = catch_data.rhs;
+            AstNodeTag rhs_tag = ag->tree->nodes.tags[rhs];
+            if ((rhs_tag == AST_NODE_SWITCH
+                    || rhs_tag == AST_NODE_SWITCH_COMMA)
+                && ag->tree->tokens.tags[ag->tree->nodes.main_tokens[rhs]]
+                    == TOKEN_KEYWORD_SWITCH) {
+                // Check switch condition is identifier matching capture.
+                uint32_t cond = ag->tree->nodes.datas[rhs].lhs;
+                if (ag->tree->nodes.tags[cond] == AST_NODE_IDENTIFIER
+                    && tokenIdentEql(
+                        ag->tree, pt, ag->tree->nodes.main_tokens[cond])) {
+                    // Apply ri.br(): convert coerced_ty to ty.
+                    ResultLoc brl = rl;
+                    if (brl.tag == RL_COERCED_TY)
+                        brl.tag = RL_TY;
+                    return switchExprErrUnion(gz, scope, brl, node, 0);
+                }
+            }
+        }
         if (RL_IS_REF(rl)) {
             return orelseCatchExpr(gz, scope, rl, node,
                 ZIR_INST_IS_NON_ERR_PTR, ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE_PTR,
@@ -4997,8 +5026,41 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
         return arrayInitDotExpr(gz, scope, rl, node);
     // if (AstGen.zig:1013-1024).
     case AST_NODE_IF_SIMPLE:
-    case AST_NODE_IF:
+    case AST_NODE_IF: {
+        // Detect switchExprErrUnion pattern: if (x) |v| { ... } else |err|
+        // switch (err) { ... } (AstGen.zig:862-871).
+        if (tag == AST_NODE_IF) {
+            uint32_t then_node_if = ag->tree->extra_data.arr[nd.rhs];
+            uint32_t else_node_if = ag->tree->extra_data.arr[nd.rhs + 1];
+            if (else_node_if != 0) {
+                uint32_t else_tok = lastToken(ag->tree, then_node_if) + 1;
+                uint32_t err_tok = 0;
+                if (else_tok + 1 < ag->tree->tokens.len
+                    && ag->tree->tokens.tags[else_tok + 1] == TOKEN_PIPE) {
+                    err_tok = else_tok + 2;
+                }
+                if (err_tok != 0) {
+                    AstNodeTag else_tag = ag->tree->nodes.tags[else_node_if];
+                    if ((else_tag == AST_NODE_SWITCH
+                            || else_tag == AST_NODE_SWITCH_COMMA)
+                        && ag->tree->tokens.tags[ag->tree->nodes
+                                .main_tokens[else_node_if]]
+                            == TOKEN_KEYWORD_SWITCH) {
+                        uint32_t sw_cond
+                            = ag->tree->nodes.datas[else_node_if].lhs;
+                        if (ag->tree->nodes.tags[sw_cond]
+                                == AST_NODE_IDENTIFIER
+                            && tokenIdentEql(ag->tree, err_tok,
+                                ag->tree->nodes.main_tokens[sw_cond])) {
+                            return switchExprErrUnion(
+                                gz, scope, rlBr(rl), node, 1);
+                        }
+                    }
+                }
+            }
+        }
         return ifExpr(gz, scope, rlBr(rl), node);
+    }
     // for (AstGen.zig:1043-1060).
     case AST_NODE_FOR_SIMPLE:
     case AST_NODE_FOR:
@@ -5842,11 +5904,13 @@ static uint32_t ifExpr(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
             : ZIR_INST_ERR_UNION_CODE;
         uint32_t err_inst
             = addUnNode(&else_scope, err_tag, cond_inst, cond_node);
+        // identAsString must be called before the underscore check,
+        // matching upstream order (AstGen.zig:6459-6462).
+        uint32_t err_name = identAsString(ag, error_token);
         if (tokenIsUnderscore(tree, error_token)) {
             // Discard |_| (AstGen.zig:6461-6462).
             // else_sub_scope stays as &else_scope.base
         } else {
-            uint32_t err_name = identAsString(ag, error_token);
             error_val_scope = (ScopeLocalVal) {
                 .base = { .tag = SCOPE_LOCAL_VAL },
                 .parent = &else_scope.base,
@@ -6837,6 +6901,504 @@ static uint32_t switchCaseValues(const Ast* tree, uint32_t case_node,
     }
 }
 
+// --- switchExprErrUnion (AstGen.zig:7127-7623) ---
+// Handles `catch |err| switch(err) { ... }` and
+// `if (x) |v| { ... } else |err| switch (err) { ... }` optimization.
+// Emits ZIR_INST_SWITCH_BLOCK_ERR_UNION instead of separate catch/if + switch.
+// node_ty: 0=catch, 1=if.
+static uint32_t switchExprErrUnion(GenZir* parent_gz, Scope* scope,
+    ResultLoc rl, uint32_t catch_or_if_node, int node_ty) {
+    AstGenCtx* ag = parent_gz->astgen;
+    const Ast* tree = ag->tree;
+    AstData cnd = tree->nodes.datas[catch_or_if_node];
+
+    // Extract switch_node, operand_node, error_payload based on node_ty
+    // (AstGen.zig:7138-7154).
+    uint32_t switch_node, operand_node, error_payload;
+    // For `if` case, we also need payload_token and then_node.
+    uint32_t if_payload_token = 0; // 0 = no payload
+    uint32_t if_then_node = 0;
+    if (node_ty == 0) {
+        // catch: rhs=switch, lhs=operand, main_token=catch keyword
+        switch_node = cnd.rhs;
+        operand_node = cnd.lhs;
+        uint32_t catch_token = tree->nodes.main_tokens[catch_or_if_node];
+        error_payload = catch_token + 2; // token after `catch |`
+    } else {
+        // if: parse fullIf structure (AstGen.zig:7138-7154).
+        operand_node = cnd.lhs;
+        if_then_node = tree->extra_data.arr[cnd.rhs];
+        uint32_t else_node = tree->extra_data.arr[cnd.rhs + 1];
+        switch_node = else_node;
+        // Compute error_token.
+        uint32_t else_tok = lastToken(tree, if_then_node) + 1;
+        error_payload = else_tok + 2; // else |err| => err is at else_tok+2
+        // Compute payload_token (if (cond) |val|).
+        uint32_t last_cond_tok = lastToken(tree, operand_node);
+        uint32_t pipe_tok = last_cond_tok + 2;
+        if (pipe_tok < tree->tokens.len
+            && tree->tokens.tags[pipe_tok] == TOKEN_PIPE) {
+            if_payload_token = pipe_tok + 1;
+        }
+    }
+
+    // Parse switch: lhs=condition, rhs=extra index for SubRange.
+    AstData snd = tree->nodes.datas[switch_node];
+    uint32_t sw_extra = snd.rhs;
+    uint32_t cases_start = tree->extra_data.arr[sw_extra];
+    uint32_t cases_end = tree->extra_data.arr[sw_extra + 1];
+    const uint32_t* case_nodes_arr = tree->extra_data.arr + cases_start;
+    uint32_t case_count = cases_end - cases_start;
+
+    bool do_err_trace = (ag->fn_ret_ty != 0);
+    bool need_rl = nodesNeedRlContains(ag, catch_or_if_node);
+    ResultLoc break_rl
+        = breakResultInfo(parent_gz, rl, catch_or_if_node, need_rl);
+    bool need_result_rvalue = (break_rl.tag != rl.tag);
+
+    // payload_is_ref (AstGen.zig:7168-7171).
+    bool payload_is_ref;
+    if (node_ty == 1) {
+        payload_is_ref = (if_payload_token != 0
+            && tree->tokens.tags[if_payload_token] == TOKEN_ASTERISK);
+    } else {
+        payload_is_ref = (rl.tag == RL_REF || rl.tag == RL_REF_COERCED_TY);
+    }
+
+    // First pass: categorize cases (AstGen.zig:7173-7229).
+    uint32_t scalar_cases_len = 0;
+    uint32_t multi_cases_len = 0;
+    bool has_else = false;
+    uint32_t else_ci = UINT32_MAX; // index into case_nodes_arr
+
+    for (uint32_t ci = 0; ci < case_count; ci++) {
+        uint32_t cn = case_nodes_arr[ci];
+        uint32_t single_buf;
+        const uint32_t* values;
+        uint32_t values_len = switchCaseValues(tree, cn, &single_buf, &values);
+
+        if (values_len == 0) {
+            has_else = true;
+            else_ci = ci;
+            continue;
+        }
+
+        if (values_len == 1
+            && tree->nodes.tags[values[0]] != AST_NODE_SWITCH_RANGE) {
+            scalar_cases_len++;
+        } else {
+            multi_cases_len++;
+        }
+    }
+
+    // Operand rl (AstGen.zig:7231-7234).
+    ResultLoc operand_rl = payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
+    operand_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
+
+    // Save operand source location (AstGen.zig:7236-7237).
+    advanceSourceCursorToNode(ag, operand_node);
+    uint32_t operand_lc_line = ag->source_line - parent_gz->decl_line;
+    uint32_t operand_lc_col = ag->source_column;
+
+    uint32_t raw_operand = reachableExpr(
+        parent_gz, scope, operand_rl, operand_node, switch_node);
+
+    // --- Payload buffer ---
+    // Table: [non_error_prong] [else?] [scalar_0..N] [multi_0..N]
+    uint32_t case_table_start = 0;
+    uint32_t scalar_case_table = case_table_start + 1 + (has_else ? 1u : 0u);
+    uint32_t multi_case_table = scalar_case_table + scalar_cases_len;
+    uint32_t case_table_end = multi_case_table + multi_cases_len;
+
+    uint32_t pay_cap = case_table_end + case_count * 16;
+    if (pay_cap < 64)
+        pay_cap = 64;
+    uint32_t* pay = malloc(pay_cap * sizeof(uint32_t));
+    if (!pay)
+        abort();
+    uint32_t pay_len = case_table_end;
+
+    GenZir block_scope = makeSubBlock(parent_gz, scope);
+    block_scope.instructions_top = UINT32_MAX; // unstacked
+    block_scope.break_result_info = break_rl;
+
+    // dbg_stmt before switch_block_err_union (AstGen.zig:7249).
+    emitDbgStmtForceCurrentIndex(parent_gz, operand_lc_line, operand_lc_col);
+
+    uint32_t switch_inst = makeBlockInst(
+        ag, ZIR_INST_SWITCH_BLOCK_ERR_UNION, parent_gz, switch_node);
+
+    GenZir case_scope = makeSubBlock(parent_gz, &block_scope.base);
+
+    // --- Non-error prong (AstGen.zig:7255-7391) ---
+    {
+        uint32_t body_len_index = pay_len;
+        pay[case_table_start] = body_len_index;
+        ensurePayCapacity(&pay, &pay_cap, pay_len, 1);
+        pay_len = body_len_index + 1; // body_len slot
+
+        case_scope.instructions_top = ag->scratch_inst_len;
+
+        ZirInstTag unwrap_tag = payload_is_ref
+            ? ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE_PTR
+            : ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE;
+        uint32_t unwrapped = addUnNode(
+            &case_scope, unwrap_tag, raw_operand, catch_or_if_node);
+
+        if (node_ty == 0) {
+            // catch: rvalue the unwrapped payload and break
+            // (AstGen.zig:7299-7314).
+            uint32_t case_result
+                = (rl.tag == RL_REF || rl.tag == RL_REF_COERCED_TY)
+                ? unwrapped
+                : rvalue(&case_scope, block_scope.break_result_info, unwrapped,
+                    catch_or_if_node);
+            addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, case_result,
+                (int32_t)catch_or_if_node
+                    - (int32_t)parent_gz->decl_node_index);
+        } else {
+            // if: evaluate then branch with payload capture
+            // (AstGen.zig:7316-7368).
+            ScopeLocalVal payload_val_scope;
+            memset(&payload_val_scope, 0, sizeof(payload_val_scope));
+            Scope* then_sub_scope = &case_scope.base;
+
+            if (if_payload_token != 0) {
+                uint32_t name_token
+                    = if_payload_token + (payload_is_ref ? 1u : 0u);
+                uint32_t ident_name = identAsString(ag, name_token);
+                if (tokenIsUnderscore(tree, name_token)) {
+                    // Discard: sub_scope stays as case_scope.
+                } else {
+                    payload_val_scope = (ScopeLocalVal) {
+                        .base = { .tag = SCOPE_LOCAL_VAL },
+                        .parent = &case_scope.base,
+                        .gen_zir = &case_scope,
+                        .name = ident_name,
+                        .inst = unwrapped,
+                        .token_src = name_token,
+                    };
+                    addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, ident_name,
+                        unwrapped);
+                    then_sub_scope = &payload_val_scope.base;
+                }
+            } else {
+                // No payload: ensure payload is void
+                // (AstGen.zig:7346-7350).
+                addUnNode(&case_scope, ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID,
+                    raw_operand, catch_or_if_node);
+            }
+
+            uint32_t then_result = exprRl(&case_scope, then_sub_scope,
+                block_scope.break_result_info, if_then_node);
+            if (!refIsNoReturn(parent_gz, then_result)) {
+                addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, then_result,
+                    (int32_t)if_then_node
+                        - (int32_t)parent_gz->decl_node_index);
+            }
+        }
+
+        uint32_t raw_body_len = gzInstructionsLen(&case_scope);
+        const uint32_t* body = gzInstructionsSlice(&case_scope);
+        const uint32_t extra_refs[1] = { switch_inst };
+        uint32_t body_len = countBodyLenAfterFixupsExtraRefs(
+            ag, body, raw_body_len, extra_refs, 1);
+
+        // ProngInfo: body_len, capture, not inline, no tag
+        // (AstGen.zig:7375-7389).
+        uint32_t capture = 0; // none
+        if (node_ty == 1 && if_payload_token != 0) {
+            capture = payload_is_ref ? 2u : 1u; // by_ref or by_val
+        }
+        pay[body_len_index]
+            = (body_len & 0x0FFFFFFFu) | ((capture & 3u) << 28);
+
+        ensurePayCapacity(&pay, &pay_cap, pay_len, body_len);
+        appendBodyWithFixupsExtraRefsPay(
+            ag, body, raw_body_len, extra_refs, 1, &pay, &pay_len, &pay_cap);
+
+        gzUnstack(&case_scope);
+    }
+
+    // --- Error capture name (AstGen.zig:7329-7342) ---
+    uint32_t err_name = identAsString(ag, error_payload);
+
+    // Allocate shared value_placeholder for error capture
+    // (AstGen.zig:7345-7353).
+    uint32_t err_inst = ag->inst_len;
+    ensureInstCapacity(ag, 1);
+    ag->inst_tags[err_inst] = ZIR_INST_EXTENDED;
+    ZirInstData edata;
+    memset(&edata, 0xaa, sizeof(edata));
+    edata.extended.opcode = (uint16_t)ZIR_EXT_VALUE_PLACEHOLDER;
+    ag->inst_datas[err_inst] = edata;
+    ag->inst_len++;
+
+    // --- Error cases (AstGen.zig:7356-7520) ---
+    uint32_t multi_case_index = 0;
+    uint32_t scalar_case_index = 0;
+    bool any_uses_err_capture = false;
+
+    for (uint32_t ci = 0; ci < case_count; ci++) {
+        uint32_t cn = case_nodes_arr[ci];
+        AstNodeTag ct = tree->nodes.tags[cn];
+        AstData cd = tree->nodes.datas[cn];
+        bool is_inline = (ct == AST_NODE_SWITCH_CASE_INLINE_ONE
+            || ct == AST_NODE_SWITCH_CASE_INLINE);
+
+        uint32_t single_buf;
+        const uint32_t* values;
+        uint32_t values_len = switchCaseValues(tree, cn, &single_buf, &values);
+
+        bool is_multi_case = values_len > 1
+            || (values_len == 1
+                && tree->nodes.tags[values[0]] == AST_NODE_SWITCH_RANGE);
+
+        // Set up error capture scope (AstGen.zig:7366-7404).
+        uint32_t dbg_var_name = 0;
+        uint32_t dbg_var_inst = 0;
+        ScopeLocalVal err_scope;
+        ScopeLocalVal capture_scope_val;
+        memset(&err_scope, 0, sizeof(err_scope));
+        memset(&capture_scope_val, 0, sizeof(capture_scope_val));
+
+        err_scope = (ScopeLocalVal) {
+            .base = { .tag = SCOPE_LOCAL_VAL },
+            .parent = &case_scope.base,
+            .gen_zir = &case_scope,
+            .inst = err_inst + ZIR_REF_START_INDEX,
+            .token_src = error_payload,
+            .name = err_name,
+        };
+
+        Scope* sub_scope = &err_scope.base;
+
+        // Check for case payload capture (AstGen.zig:7381-7404).
+        uint32_t arrow_token = tree->nodes.main_tokens[cn];
+        bool has_payload = false;
+        if (tree->tokens.tags[arrow_token + 1] == TOKEN_PIPE) {
+            uint32_t capture_token = arrow_token + 2;
+            if (tree->tokens.tags[capture_token] == TOKEN_IDENTIFIER) {
+                has_payload = true;
+                if (!tokenIsUnderscore(tree, capture_token)) {
+                    uint32_t tag_name = identAsString(ag, capture_token);
+                    capture_scope_val = (ScopeLocalVal) {
+                        .base = { .tag = SCOPE_LOCAL_VAL },
+                        .parent = &case_scope.base,
+                        .gen_zir = &case_scope,
+                        .inst = switch_inst + ZIR_REF_START_INDEX,
+                        .token_src = capture_token,
+                        .name = tag_name,
+                    };
+                    dbg_var_name = tag_name;
+                    dbg_var_inst = switch_inst + ZIR_REF_START_INDEX;
+                    // err_scope parent points to capture_scope_val
+                    err_scope.parent = &capture_scope_val.base;
+                }
+            }
+        }
+
+        // Fill item data in pay buffer (AstGen.zig:7406-7462).
+        ensurePayCapacity(&pay, &pay_cap, pay_len, 32);
+        uint32_t hdr = pay_len;
+        uint32_t prong_info_slot = 0;
+
+        if (is_multi_case) {
+            pay[multi_case_table + multi_case_index] = hdr;
+            multi_case_index++;
+            pay[pay_len++] = 0; // items_len placeholder
+            pay[pay_len++] = 0; // ranges_len placeholder
+            prong_info_slot = pay_len++;
+
+            uint32_t nitems = 0;
+            uint32_t nranges = 0;
+            for (uint32_t vi = 0; vi < values_len; vi++) {
+                if (tree->nodes.tags[values[vi]] == AST_NODE_SWITCH_RANGE) {
+                    nranges++;
+                    continue;
+                }
+                nitems++;
+                ensurePayCapacity(&pay, &pay_cap, pay_len, 1);
+                pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
+                    values[vi], COMPTIME_REASON_SWITCH_ITEM);
+            }
+            for (uint32_t vi = 0; vi < values_len; vi++) {
+                if (tree->nodes.tags[values[vi]] != AST_NODE_SWITCH_RANGE)
+                    continue;
+                AstData rng = tree->nodes.datas[values[vi]];
+                ensurePayCapacity(&pay, &pay_cap, pay_len, 2);
+                pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
+                    rng.lhs, COMPTIME_REASON_SWITCH_ITEM);
+                pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
+                    rng.rhs, COMPTIME_REASON_SWITCH_ITEM);
+            }
+            pay[hdr] = nitems;
+            pay[hdr + 1] = nranges;
+        } else if (ci == else_ci) {
+            pay[case_table_start + 1] = hdr;
+            prong_info_slot = pay_len++;
+        } else {
+            // Scalar case.
+            pay[scalar_case_table + scalar_case_index] = hdr;
+            scalar_case_index++;
+            pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
+                values[0], COMPTIME_REASON_SWITCH_ITEM);
+            prong_info_slot = pay_len++;
+        }
+
+        // Evaluate body (AstGen.zig:7464-7518).
+        {
+            case_scope.instructions_top = ag->scratch_inst_len;
+
+            if (do_err_trace && nodeMayAppendToErrorTrace(tree, operand_node))
+                addSaveErrRetIndex(&case_scope, ZIR_REF_NONE);
+
+            if (dbg_var_name != 0) {
+                addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, dbg_var_name,
+                    dbg_var_inst);
+            }
+
+            uint32_t body_node = cd.rhs;
+            uint32_t result = fullBodyExpr(&case_scope, sub_scope,
+                block_scope.break_result_info, body_node);
+
+            // Track err capture usage (AstGen.zig:7489-7494).
+            // The upstream tracks scope usage via used/discarded fields.
+            // We approximate: err capture is "used" if the err_scope was
+            // found during identifier lookup (i.e., the err name was
+            // referenced in the body). Since we don't track scope usage
+            // in C, we check if any instruction in the body references
+            // the err_inst placeholder.
+            bool uses_err = false;
+            {
+                uint32_t rbl = gzInstructionsLen(&case_scope);
+                const uint32_t* rbody = gzInstructionsSlice(&case_scope);
+                for (uint32_t bi = 0; bi < rbl; bi++) {
+                    uint32_t inst = rbody[bi];
+                    // Check if any instruction data references err_inst.
+                    ZirInstData d = ag->inst_datas[inst];
+                    if (d.un_node.operand == err_inst + ZIR_REF_START_INDEX) {
+                        uses_err = true;
+                        break;
+                    }
+                }
+            }
+            if (uses_err) {
+                addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, err_name,
+                    err_inst + ZIR_REF_START_INDEX);
+                any_uses_err_capture = true;
+            }
+
+            if (!refIsNoReturn(parent_gz, result)) {
+                if (do_err_trace)
+                    restoreErrRetIndex(&case_scope, switch_inst,
+                        block_scope.break_result_info, body_node, result);
+                addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, result,
+                    (int32_t)body_node - (int32_t)parent_gz->decl_node_index);
+            }
+
+            uint32_t raw_body_len = gzInstructionsLen(&case_scope);
+            const uint32_t* body = gzInstructionsSlice(&case_scope);
+
+            uint32_t extra_refs[2];
+            uint32_t extra_refs_len = 0;
+            extra_refs[extra_refs_len++] = switch_inst;
+            if (uses_err)
+                extra_refs[extra_refs_len++] = err_inst;
+
+            uint32_t body_len = countBodyLenAfterFixupsExtraRefs(
+                ag, body, raw_body_len, extra_refs, extra_refs_len);
+
+            uint32_t capture = has_payload ? 1u : 0u; // by_val or none
+            uint32_t prong_info = (body_len & 0x0FFFFFFFu)
+                | ((capture & 3u) << 28) | ((is_inline ? 1u : 0u) << 30);
+            pay[prong_info_slot] = prong_info;
+
+            ensurePayCapacity(&pay, &pay_cap, pay_len, body_len);
+            appendBodyWithFixupsExtraRefsPay(ag, body, raw_body_len,
+                extra_refs, extra_refs_len, &pay, &pay_len, &pay_cap);
+
+            gzUnstack(&case_scope);
+        }
+    }
+
+    // Now add switch_inst to parent (AstGen.zig:7522).
+    gzAppendInstruction(parent_gz, switch_inst);
+
+    // --- Serialize to extra (AstGen.zig:7524-7615) ---
+    ensureExtraCapacity(ag,
+        3 + (multi_cases_len > 0 ? 1u : 0u) + (any_uses_err_capture ? 1u : 0u)
+            + pay_len - case_table_end);
+    uint32_t payload_index = ag->extra_len;
+
+    // SwitchBlockErrUnion: operand (AstGen.zig:7529).
+    ag->extra[ag->extra_len++] = raw_operand;
+
+    // SwitchBlockErrUnion: bits (AstGen.zig:7530-7538).
+    {
+        uint32_t bits = 0;
+        if (multi_cases_len > 0)
+            bits |= 1u; // has_multi_cases (bit 0)
+        if (has_else)
+            bits |= (1u << 1); // has_else (bit 1)
+        if (any_uses_err_capture)
+            bits |= (1u << 2); // any_uses_err_capture (bit 2)
+        if (payload_is_ref)
+            bits |= (1u << 3); // payload_is_ref (bit 3)
+        bits |= (scalar_cases_len & 0x0FFFFFFFu) << 4; // scalar_cases_len
+        ag->extra[ag->extra_len++] = bits;
+    }
+
+    // SwitchBlockErrUnion: main_src_node_offset (AstGen.zig:7539).
+    ag->extra[ag->extra_len++] = (uint32_t)((int32_t)catch_or_if_node
+        - (int32_t)parent_gz->decl_node_index);
+
+    // multi_cases_len (AstGen.zig:7542-7544).
+    if (multi_cases_len > 0)
+        ag->extra[ag->extra_len++] = multi_cases_len;
+
+    // err_inst (AstGen.zig:7546-7548).
+    if (any_uses_err_capture)
+        ag->extra[ag->extra_len++] = err_inst;
+
+    ag->inst_datas[switch_inst].pl_node.payload_index = payload_index;
+
+    // Serialize case data from pay table (AstGen.zig:7552-7613).
+    for (uint32_t i = 0; i < case_table_end; i++) {
+        uint32_t si = pay[i];
+        uint32_t body_len_idx = si;
+        uint32_t end = si;
+
+        if (i < scalar_case_table) {
+            // Non-error or else prong: [prong_info, body...]
+            end += 1;
+        } else if (i < multi_case_table) {
+            // Scalar: [item, prong_info, body...]
+            body_len_idx = si + 1;
+            end += 2;
+        } else {
+            // Multi: [items_len, ranges_len, prong_info, items..., ranges...]
+            body_len_idx = si + 2;
+            uint32_t ni = pay[si];
+            uint32_t nr = pay[si + 1];
+            end += 3 + ni + nr * 2;
+        }
+        uint32_t prong_info = pay[body_len_idx];
+        uint32_t bl = prong_info & 0x0FFFFFFFu;
+        end += bl;
+        ensureExtraCapacity(ag, end - si);
+        for (uint32_t j = si; j < end; j++)
+            ag->extra[ag->extra_len++] = pay[j];
+    }
+
+    free(pay);
+
+    if (need_result_rvalue)
+        return rvalue(
+            parent_gz, rl, switch_inst + ZIR_REF_START_INDEX, switch_node);
+    return switch_inst + ZIR_REF_START_INDEX;
+}
+
 static uint32_t switchExpr(
     GenZir* parent_gz, Scope* scope, ResultLoc rl, uint32_t node) {
     AstGenCtx* ag = parent_gz->astgen;
@@ -7020,7 +7582,7 @@ static uint32_t switchExpr(
     ensureInstCapacity(ag, 1);
     ag->inst_tags[tag_inst] = ZIR_INST_EXTENDED;
     ZirInstData tdata;
-    memset(&tdata, 0, sizeof(tdata));
+    memset(&tdata, 0xaa, sizeof(tdata));
    tdata.extended.opcode = (uint16_t)ZIR_EXT_VALUE_PLACEHOLDER;
     ag->inst_datas[tag_inst] = tdata;
     ag->inst_len++;
diff --git a/stage0/astgen_test.zig b/stage0/astgen_test.zig
index 6c33fd03b6..f208e82808 100644
--- a/stage0/astgen_test.zig
+++ b/stage0/astgen_test.zig
@@ -243,7 +243,21 @@ fn expectEqualZir(gpa: Allocator, ref: Zir, got: c.Zir) !void {
     const ref_datas = ref.instructions.items(.data);
 
     // 1. Compare lengths.
-    try std.testing.expectEqual(ref_len, got.inst_len);
+    if (ref_len != got.inst_len) {
+        std.debug.print("inst_len mismatch: ref={d} got={d}\n", .{ ref_len, got.inst_len });
+        var ref_counts: [265]u32 = .{0} ** 265;
+        var got_counts: [265]u32 = .{0} ** 265;
+        for (0..ref_len) |i| ref_counts[@intFromEnum(ref_tags[i])] += 1;
+        for (0..got.inst_len) |i| got_counts[got.inst_tags[i]] += 1;
+        for (0..265) |t| {
+            if (ref_counts[t] != got_counts[t])
+                std.debug.print("tag {d}: ref={d} got={d} (diff={d})\n", .{
+                    t, ref_counts[t], got_counts[t],
+                    @as(i32, @intCast(got_counts[t])) - @as(i32, @intCast(ref_counts[t])),
+                });
+        }
+        return error.TestExpectedEqual;
+    }
 
     // 2. Compare instruction tags.
     for (0..ref_len) |i| {
@@ -793,10 +807,6 @@ test "astgen: corpus test_all.zig" {
 }
 
 test "astgen: corpus build.zig" {
-    // TODO: 6 extra instructions — missing switchExprErrUnion optimization
-    // (catch |err| switch(err) pattern emits SWITCH_BLOCK instead of
-    // SWITCH_BLOCK_ERR_UNION).
-    if (true) return error.SkipZigTest;
     const gpa = std.testing.allocator;
     try corpusCheck(gpa, @embedFile("../build.zig"));
 }