diff --git a/astgen.c b/astgen.c
index 5a5c1d9c1b..90de8ab17f 100644
--- a/astgen.c
+++ b/astgen.c
@@ -1871,19 +1871,25 @@ static uint32_t rvalue(
         return ZIR_REF_VOID_VALUE;
     case RL_REF:
     case RL_REF_COERCED_TY: {
+        // coerce_ptr_elem_ty for ref_coerced_ty (AstGen.zig:11077-11083).
+        uint32_t coerced_result = result;
+        if (rl.tag == RL_REF_COERCED_TY) {
+            coerced_result = addPlNodeBin(
+                gz, ZIR_INST_COERCE_PTR_ELEM_TY, node, rl.data, result);
+        }
         AstGenCtx* ag = gz->astgen;
         uint32_t src_token = firstToken(ag->tree, node);
         // If result is not an instruction index (e.g. a well-known ref),
         // emit ref directly (AstGen.zig:11091-11092).
-        if (result < ZIR_REF_START_INDEX) {
-            return addUnTok(gz, ZIR_INST_REF, result, src_token);
+        if (coerced_result < ZIR_REF_START_INDEX) {
+            return addUnTok(gz, ZIR_INST_REF, coerced_result, src_token);
         }
         // Deduplication via ref_table (AstGen.zig:11093-11097).
-        uint32_t result_index = result - ZIR_REF_START_INDEX;
+        uint32_t result_index = coerced_result - ZIR_REF_START_INDEX;
         bool found;
         uint32_t* val_ptr = refTableGetOrPut(ag, result_index, &found);
         if (!found) {
-            *val_ptr = makeUnTok(gz, ZIR_INST_REF, result, src_token);
+            *val_ptr = makeUnTok(gz, ZIR_INST_REF, coerced_result, src_token);
         }
         return *val_ptr + ZIR_REF_START_INDEX;
     }
@@ -2035,6 +2041,18 @@ static uint32_t rvalue(
     return result;
 }
 
+// rvalueNoCoercePreRef (AstGen.zig:11042-11049): like rvalue but does NOT
+// emit coerce_ptr_elem_ty for RL_REF_COERCED_TY. Used for local var refs.
+static uint32_t rvalueNoCoercePreRef(
+    GenZir* gz, ResultLoc rl, uint32_t result, uint32_t node) {
+    if (rl.tag == RL_REF_COERCED_TY) {
+        ResultLoc ref_rl = rl;
+        ref_rl.tag = RL_REF;
+        return rvalue(gz, ref_rl, result, node);
+    }
+    return rvalue(gz, rl, result, node);
+}
+
 // --- Expression evaluation (AstGen.zig:634) ---
 
 // Forward declarations.
@@ -2692,7 +2710,7 @@ static uint32_t identifierExpr(
     case SCOPE_LOCAL_VAL: {
         ScopeLocalVal* lv = (ScopeLocalVal*)s;
         if (lv->name == name_str)
-            return rvalue(gz, rl, lv->inst, node);
+            return rvalueNoCoercePreRef(gz, rl, lv->inst, node);
         s = lv->parent;
         continue;
     }
@@ -3076,13 +3094,14 @@ static uint32_t arrayInitExpr(
         && isUnderscoreIdent(tree, elem_count_node)) {
         // Inferred length: addInt(elem_count) (AstGen.zig:1452).
         uint32_t len_inst = addInt(gz, elem_count);
-        uint32_t elem_type
-            = exprRl(gz, scope, RL_NONE_VAL, elem_type_node);
+        uint32_t elem_type = typeExpr(gz, scope, elem_type_node);
         uint32_t array_type_inst = addPlNodeBin(
             gz, ZIR_INST_ARRAY_TYPE, type_expr_node, len_inst, elem_type);
 
-        // arrayInitExprTyped (AstGen.zig:1598-1642).
-        bool is_ref = (RL_IS_REF(rl));
+        // arrayInitExprTyped (AstGen.zig:1484-1513, 1598-1642).
+        // Only RL_REF produces array_init_ref; all other RLs use
+        // array_init + rvalue (AstGen.zig:1507-1511).
+        bool is_ref = (rl.tag == RL_REF);
         uint32_t operands_len = elem_count + 1;
         ensureExtraCapacity(ag, 1 + operands_len);
         uint32_t payload_index = ag->extra_len;
@@ -3100,11 +3119,14 @@ static uint32_t arrayInitExpr(
         }
         ZirInstTag init_tag = is_ref ?
             ZIR_INST_ARRAY_INIT_REF : ZIR_INST_ARRAY_INIT;
-        ZirInstData data;
-        data.pl_node.src_node
+        ZirInstData idata;
+        idata.pl_node.src_node
             = (int32_t)node - (int32_t)gz->decl_node_index;
-        data.pl_node.payload_index = payload_index;
-        return addInstruction(gz, init_tag, data);
+        idata.pl_node.payload_index = payload_index;
+        uint32_t result = addInstruction(gz, init_tag, idata);
+        if (is_ref)
+            return result;
+        return rvalue(gz, rl, result, node);
     }
     }
 
@@ -3262,9 +3284,20 @@ static uint32_t retExpr(GenZir* gz, Scope* scope, uint32_t node) {
         // need_err_code path: not implemented yet, fall through to general.
     }
 
-    // Evaluate operand with fn_ret_ty as result type (AstGen.zig:8178-8186).
+    // Evaluate operand with result location (AstGen.zig:8178-8186).
+    // If nodes_need_rl contains this return node, use ptr-based RL;
+    // otherwise use coerced_ty.
     ResultLoc ret_rl = RL_NONE_VAL;
-    if (ag->fn_ret_ty != 0) {
+    bool use_ptr = nodesNeedRlContains(ag, node);
+    uint32_t ret_ptr_inst = 0;
+    if (use_ptr) {
+        // Create ret_ptr instruction (AstGen.zig:8179).
+        ZirInstData rpdata;
+        rpdata.node = (int32_t)node - (int32_t)gz->decl_node_index;
+        ret_ptr_inst = addInstruction(gz, ZIR_INST_RET_PTR, rpdata);
+        ret_rl.tag = RL_PTR;
+        ret_rl.data = ret_ptr_inst;
+    } else if (ag->fn_ret_ty != 0) {
         ret_rl.tag = RL_COERCED_TY;
         ret_rl.data = ag->fn_ret_ty;
     }
@@ -3272,7 +3305,7 @@ static uint32_t retExpr(GenZir* gz, Scope* scope, uint32_t node) {
     uint32_t operand = exprRl(gz, scope, ret_rl, operand_node);
 
     // Emit RESTORE_ERR_RET_INDEX based on nodeMayEvalToError
-    // (AstGen.zig:8188-8220).
+    // (AstGen.zig:8188-8253).
    int eval_to_err = nodeMayEvalToError(tree, operand_node);
    if (eval_to_err == EVAL_TO_ERROR_NEVER) {
        // Returning non-error: pop error trace unconditionally
@@ -3283,33 +3316,63 @@ static uint32_t retExpr(GenZir* gz, Scope* scope, uint32_t node) {
         rdata.un_node.src_node
             = (int32_t)node - (int32_t)gz->decl_node_index;
         addInstruction(
             gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
-    } else if (eval_to_err == EVAL_TO_ERROR_MAYBE) {
-        // May be an error (AstGen.zig:8208-8220).
+        emitDbgStmt(gz, ret_lc_line, ret_lc_column);
+        // addRet (AstGen.zig:13188-13194).
+        if (use_ptr) {
+            addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
+        } else {
+            addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
+        }
+        return ZIR_REF_UNREACHABLE_VALUE;
+    } else if (eval_to_err == EVAL_TO_ERROR_ALWAYS) {
+        // .always: emit both error defers and regular defers
+        // (AstGen.zig:8200-8206).
+        uint32_t err_code = use_ptr
+            ? addUnNode(gz, ZIR_INST_LOAD, ret_ptr_inst, node)
+            : operand;
+        (void)err_code;
+        // TODO: genDefers with .both = err_code when errdefer is implemented.
+        genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
+        emitDbgStmt(gz, ret_lc_line, ret_lc_column);
+        if (use_ptr) {
+            addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
+        } else {
+            addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
+        }
+        return ZIR_REF_UNREACHABLE_VALUE;
+    } else {
+        // .maybe (AstGen.zig:8208-8252).
         DeferCounts dc = countDefers(defer_outer, scope);
         if (!dc.have_err) {
             // Only regular defers; no branch needed (AstGen.zig:8210-8220).
             genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
             emitDbgStmt(gz, ret_lc_line, ret_lc_column);
+            uint32_t result = use_ptr
+                ? addUnNode(gz, ZIR_INST_LOAD, ret_ptr_inst, node)
+                : operand;
             ZirInstData rdata;
-            rdata.un_node.operand = operand;
+            rdata.un_node.operand = result;
             rdata.un_node.src_node
                 = (int32_t)node - (int32_t)gz->decl_node_index;
             addInstruction(gz, ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY, rdata);
-            addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
+            if (use_ptr) {
+                addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
+            } else {
+                addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
+            }
             return ZIR_REF_UNREACHABLE_VALUE;
         }
         // have_err path: emit conditional branch (not yet implemented).
         // Fall through to simplified path.
         genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
-    } else {
-        // .always: error stays on trace, but still need normal defers.
-        genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
+        emitDbgStmt(gz, ret_lc_line, ret_lc_column);
+        if (use_ptr) {
+            addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
+        } else {
+            addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
+        }
+        return ZIR_REF_UNREACHABLE_VALUE;
     }
-
-    // Emit dbg_stmt back at return keyword for error return tracing.
-    emitDbgStmt(gz, ret_lc_line, ret_lc_column);
-    addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
-    return ZIR_REF_UNREACHABLE_VALUE;
 }
 // --- calleeExpr (AstGen.zig:10183) ---
@@ -3622,6 +3685,38 @@ static uint32_t structInitExpr(
     }
 
     if (type_expr_node == 0 && fields_len > 0) {
+        // structInitExprPtr for RL_PTR (AstGen.zig:1843-1846, 1934-1964).
+        if (rl.tag == RL_PTR) {
+            uint32_t struct_ptr_inst
+                = addUnNode(gz, ZIR_INST_OPT_EU_BASE_PTR_INIT, rl.data, node);
+            // Block payload: body_len = fields_len.
+            ensureExtraCapacity(ag, 1 + fields_len);
+            uint32_t payload_index = ag->extra_len;
+            ag->extra[ag->extra_len++] = fields_len;
+            uint32_t items_start = ag->extra_len;
+            ag->extra_len += fields_len;
+
+            for (uint32_t i = 0; i < fields_len; i++) {
+                uint32_t field_init = fields[i];
+                uint32_t name_token = firstToken(tree, field_init) - 2;
+                uint32_t str_index = identAsString(ag, name_token);
+                // struct_init_field_ptr (AstGen.zig:1954-1957).
+                uint32_t field_ptr
+                    = addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_PTR,
+                        field_init, struct_ptr_inst, str_index);
+                ag->extra[items_start + i]
+                    = field_ptr - ZIR_REF_START_INDEX; // .toIndex()
+                // Evaluate init with ptr RL (AstGen.zig:1960).
+                ResultLoc ptr_rl = { .tag = RL_PTR,
+                    .data = field_ptr,
+                    .src_node = 0,
+                    .ctx = rl.ctx };
+                exprRl(gz, scope, ptr_rl, field_init);
+            }
+            addPlNodePayloadIndex(
+                gz, ZIR_INST_VALIDATE_PTR_STRUCT_INIT, node, payload_index);
+            return ZIR_REF_VOID_VALUE;
+        }
         // Anonymous struct init with RL type (AstGen.zig:1706-1731).
         if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY) {
             uint32_t ty_inst = rl.data;
@@ -3691,8 +3786,7 @@ static uint32_t structInitExpr(
             && isUnderscoreIdent(tree, elem_count_node)) {
             // Inferred length with 0 fields → length 0.
             if (type_tag == AST_NODE_ARRAY_TYPE) {
-                uint32_t elem_type
-                    = exprRl(gz, scope, RL_NONE_VAL, type_nd.rhs);
+                uint32_t elem_type = typeExpr(gz, scope, type_nd.rhs);
                 uint32_t array_type_inst
                     = addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, type_expr_node,
                         ZIR_REF_ZERO_USIZE, elem_type);
@@ -3704,8 +3798,7 @@ static uint32_t structInitExpr(
                 uint32_t sentinel_node = tree->extra_data.arr[type_nd.rhs];
                 uint32_t elem_type_node
                     = tree->extra_data.arr[type_nd.rhs + 1];
-                uint32_t elem_type
-                    = exprRl(gz, scope, RL_NONE_VAL, elem_type_node);
+                uint32_t elem_type = typeExpr(gz, scope, elem_type_node);
                 ResultLoc sent_rl = { .tag = RL_COERCED_TY,
                     .data = elem_type,
                     .src_node = 0,
@@ -3747,8 +3840,12 @@ static uint32_t structInitExpr(
             uint32_t field_ty_inst
                 = addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_TYPE,
                     field_init, ty_inst, str_index);
-            // Evaluate init (coerced_ty in upstream = no explicit coercion).
-            uint32_t init_ref = expr(gz, scope, field_init);
+            // Evaluate init with coerced_ty (AstGen.zig:1924).
+            ResultLoc elem_rl = { .tag = RL_COERCED_TY,
+                .data = field_ty_inst,
+                .src_node = 0,
+                .ctx = rl.ctx };
+            uint32_t init_ref = exprRl(gz, scope, elem_rl, field_init);
             ag->extra[items_start + i * 2]
                 = field_ty_inst - ZIR_REF_START_INDEX; // .toIndex()
             ag->extra[items_start + i * 2 + 1] = init_ref;
@@ -4084,7 +4181,7 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
     case AST_NODE_OPTIONAL_TYPE:
         return rvalue(gz, rl,
             addUnNode(
-                gz, ZIR_INST_OPTIONAL_TYPE, expr(gz, scope, nd.lhs), node),
+                gz, ZIR_INST_OPTIONAL_TYPE, typeExpr(gz, scope, nd.lhs), node),
             node);
     // unwrap_optional (AstGen.zig:966-985).
     case AST_NODE_UNWRAP_OPTIONAL: {
@@ -4096,10 +4193,13 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
         return rvalue(gz, rl,
             addUnNode(gz, ZIR_INST_OPTIONAL_PAYLOAD_SAFE, lhs, node), node);
     }
-    // error_union type (AstGen.zig:987-990).
-    case AST_NODE_ERROR_UNION:
+    // error_union type (AstGen.zig:788-797).
+    case AST_NODE_ERROR_UNION: {
+        uint32_t lhs = typeExpr(gz, scope, nd.lhs);
+        uint32_t rhs = typeExpr(gz, scope, nd.rhs);
         return rvalue(gz, rl,
-            simpleBinOp(gz, scope, node, ZIR_INST_ERROR_UNION_TYPE), node);
+            addPlNodeBin(gz, ZIR_INST_ERROR_UNION_TYPE, node, lhs, rhs), node);
+    }
     // char_literal (AstGen.zig:8662-8675).
     case AST_NODE_CHAR_LITERAL: {
         uint32_t main_tok = ag->tree->nodes.main_tokens[node];
@@ -4176,17 +4276,22 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
     }
     // slice (AstGen.zig:882-939).
     case AST_NODE_SLICE_OPEN: {
+        // (AstGen.zig:908-937).
         uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
         advanceSourceCursorToMainToken(ag, gz, node);
         uint32_t saved_line = ag->source_line - gz->decl_line;
         uint32_t saved_col = ag->source_column;
-        uint32_t start = expr(gz, scope, nd.rhs);
+        ResultLoc usize_rl = { .tag = RL_COERCED_TY,
+            .data = ZIR_REF_USIZE_TYPE,
+            .src_node = 0,
+            .ctx = RI_CTX_NONE };
+        uint32_t start = exprRl(gz, scope, usize_rl, nd.rhs);
         emitDbgStmt(gz, saved_line, saved_col);
         return rvalue(gz, rl,
             addPlNodeBin(gz, ZIR_INST_SLICE_START, node, lhs, start), node);
     }
     case AST_NODE_SLICE: {
-        // Slice[rhs]: { start, end }
+        // Slice[rhs]: { start, end } (AstGen.zig:908-937).
         const Ast* stree = ag->tree;
         uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
         advanceSourceCursorToMainToken(ag, gz, node);
@@ -4194,8 +4299,12 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
         uint32_t saved_col = ag->source_column;
         uint32_t start_node = stree->extra_data.arr[nd.rhs];
         uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
-        uint32_t start_ref = expr(gz, scope, start_node);
-        uint32_t end_ref = expr(gz, scope, end_node);
+        ResultLoc usize_rl = { .tag = RL_COERCED_TY,
+            .data = ZIR_REF_USIZE_TYPE,
+            .src_node = 0,
+            .ctx = RI_CTX_NONE };
+        uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
+        uint32_t end_ref = exprRl(gz, scope, usize_rl, end_node);
         emitDbgStmt(gz, saved_line, saved_col);
         ensureExtraCapacity(ag, 3);
         uint32_t payload_index = ag->extra_len;
@@ -4210,6 +4319,7 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
     }
     case AST_NODE_SLICE_SENTINEL: {
         // SliceSentinel[rhs]: { start, end, sentinel }
+        // (AstGen.zig:908-925).
         const Ast* stree = ag->tree;
         uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
         advanceSourceCursorToMainToken(ag, gz, node);
@@ -4218,9 +4328,23 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
         uint32_t start_node = stree->extra_data.arr[nd.rhs];
         uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
         uint32_t sentinel_node = stree->extra_data.arr[nd.rhs + 2];
-        uint32_t start_ref = expr(gz, scope, start_node);
-        uint32_t end_ref = expr(gz, scope, end_node);
-        uint32_t sentinel_ref = expr(gz, scope, sentinel_node);
+        // start/end coerced to usize (AstGen.zig:911-912).
+        ResultLoc usize_rl = { .tag = RL_COERCED_TY,
+            .data = ZIR_REF_USIZE_TYPE,
+            .src_node = 0,
+            .ctx = RI_CTX_NONE };
+        uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
+        uint32_t end_ref = (end_node != 0)
+            ? exprRl(gz, scope, usize_rl, end_node)
+            : ZIR_REF_NONE;
+        // sentinel: create slice_sentinel_ty and coerce (AstGen.zig:913-916).
+        uint32_t sentinel_ty
+            = addUnNode(gz, ZIR_INST_SLICE_SENTINEL_TY, lhs, node);
+        ResultLoc sent_rl = { .tag = RL_COERCED_TY,
+            .data = sentinel_ty,
+            .src_node = 0,
+            .ctx = RI_CTX_NONE };
+        uint32_t sentinel_ref = exprRl(gz, scope, sent_rl, sentinel_node);
         emitDbgStmt(gz, saved_line, saved_col);
         ensureExtraCapacity(ag, 4);
         uint32_t payload_index = ag->extra_len;
@@ -4260,10 +4384,13 @@ static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
     case AST_NODE_FOR_SIMPLE:
     case AST_NODE_FOR:
         return rvalue(gz, rl, forExpr(gz, scope, node, false), node);
-    // Merge error sets (AstGen.zig:787).
-    case AST_NODE_MERGE_ERROR_SETS:
+    // Merge error sets (AstGen.zig:788-797).
+    case AST_NODE_MERGE_ERROR_SETS: {
+        uint32_t lhs = typeExpr(gz, scope, nd.lhs);
+        uint32_t rhs = typeExpr(gz, scope, nd.rhs);
         return rvalue(gz, rl,
-            simpleBinOp(gz, scope, node, ZIR_INST_MERGE_ERROR_SETS), node);
+            addPlNodeBin(gz, ZIR_INST_MERGE_ERROR_SETS, node, lhs, rhs), node);
+    }
     // Wrapping arithmetic.
     case AST_NODE_ADD_WRAP:
         return rvalue(
@@ -4846,13 +4973,37 @@ static uint32_t arrayInitDotExpr(
         return addPlNodePayloadIndex(
             gz, ZIR_INST_ARRAY_INIT_REF, node, ai_payload);
     }
-    case RL_PTR:
-        // TODO: arrayInitExprPtr (AstGen.zig:1541-1543).
-        // For now, fall through to anon + rvalue.
-        break;
+    case RL_PTR: {
+        // arrayInitExprPtr (AstGen.zig:1541-1543, 1645-1672).
+        uint32_t array_ptr_inst
+            = addUnNode(gz, ZIR_INST_OPT_EU_BASE_PTR_INIT, rl.data, node);
+        // Block payload: body_len = elem_count.
+        ensureExtraCapacity(ag, 1 + elem_count);
+        uint32_t payload_index = ag->extra_len;
+        ag->extra[ag->extra_len++] = elem_count;
+        uint32_t items_start = ag->extra_len;
+        ag->extra_len += elem_count;
+
+        for (uint32_t i = 0; i < elem_count; i++) {
+            // array_init_elem_ptr: ElemPtrImm{ptr, index}.
+            uint32_t elem_ptr_inst = addPlNodeBin(gz,
+                ZIR_INST_ARRAY_INIT_ELEM_PTR, elements[i], array_ptr_inst, i);
+            ag->extra[items_start + i]
+                = elem_ptr_inst - ZIR_REF_START_INDEX; // .toIndex()
+            // Evaluate element with ptr RL (AstGen.zig:1668).
+            ResultLoc ptr_rl = { .tag = RL_PTR,
+                .data = elem_ptr_inst,
+                .src_node = 0,
+                .ctx = rl.ctx };
+            exprRl(gz, scope, ptr_rl, elements[i]);
+        }
+        addPlNodePayloadIndex(
+            gz, ZIR_INST_VALIDATE_PTR_ARRAY_INIT, node, payload_index);
+        return ZIR_REF_VOID_VALUE;
+    }
     }
 
-    // Fallback: anon init + rvalue (handles RL_PTR for now).
+    // Fallback: anon init + rvalue.
     ensureExtraCapacity(ag, 1 + elem_count);
     uint32_t payload_index = ag->extra_len;
     ag->extra[ag->extra_len++] = elem_count;
diff --git a/astgen_test.zig b/astgen_test.zig
index ba28734a23..abe2d05c96 100644
--- a/astgen_test.zig
+++ b/astgen_test.zig
@@ -798,8 +798,8 @@ test "astgen: corpus tokenizer_test.zig" {
 }
 
 test "astgen: corpus parser_test.zig" {
-    // TODO: 225 inst diff — missing ptr-based struct/array init, OPT_EU_BASE_PTR_INIT,
-    // COERCE_PTR_ELEM_TY, BLOCK_COMPTIME wrappers, and result location propagation.
+    // TODO: 5 inst diff — 2 STORE_TO_INFERRED_PTR, 1 REF, 1 STORE_NODE,
+    // 1 COERCE_PTR_ELEM_TY.
     if (true) return error.SkipZigTest;
     const gpa = std.testing.allocator;
     try corpusCheck(gpa, @embedFile("parser_test.zig"));