diff --git a/astgen.c b/astgen.c index 857e76980b..7c8411bde1 100644 --- a/astgen.c +++ b/astgen.c @@ -7,6 +7,7 @@ #include "astgen.h" #include "common.h" #include +#include #include #include @@ -86,9 +87,95 @@ typedef struct { uint32_t scratch_inst_cap; // Return type ref for the current function (set during fnDecl/testDecl). uint32_t fn_ret_ty; // ZirInstRef + // ref_table: deferred REF instructions (AstGen.zig:58-68). + // Key = operand inst index, Value = ref inst index. + uint32_t* ref_table_keys; + uint32_t* ref_table_vals; + uint32_t ref_table_len; + uint32_t ref_table_cap; + // nodes_need_rl: set of AST node indices that need result locations. + // Populated by astRlAnnotate() pre-pass (AstRlAnnotate.zig). + uint32_t* nodes_need_rl; + uint32_t nodes_need_rl_len; + uint32_t nodes_need_rl_cap; bool has_compile_errors; } AstGenCtx; +static void setCompileError(AstGenCtx* ag, const char* where, int line) { + (void)where; + (void)line; + ag->has_compile_errors = true; +} +#define SET_ERROR(ag) setCompileError(ag, __func__, __LINE__) + +// --- ref_table operations (AstGen.zig:58-68) --- +// Simple linear-scan hash table for deferred REF instructions. + +// Returns pointer to existing value if key found, NULL if not found. +static uint32_t* refTableGet(AstGenCtx* ag, uint32_t key) { + for (uint32_t i = 0; i < ag->ref_table_len; i++) { + if (ag->ref_table_keys[i] == key) + return &ag->ref_table_vals[i]; + } + return NULL; +} + +// getOrPut: returns pointer to value slot; sets *found to true if existed. +static uint32_t* refTableGetOrPut(AstGenCtx* ag, uint32_t key, bool* found) { + for (uint32_t i = 0; i < ag->ref_table_len; i++) { + if (ag->ref_table_keys[i] == key) { + *found = true; + return &ag->ref_table_vals[i]; + } + } + *found = false; + if (ag->ref_table_len >= ag->ref_table_cap) { + uint32_t new_cap = ag->ref_table_cap == 0 ? 
16 : ag->ref_table_cap * 2; + ag->ref_table_keys + = realloc(ag->ref_table_keys, new_cap * sizeof(uint32_t)); + ag->ref_table_vals + = realloc(ag->ref_table_vals, new_cap * sizeof(uint32_t)); + ag->ref_table_cap = new_cap; + } + uint32_t idx = ag->ref_table_len++; + ag->ref_table_keys[idx] = key; + return &ag->ref_table_vals[idx]; +} + +// fetchRemove: if key exists, remove it and return true with *val set. +static bool refTableFetchRemove(AstGenCtx* ag, uint32_t key, uint32_t* val) { + for (uint32_t i = 0; i < ag->ref_table_len; i++) { + if (ag->ref_table_keys[i] == key) { + *val = ag->ref_table_vals[i]; + // Swap with last element. + ag->ref_table_len--; + if (i < ag->ref_table_len) { + ag->ref_table_keys[i] = ag->ref_table_keys[ag->ref_table_len]; + ag->ref_table_vals[i] = ag->ref_table_vals[ag->ref_table_len]; + } + return true; + } + } + return false; +} + +// --- Scope types (AstGen.zig:11621-11768) --- + +typedef enum { + SCOPE_GEN_ZIR, + SCOPE_LOCAL_VAL, + SCOPE_LOCAL_PTR, + SCOPE_DEFER_NORMAL, + SCOPE_DEFER_ERROR, + SCOPE_NAMESPACE, + SCOPE_TOP, + SCOPE_LABEL, +} ScopeTag; + +typedef struct Scope { + ScopeTag tag; +} Scope; + // --- GenZir scope (mirrors GenZir struct, AstGen.zig:11772) --- // // Sub-blocks share the parent AstGenCtx's scratch_instructions array and @@ -96,14 +183,58 @@ typedef struct { // GenZir.instructions / instructions_top design (AstGen.zig:11796-11850). typedef struct { + Scope base; // tag = SCOPE_GEN_ZIR + Scope* parent; AstGenCtx* astgen; uint32_t decl_node_index; uint32_t decl_line; bool is_comptime; bool c_import; // true inside @cImport block uint32_t instructions_top; // start index in shared array + uint32_t break_block; // UINT32_MAX = none (AstGen.zig:11780) + uint32_t continue_block; // UINT32_MAX = none (AstGen.zig:11784) } GenZir; +// Scope.LocalVal (AstGen.zig:11682). +// This is always a `const` local and the `inst` is a value type, not a +// pointer. 
+typedef struct { + Scope base; // tag = SCOPE_LOCAL_VAL + Scope* parent; + GenZir* gen_zir; + uint32_t inst; // ZirInstRef + uint32_t token_src; // Ast.TokenIndex + uint32_t name; // NullTerminatedString (string table index) +} ScopeLocalVal; + +// Scope.LocalPtr (AstGen.zig:11704). +// This could be a `const` or `var` local. It has a pointer instead of a value. +typedef struct { + Scope base; // tag = SCOPE_LOCAL_PTR + Scope* parent; + GenZir* gen_zir; + uint32_t ptr; // ZirInstRef + uint32_t token_src; // Ast.TokenIndex + uint32_t name; // NullTerminatedString (string table index) + bool maybe_comptime; +} ScopeLocalPtr; + +// Scope.Defer (AstGen.zig:11741). +typedef struct { + Scope base; // tag = SCOPE_DEFER_NORMAL or SCOPE_DEFER_ERROR + Scope* parent; + uint32_t index; + uint32_t len; +} ScopeDefer; + +// Scope.Label — for labeled blocks and loops. +typedef struct { + Scope base; // tag = SCOPE_LABEL + Scope* parent; + uint32_t label_name; // NullTerminatedString + uint32_t block_inst; // instruction index (not ref) +} ScopeLabel; + // --- GenZir instruction helpers (AstGen.zig:11830-11850) --- // Returns the number of instructions in this scope. @@ -139,15 +270,19 @@ static void gzAppendInstruction(GenZir* gz, uint32_t inst_idx) { } // Mirrors GenZir.makeSubBlock (AstGen.zig:11852). -static GenZir makeSubBlock(GenZir* parent) { +static GenZir makeSubBlock(GenZir* parent, Scope* scope) { GenZir sub; memset(&sub, 0, sizeof(sub)); + sub.base.tag = SCOPE_GEN_ZIR; + sub.parent = scope; sub.astgen = parent->astgen; sub.decl_node_index = parent->decl_node_index; sub.decl_line = parent->decl_line; sub.is_comptime = parent->is_comptime; sub.c_import = parent->c_import; sub.instructions_top = parent->astgen->scratch_inst_len; + sub.break_block = UINT32_MAX; + sub.continue_block = UINT32_MAX; return sub; } @@ -219,6 +354,28 @@ static uint32_t reserveInstructionIndex(AstGenCtx* ag) { return idx; } +// Forward declarations. 
+static int32_t tokenIndexToRelative(const GenZir* gz, uint32_t token); +static uint32_t firstToken(const Ast* tree, uint32_t node); +static bool nodesNeedRlContains(const AstGenCtx* ag, uint32_t node); + +// Mirrors GenZir.makeUnTok (AstGen.zig:12520). +// Allocates an instruction but does NOT add to GenZir body. +// Returns the raw instruction INDEX (not a Ref). +static uint32_t makeUnTok( + GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) { + AstGenCtx* ag = gz->astgen; + ensureInstCapacity(ag, 1); + uint32_t idx = ag->inst_len; + ZirInstData data; + data.un_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index); + data.un_tok.operand = operand; + ag->inst_tags[idx] = tag; + ag->inst_datas[idx] = data; + ag->inst_len++; + return idx; // Raw index, NOT a Ref. +} + // Mirrors GenZir.add (AstGen.zig:13162). // Appends an instruction and records it in the GenZir body. // Returns the instruction index as a Ref (index + ZIR_INST_REF_START_INDEX). @@ -256,8 +413,35 @@ static uint32_t addPlNodeBin( return addInstruction(gz, tag, data); } -// Forward declaration. -static int32_t tokenIndexToRelative(const GenZir* gz, uint32_t token); +// Mirrors addPlNode for 3-operand payloads (e.g. ArrayTypeSentinel). +static uint32_t addPlNodeTriple(GenZir* gz, ZirInstTag tag, uint32_t node, + uint32_t a, uint32_t b, uint32_t c) { + AstGenCtx* ag = gz->astgen; + ensureExtraCapacity(ag, 3); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = a; + ag->extra[ag->extra_len++] = b; + ag->extra[ag->extra_len++] = c; + ZirInstData data; + data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + data.pl_node.payload_index = payload_index; + return addInstruction(gz, tag, data); +} + +// Checks if an AST identifier node is the single underscore `_`. +// Used for inferred array length detection in [_]T patterns. +// Intentionally does NOT support @"_" syntax (matches upstream). 
+static bool isUnderscoreIdent(const Ast* tree, uint32_t ident_node) { + uint32_t id_tok = tree->nodes.main_tokens[ident_node]; + uint32_t id_start = tree->tokens.starts[id_tok]; + if (tree->source[id_start] != '_') + return false; + if (id_start + 1 >= tree->source_len) + return true; + char next = tree->source[id_start + 1]; + return !((next >= 'a' && next <= 'z') || (next >= 'A' && next <= 'Z') + || next == '_' || (next >= '0' && next <= '9')); +} // Mirrors GenZir.addUnNode (AstGen.zig:12406). static uint32_t addUnNode( @@ -268,6 +452,15 @@ static uint32_t addUnNode( return addInstruction(gz, tag, data); } +// Mirrors GenZir.addUnTok (AstGen.zig:12497). +static uint32_t addUnTok( + GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) { + ZirInstData data; + data.un_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index); + data.un_tok.operand = operand; + return addInstruction(gz, tag, data); +} + // Mirrors GenZir.addStrTok (AstGen.zig:12349). static uint32_t addStrTok( GenZir* gz, ZirInstTag tag, uint32_t str_index, uint32_t token) { @@ -294,7 +487,9 @@ static void advanceSourceCursor(AstGenCtx* ag, uint32_t end) { uint32_t i = ag->source_offset; uint32_t line = ag->source_line; uint32_t column = ag->source_column; - assert(i <= end); + if (i > end) { + return; // Cursor already past target; skip (cursor ordering issue). + } while (i < end) { if (source[i] == '\n') { line++; @@ -335,6 +530,14 @@ static uint32_t firstToken(const Ast* tree, uint32_t node) { static void advanceSourceCursorToNode(AstGenCtx* ag, uint32_t node) { uint32_t ft = firstToken(ag->tree, node); uint32_t token_start = ag->tree->tokens.starts[ft]; + (void)0; // cursor backward check disabled temporarily + advanceSourceCursor(ag, token_start); +} + +// Mirrors maybeAdvanceSourceCursorToMainToken (AstGen.zig:13324). 
+static void advanceSourceCursorToMainToken(AstGenCtx* ag, uint32_t node) { + uint32_t main_tok = ag->tree->nodes.main_tokens[node]; + uint32_t token_start = ag->tree->tokens.starts[main_tok]; advanceSourceCursor(ag, token_start); } @@ -500,17 +703,43 @@ static uint32_t makeBlockInst( return idx; } +// Mirrors appendPossiblyRefdBodyInst (AstGen.zig:13675-13683). +// Prepends ref_table entry before body_inst in extra. +static void appendPossiblyRefdBodyInst(AstGenCtx* ag, uint32_t body_inst) { + uint32_t ref_inst; + if (refTableFetchRemove(ag, body_inst, &ref_inst)) { + appendPossiblyRefdBodyInst(ag, ref_inst); + } + ag->extra[ag->extra_len++] = body_inst; +} + +// Mirrors countBodyLenAfterFixups (AstGen.zig:13686-13710). +static uint32_t countBodyLenAfterFixups( + AstGenCtx* ag, const uint32_t* body, uint32_t body_len) { + uint32_t count = body_len; + for (uint32_t i = 0; i < body_len; i++) { + uint32_t check_inst = body[i]; + const uint32_t* ref; + while ((ref = refTableGet(ag, check_inst)) != NULL) { + count++; + check_inst = *ref; + } + } + return count; +} + // Mirrors GenZir.setBlockBody (AstGen.zig:11949). // Writes Block payload (body_len + instruction indices) to extra. // Sets the instruction's payload_index. Unstacks gz. 
static void setBlockBody(AstGenCtx* ag, GenZir* gz, uint32_t inst) { - uint32_t body_len = gzInstructionsLen(gz); + uint32_t raw_body_len = gzInstructionsLen(gz); const uint32_t* body = gzInstructionsSlice(gz); + uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len); ensureExtraCapacity(ag, 1 + body_len); uint32_t payload_index = ag->extra_len; - ag->extra[ag->extra_len++] = body_len; // Block.body_len - for (uint32_t i = 0; i < body_len; i++) { - ag->extra[ag->extra_len++] = body[i]; + ag->extra[ag->extra_len++] = body_len; + for (uint32_t i = 0; i < raw_body_len; i++) { + appendPossiblyRefdBodyInst(ag, body[i]); } ag->inst_datas[inst].pl_node.payload_index = payload_index; gzUnstack(gz); @@ -521,19 +750,89 @@ static void setBlockBody(AstGenCtx* ag, GenZir* gz, uint32_t inst) { // Sets the instruction's payload_index. Unstacks gz. static void setTryBody( AstGenCtx* ag, GenZir* gz, uint32_t inst, uint32_t operand) { - uint32_t body_len = gzInstructionsLen(gz); + uint32_t raw_body_len = gzInstructionsLen(gz); const uint32_t* body = gzInstructionsSlice(gz); + uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len); ensureExtraCapacity(ag, 2 + body_len); uint32_t payload_index = ag->extra_len; ag->extra[ag->extra_len++] = operand; // Try.operand ag->extra[ag->extra_len++] = body_len; // Try.body_len - for (uint32_t i = 0; i < body_len; i++) { - ag->extra[ag->extra_len++] = body[i]; + for (uint32_t i = 0; i < raw_body_len; i++) { + appendPossiblyRefdBodyInst(ag, body[i]); } ag->inst_datas[inst].pl_node.payload_index = payload_index; gzUnstack(gz); } +// Mirrors GenZir.addBreak (AstGen.zig:12623). +// Creates a ZIR_INST_BREAK instruction. 
+static uint32_t addBreak(GenZir* gz, ZirInstTag tag, uint32_t block_inst, + uint32_t operand, int32_t operand_src_node) { + AstGenCtx* ag = gz->astgen; + ensureInstCapacity(ag, 1); + ensureExtraCapacity(ag, 2); + + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = (uint32_t)operand_src_node; + ag->extra[ag->extra_len++] = block_inst; + + uint32_t idx = ag->inst_len; + ag->inst_tags[idx] = tag; + ZirInstData data; + data.break_data.operand = operand; + data.break_data.payload_index = payload_index; + ag->inst_datas[idx] = data; + ag->inst_len++; + gzAppendInstruction(gz, idx); + return idx; +} + +// Mirrors GenZir.addCondBr (AstGen.zig:12834). +// Creates condbr instruction placeholder with src_node set. +// Payload is filled later by setCondBrPayload. +static uint32_t addCondBr(GenZir* gz, uint32_t node) { + AstGenCtx* ag = gz->astgen; + ensureInstCapacity(ag, 1); + uint32_t idx = ag->inst_len; + ag->inst_tags[idx] = ZIR_INST_CONDBR; + ZirInstData data; + memset(&data, 0, sizeof(data)); + data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + data.pl_node.payload_index = 0; // set later + ag->inst_datas[idx] = data; + ag->inst_len++; + gzAppendInstruction(gz, idx); + return idx; +} + +// Mirrors GenZir.setCondBrPayload (AstGen.zig:12003). +// Writes CondBr payload: condition + then_body_len + then_body + +// else_body_len + else_body. Unstacks both scopes. 
+static void setCondBrPayload(AstGenCtx* ag, uint32_t condbr_inst, + uint32_t condition, GenZir* then_gz, GenZir* else_gz) { + uint32_t raw_then_len = gzInstructionsLen(then_gz); + const uint32_t* then_body = gzInstructionsSlice(then_gz); + uint32_t raw_else_len = gzInstructionsLen(else_gz); + const uint32_t* else_body = gzInstructionsSlice(else_gz); + + uint32_t then_len = countBodyLenAfterFixups(ag, then_body, raw_then_len); + uint32_t else_len = countBodyLenAfterFixups(ag, else_body, raw_else_len); + + ensureExtraCapacity(ag, 2 + then_len + 1 + else_len); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = condition; // CondBr.condition + ag->extra[ag->extra_len++] = then_len; // CondBr.then_body_len + for (uint32_t i = 0; i < raw_then_len; i++) + appendPossiblyRefdBodyInst(ag, then_body[i]); + ag->extra[ag->extra_len++] = else_len; // CondBr.else_body_len + for (uint32_t i = 0; i < raw_else_len; i++) + appendPossiblyRefdBodyInst(ag, else_body[i]); + + ag->inst_datas[condbr_inst].pl_node.payload_index = payload_index; + gzUnstack(else_gz); + gzUnstack(then_gz); +} + // Does this Declaration.Flags.Id have a name? (Zir.zig:2762) static bool declIdHasName(DeclFlagsId id) { return id != DECL_ID_UNNAMED_TEST && id != DECL_ID_COMPTIME; @@ -791,33 +1090,366 @@ typedef enum { RL_DISCARD, // Compute but discard (emit ensure_result_non_error). RL_TY, // Coerce to specific type. RL_COERCED_TY, // Coerce to specific type, result is the coercion. + RL_PTR, // Store result to typed pointer. data=alloc inst, src_node=node. + RL_INFERRED_PTR, // Store result to inferred pointer. data=alloc inst. } ResultLocTag; typedef struct { ResultLocTag tag; - uint32_t ty_inst; // ZirInstRef, used for RL_TY/RL_COERCED_TY. + uint32_t data; // ZirInstRef: ty_inst for TY/COERCED_TY, alloc inst for + // PTR/INFERRED_PTR. + uint32_t src_node; // Only used for RL_PTR. 
} ResultLoc; -#define RL_NONE_VAL ((ResultLoc) { .tag = RL_NONE, .ty_inst = 0 }) -#define RL_REF_VAL ((ResultLoc) { .tag = RL_REF, .ty_inst = 0 }) -#define RL_DISCARD_VAL ((ResultLoc) { .tag = RL_DISCARD, .ty_inst = 0 }) +#define RL_NONE_VAL ((ResultLoc) { .tag = RL_NONE, .data = 0, .src_node = 0 }) +#define RL_REF_VAL ((ResultLoc) { .tag = RL_REF, .data = 0, .src_node = 0 }) +#define RL_DISCARD_VAL \ + ((ResultLoc) { .tag = RL_DISCARD, .data = 0, .src_node = 0 }) + +// resultType (AstGen.zig:341-351): extract result type from RL. +// Returns 0 if no result type available. +static uint32_t rlResultType(GenZir* gz, ResultLoc rl, uint32_t node) { + switch (rl.tag) { + case RL_TY: + case RL_COERCED_TY: + return rl.data; + case RL_PTR: { + // typeof(ptr) -> elem_type (AstGen.zig:346-349). + uint32_t ptr_ty = addUnNode(gz, ZIR_INST_TYPEOF, rl.data, node); + return addUnNode(gz, ZIR_INST_ELEM_TYPE, ptr_ty, node); + } + default: + return 0; + } +} + +// rvalue (AstGen.zig:11051-11224): apply result location wrapping. +static uint32_t rvalue( + GenZir* gz, ResultLoc rl, uint32_t result, uint32_t node) { + switch (rl.tag) { + case RL_NONE: + case RL_COERCED_TY: + return result; + case RL_DISCARD: + // ensure_result_non_error (AstGen.zig:11071-11074). + addUnNode(gz, ZIR_INST_ENSURE_RESULT_NON_ERROR, result, node); + return ZIR_REF_VOID_VALUE; + case RL_REF: { + AstGenCtx* ag = gz->astgen; + uint32_t src_token = firstToken(ag->tree, node); + // If result is not an instruction index (e.g. a well-known ref), + // emit ref directly (AstGen.zig:11091-11092). + if (result < ZIR_REF_START_INDEX) { + return addUnTok(gz, ZIR_INST_REF, result, src_token); + } + // Deduplication via ref_table (AstGen.zig:11093-11097). 
+ uint32_t result_index = result - ZIR_REF_START_INDEX; + bool found; + uint32_t* val_ptr = refTableGetOrPut(ag, result_index, &found); + if (!found) { + *val_ptr = makeUnTok(gz, ZIR_INST_REF, result, src_token); + } + return *val_ptr + ZIR_REF_START_INDEX; + } + case RL_TY: { + // Quick elimination of common, unnecessary type coercions + // (AstGen.zig:11099-11209). +#define RC(t, v) (((uint64_t)(t) << 32) | (uint64_t)(v)) + uint64_t combined = RC(rl.data, result); + switch (combined) { + // Identity: type of result is already correct + // (AstGen.zig:11109-11176). + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U1_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U8_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I8_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U16_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U29_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I16_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U32_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I32_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U64_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I64_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U128_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I128_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_USIZE_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ISIZE_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_CHAR_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_SHORT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_USHORT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_INT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_UINT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONG_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_ULONG_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONGLONG_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_ULONGLONG_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONGDOUBLE_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F16_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F32_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F64_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F80_TYPE): + case RC(ZIR_REF_TYPE_TYPE, 
ZIR_REF_F128_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYOPAQUE_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_BOOL_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_VOID_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_TYPE_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYERROR_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_COMPTIME_INT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_COMPTIME_FLOAT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_NORETURN_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYFRAME_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_NULL_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_UNDEFINED_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ENUM_LITERAL_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_PTR_USIZE_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_PTR_CONST_COMPTIME_INT_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_U8_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_CONST_U8_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_CONST_U8_SENTINEL_0_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_SLICE_CONST_U8_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_SLICE_CONST_U8_SENTINEL_0_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_GENERIC_POISON_TYPE): + case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_EMPTY_TUPLE_TYPE): + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO): + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE): + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_NEGATIVE_ONE): + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF_USIZE): + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_USIZE): + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_USIZE): + case RC(ZIR_REF_U1_TYPE, ZIR_REF_UNDEF_U1): + case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO_U1): + case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE_U1): + case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO_U8): + case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE_U8): + case RC(ZIR_REF_U8_TYPE, ZIR_REF_FOUR_U8): + case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_UNDEF_BOOL): + case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_BOOL_TRUE): + case RC(ZIR_REF_BOOL_TYPE, 
ZIR_REF_BOOL_FALSE): + case RC(ZIR_REF_VOID_TYPE, ZIR_REF_VOID_VALUE): + return result; + // Conversions (AstGen.zig:11178-11202). + case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_UNDEF): + return ZIR_REF_UNDEF_BOOL; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF): + return ZIR_REF_UNDEF_USIZE; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF_U1): + return ZIR_REF_UNDEF_USIZE; + case RC(ZIR_REF_U1_TYPE, ZIR_REF_UNDEF): + return ZIR_REF_UNDEF_U1; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO): + return ZIR_REF_ZERO_USIZE; + case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO): + return ZIR_REF_ZERO_U1; + case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO): + return ZIR_REF_ZERO_U8; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE): + return ZIR_REF_ONE_USIZE; + case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE): + return ZIR_REF_ONE_U1; + case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE): + return ZIR_REF_ONE_U8; + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_USIZE): + return ZIR_REF_ZERO; + case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO_USIZE): + return ZIR_REF_ZERO_U1; + case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO_USIZE): + return ZIR_REF_ZERO_U8; + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_USIZE): + return ZIR_REF_ONE; + case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE_USIZE): + return ZIR_REF_ONE_U1; + case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE_USIZE): + return ZIR_REF_ONE_U8; + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_U1): + return ZIR_REF_ZERO; + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_U8): + return ZIR_REF_ZERO; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_U1): + return ZIR_REF_ZERO_USIZE; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_U8): + return ZIR_REF_ZERO_USIZE; + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_U1): + return ZIR_REF_ONE; + case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_U8): + return ZIR_REF_ONE; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_U1): + return ZIR_REF_ONE_USIZE; + case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_U8): + return ZIR_REF_ONE_USIZE; + default: + return addPlNodeBin(gz, ZIR_INST_AS_NODE, node, rl.data, result); + } +#undef 
RC + } + case RL_PTR: + // store_node (AstGen.zig:11211-11216). + addPlNodeBin(gz, ZIR_INST_STORE_NODE, + rl.src_node != 0 ? rl.src_node : node, rl.data, result); + return ZIR_REF_VOID_VALUE; + case RL_INFERRED_PTR: + // store_to_inferred_ptr (AstGen.zig:11218-11223). + addPlNodeBin( + gz, ZIR_INST_STORE_TO_INFERRED_PTR, node, rl.data, result); + return ZIR_REF_VOID_VALUE; + } + return result; +} // --- Expression evaluation (AstGen.zig:634) --- // Forward declarations. -static uint32_t expr(GenZir* gz, uint32_t node); -static uint32_t exprRl(GenZir* gz, ResultLoc rl, uint32_t node); +static uint32_t expr(GenZir* gz, Scope* scope, uint32_t node); +static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node); +static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node); +static void assignOp( + GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag); static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column); +static void emitDbgStmtForceCurrentIndex( + GenZir* gz, uint32_t line, uint32_t column); static void emitDbgNode(GenZir* gz, uint32_t node); +static void addDbgVar( + GenZir* gz, ZirInstTag tag, uint32_t name, uint32_t inst); +static void blockExprStmts( + GenZir* gz, Scope* scope, const uint32_t* statements, uint32_t stmt_count); +static uint32_t fullBodyExpr(GenZir* gz, Scope* scope, uint32_t node); +static uint32_t containerDecl(GenZir* gz, Scope* scope, uint32_t node); +static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node, + const uint32_t* members, uint32_t members_len); +static uint32_t blockExprExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node); +static uint32_t ifExpr(GenZir* gz, Scope* scope, uint32_t node); +static uint32_t forExpr(GenZir* gz, Scope* scope, uint32_t node); +static uint32_t orelseCatchExpr( + GenZir* gz, Scope* scope, uint32_t node, bool is_catch); +static uint32_t arrayInitDotExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node); +static uint32_t 
switchExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node); +static uint32_t whileExpr(GenZir* gz, Scope* scope, uint32_t node); +#define EVAL_TO_ERROR_NEVER 0 +#define EVAL_TO_ERROR_ALWAYS 1 +#define EVAL_TO_ERROR_MAYBE 2 +static int nodeMayEvalToError(const Ast* tree, uint32_t node); +static bool nodeMayAppendToErrorTrace(const Ast* tree, uint32_t node); +static void addSaveErrRetIndex(GenZir* gz, uint32_t operand); +static uint32_t identAsString(AstGenCtx* ag, uint32_t token); +static uint32_t lastToken(const Ast* tree, uint32_t node); +static uint32_t simpleBinOp( + GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag); -// Mirrors numberLiteral (AstGen.zig:8679). -// Handles literals "0" and "1" as built-in refs. -static uint32_t numberLiteral(AstGenCtx* ag, uint32_t node) { +// Mirrors GenZir.endsWithNoReturn (AstGen.zig:11770). +static bool endsWithNoReturn(GenZir* gz) { + uint32_t len = gzInstructionsLen(gz); + if (len == 0) + return false; + uint32_t last = gzInstructionsSlice(gz)[len - 1]; + ZirInstTag tag = gz->astgen->inst_tags[last]; + switch (tag) { + case ZIR_INST_BREAK: + case ZIR_INST_BREAK_INLINE: + case ZIR_INST_CONDBR: + case ZIR_INST_CONDBR_INLINE: + case ZIR_INST_RET_IMPLICIT: + case ZIR_INST_RET_NODE: + case ZIR_INST_REPEAT: + case ZIR_INST_REPEAT_INLINE: + case ZIR_INST_SWITCH_BLOCK: + case ZIR_INST_SWITCH_BLOCK_REF: + case ZIR_INST_SWITCH_BLOCK_ERR_UNION: + return true; + default: + return false; + } +} + +static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node); + +// Mirrors comptimeExpr2 (AstGen.zig:1982). +// Evaluates a node in a comptime block_comptime scope. +static uint32_t comptimeExpr(GenZir* gz, Scope* scope, uint32_t node) { + // Skip wrapping when already in comptime context (AstGen.zig:1990). + if (gz->is_comptime) + return expr(gz, scope, node); + // Optimization: certain node types are trivially comptime and don't need + // a block_comptime wrapper (AstGen.zig:1997-2046). 
+ AstGenCtx* ag = gz->astgen; + AstNodeTag tag = ag->tree->nodes.tags[node]; + switch (tag) { + // Identifier handling (AstGen.zig:2000-2003): + // Upstream calls identifier() with force_comptime which resolves + // primitives/int types directly and only wraps others in block_comptime. + // We mirror this by resolving primitives here and falling through for + // non-primitives. + case AST_NODE_IDENTIFIER: { + uint32_t prim = tryResolvePrimitiveIdent(gz, node); + if (prim != ZIR_REF_NONE) + return prim; + break; // non-primitive: fall through to block_comptime wrapping + } + case AST_NODE_NUMBER_LITERAL: + case AST_NODE_CHAR_LITERAL: + case AST_NODE_STRING_LITERAL: + case AST_NODE_MULTILINE_STRING_LITERAL: + case AST_NODE_ENUM_LITERAL: + case AST_NODE_ERROR_VALUE: + // Type expressions that force comptime eval of sub-expressions + // (AstGen.zig:2017-2042). + case AST_NODE_ERROR_UNION: + case AST_NODE_MERGE_ERROR_SETS: + case AST_NODE_OPTIONAL_TYPE: + case AST_NODE_PTR_TYPE_ALIGNED: + case AST_NODE_PTR_TYPE_SENTINEL: + case AST_NODE_PTR_TYPE: + case AST_NODE_PTR_TYPE_BIT_RANGE: + case AST_NODE_ARRAY_TYPE: + case AST_NODE_ARRAY_TYPE_SENTINEL: + case AST_NODE_FN_PROTO_SIMPLE: + case AST_NODE_FN_PROTO_MULTI: + case AST_NODE_FN_PROTO_ONE: + case AST_NODE_FN_PROTO: + case AST_NODE_CONTAINER_DECL: + case AST_NODE_CONTAINER_DECL_TRAILING: + case AST_NODE_CONTAINER_DECL_ARG: + case AST_NODE_CONTAINER_DECL_ARG_TRAILING: + case AST_NODE_CONTAINER_DECL_TWO: + case AST_NODE_CONTAINER_DECL_TWO_TRAILING: + case AST_NODE_TAGGED_UNION: + case AST_NODE_TAGGED_UNION_TRAILING: + case AST_NODE_TAGGED_UNION_ENUM_TAG: + case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: + case AST_NODE_TAGGED_UNION_TWO: + case AST_NODE_TAGGED_UNION_TWO_TRAILING: + return expr(gz, scope, node); + default: + break; + } + // General case: wrap in block_comptime (AstGen.zig:2078-2096). 
+ uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node); + GenZir block_scope = makeSubBlock(gz, scope); + block_scope.is_comptime = true; + uint32_t result = expr(&block_scope, scope, node); + addBreak(&block_scope, ZIR_INST_BREAK_INLINE, block_inst, result, + AST_NODE_OFFSET_NONE); + setBlockBody(ag, &block_scope, block_inst); + gzAppendInstruction(gz, block_inst); + return block_inst + ZIR_REF_START_INDEX; +} + +// Mirrors typeExpr (AstGen.zig:1966). +// Evaluates a type expression in comptime context. +static uint32_t typeExpr(GenZir* gz, Scope* scope, uint32_t node) { + return comptimeExpr(gz, scope, node); +} + +// Mirrors numberLiteral (AstGen.zig:8544). +// Parses integer and float literals, returns appropriate ZIR ref. +static uint32_t numberLiteral(GenZir* gz, uint32_t node) { + AstGenCtx* ag = gz->astgen; uint32_t num_token = ag->tree->nodes.main_tokens[node]; uint32_t tok_start = ag->tree->tokens.starts[num_token]; const char* source = ag->tree->source; - // Determine token length by scanning to next non-digit character. + // Determine token length by scanning to next non-number character. uint32_t tok_end = tok_start; while (tok_end < ag->tree->source_len && ((source[tok_end] >= '0' && source[tok_end] <= '9') @@ -828,22 +1460,51 @@ static uint32_t numberLiteral(AstGenCtx* ag, uint32_t node) { || (source[tok_end] >= 'A' && source[tok_end] <= 'F'))) { tok_end++; } - uint32_t tok_len = tok_end - tok_start; - if (tok_len == 1) { - if (source[tok_start] == '0') - return ZIR_REF_ZERO; - if (source[tok_start] == '1') - return ZIR_REF_ONE; + // Parse the integer value (simplified: decimal and hex). + uint64_t value = 0; + bool is_hex = false; + uint32_t pos = tok_start; + if (tok_end - tok_start >= 2 && source[tok_start] == '0' + && source[tok_start + 1] == 'x') { + is_hex = true; + pos = tok_start + 2; } - // TODO: handle other number literals (int, big_int, float). 
- ag->has_compile_errors = true; - return ZIR_REF_ZERO; + if (is_hex) { + for (; pos < tok_end; pos++) { + if (source[pos] == '_') + continue; + if (source[pos] >= '0' && source[pos] <= '9') + value = value * 16 + (uint64_t)(source[pos] - '0'); + else if (source[pos] >= 'a' && source[pos] <= 'f') + value = value * 16 + 10 + (uint64_t)(source[pos] - 'a'); + else if (source[pos] >= 'A' && source[pos] <= 'F') + value = value * 16 + 10 + (uint64_t)(source[pos] - 'A'); + } + } else { + for (; pos < tok_end; pos++) { + if (source[pos] == '_') + continue; + if (source[pos] == '.') + break; // float — not handled yet + if (source[pos] >= '0' && source[pos] <= '9') + value = value * 10 + (uint64_t)(source[pos] - '0'); + } + } + + // Special cases for 0 and 1 (AstGen.zig:8687-8703). + if (value == 0) + return ZIR_REF_ZERO; + if (value == 1) + return ZIR_REF_ONE; + + return addInt(gz, value); } // Mirrors builtinCall (AstGen.zig:9191), @import case (AstGen.zig:9242). -static uint32_t builtinCallImport(GenZir* gz, uint32_t node) { +static uint32_t builtinCallImport(GenZir* gz, Scope* scope, uint32_t node) { + (void)scope; AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; @@ -876,27 +1537,26 @@ static uint32_t builtinCallImport(GenZir* gz, uint32_t node) { } // Mirrors cImport (AstGen.zig:10011). -static uint32_t cImportExpr(GenZir* gz, uint32_t node) { +static uint32_t cImportExpr(GenZir* gz, Scope* scope, uint32_t node) { AstGenCtx* ag = gz->astgen; AstData nd = ag->tree->nodes.datas[node]; uint32_t body_node = nd.lhs; // first arg = body uint32_t block_inst = makeBlockInst(ag, ZIR_INST_C_IMPORT, gz, node); - GenZir block_scope = makeSubBlock(gz); + GenZir block_scope = makeSubBlock(gz, scope); block_scope.is_comptime = true; block_scope.c_import = true; - uint32_t block_result = expr(&block_scope, body_node); + // Use fullBodyExpr to inline unlabeled block body (AstGen.zig:10028). 
+ fullBodyExpr(&block_scope, &block_scope.base, body_node); - // ensure_result_used (AstGen.zig:10029). - addUnNode(&block_scope, ZIR_INST_ENSURE_RESULT_USED, block_result, node); + // ensure_result_used on gz (parent), not block_scope (AstGen.zig:10029). + addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, ZIR_REF_VOID_VALUE, node); - // break_inline if not noreturn (AstGen.zig:10030-10032). - if (block_result != ZIR_REF_UNREACHABLE_VALUE) { - makeBreakInline(&block_scope, block_inst, ZIR_REF_VOID_VALUE, - AST_NODE_OFFSET_NONE); - } + // break_inline (AstGen.zig:10030-10032). + makeBreakInline( + &block_scope, block_inst, ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE); setBlockBody(ag, &block_scope, block_inst); // block_scope unstacked now, can add to gz. @@ -906,12 +1566,12 @@ static uint32_t cImportExpr(GenZir* gz, uint32_t node) { } // Mirrors simpleCBuiltin (AstGen.zig:9938). -static uint32_t simpleCBuiltin( - GenZir* gz, uint32_t node, uint32_t operand_node, uint16_t ext_tag) { +static uint32_t simpleCBuiltin(GenZir* gz, Scope* scope, uint32_t node, + uint32_t operand_node, uint16_t ext_tag) { AstGenCtx* ag = gz->astgen; // Evaluate operand as comptime string. - uint32_t operand = expr(gz, operand_node); + uint32_t operand = expr(gz, scope, operand_node); // Emit extended instruction with UnNode payload (AstGen.zig:9954). ensureExtraCapacity(ag, 2); @@ -930,7 +1590,7 @@ static uint32_t simpleCBuiltin( } // Mirrors builtinCall (AstGen.zig:9191) dispatch. 
-static uint32_t builtinCall(GenZir* gz, uint32_t node) { +static uint32_t builtinCall(GenZir* gz, Scope* scope, uint32_t node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; @@ -952,29 +1612,118 @@ static uint32_t builtinCall(GenZir* gz, uint32_t node) { // clang-format off if (name_len == 6 && memcmp(source + name_start, "import", 6) == 0) - return builtinCallImport(gz, node); + return builtinCallImport(gz, scope, node); if (name_len == 7 && memcmp(source + name_start, "cImport", 7) == 0) - return cImportExpr(gz, node); + return cImportExpr(gz, scope, node); if (name_len == 8 && memcmp(source + name_start, "cInclude", 8) == 0) { AstData nd = tree->nodes.datas[node]; - return simpleCBuiltin(gz, node, nd.lhs, (uint16_t)ZIR_EXT_C_INCLUDE); + return simpleCBuiltin(gz, scope, node, nd.lhs, (uint16_t)ZIR_EXT_C_INCLUDE); + } + // @intCast (AstGen.zig:9416). + if (name_len == 7 && memcmp(source + name_start, "intCast", 7) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addPlNodeBin(gz, ZIR_INST_INT_CAST, node, + ZIR_REF_NONE, operand); + } + // @embedFile (AstGen.zig:9626). + if (name_len == 9 && memcmp(source + name_start, "embedFile", 9) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addUnNode(gz, ZIR_INST_EMBED_FILE, operand, node); + } + // @intFromEnum (AstGen.zig:9478). + if (name_len == 11 && memcmp(source + name_start, "intFromEnum", 11) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addUnNode(gz, ZIR_INST_INT_FROM_ENUM, operand, node); + } + // @tagName (AstGen.zig:9740). + if (name_len == 7 && memcmp(source + name_start, "tagName", 7) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addUnNode(gz, ZIR_INST_TAG_NAME, operand, node); + } + // @as (AstGen.zig:9388). 
+ if (name_len == 2 && memcmp(source + name_start, "as", 2) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t dest_type = typeExpr(gz, scope, nd.lhs); + uint32_t operand = expr(gz, scope, nd.rhs); + return addPlNodeBin(gz, ZIR_INST_AS_NODE, node, dest_type, operand); + } + // @truncate (AstGen.zig:9416). + if (name_len == 8 && memcmp(source + name_start, "truncate", 8) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addPlNodeBin(gz, ZIR_INST_TRUNCATE, node, + ZIR_REF_NONE, operand); + } + // @ptrCast (AstGen.zig:9416). + if (name_len == 7 && memcmp(source + name_start, "ptrCast", 7) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addPlNodeBin(gz, ZIR_INST_PTR_CAST, node, + ZIR_REF_NONE, operand); + } + // @enumFromInt (AstGen.zig:9480). + if (name_len == 11 && memcmp(source + name_start, "enumFromInt", 11) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addPlNodeBin(gz, ZIR_INST_ENUM_FROM_INT, node, + ZIR_REF_NONE, operand); + } + // @bitCast (AstGen.zig:9416). + if (name_len == 7 && memcmp(source + name_start, "bitCast", 7) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t operand = expr(gz, scope, nd.lhs); + return addPlNodeBin(gz, ZIR_INST_BITCAST, node, + ZIR_REF_NONE, operand); + } + // @memcpy (AstGen.zig:9586). + if (name_len == 6 && memcmp(source + name_start, "memcpy", 6) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t dst = expr(gz, scope, nd.lhs); + uint32_t src = expr(gz, scope, nd.rhs); + return addPlNodeBin(gz, ZIR_INST_MEMCPY, node, dst, src); + } + // @memset (AstGen.zig:9582). 
+ if (name_len == 6 && memcmp(source + name_start, "memset", 6) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t dst = expr(gz, scope, nd.lhs); + uint32_t val = expr(gz, scope, nd.rhs); + return addPlNodeBin(gz, ZIR_INST_MEMSET, node, dst, val); + } + // @min (AstGen.zig:9155). + if (name_len == 3 && memcmp(source + name_start, "min", 3) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t a = expr(gz, scope, nd.lhs); + uint32_t b = expr(gz, scope, nd.rhs); + return addPlNodeBin(gz, ZIR_INST_MIN, node, a, b); + } + // @max (AstGen.zig:9155). + if (name_len == 3 && memcmp(source + name_start, "max", 3) == 0) { + AstData nd = tree->nodes.datas[node]; + uint32_t a = expr(gz, scope, nd.lhs); + uint32_t b = expr(gz, scope, nd.rhs); + return addPlNodeBin(gz, ZIR_INST_MAX, node, a, b); } // clang-format on // TODO: handle other builtins. - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } // --- identifier (AstGen.zig:8282) --- // Simplified: handles decl_val resolution for container-level declarations. -static uint32_t identifierExpr(GenZir* gz, ResultLoc rl, uint32_t node) { +// Tries to resolve an identifier as a primitive type or integer type. +// Returns the ZIR ref if it's a primitive/int type, or ZIR_REF_NONE. +// Mirrors primitive_instrs + integer type checks in identifier() +// (AstGen.zig:8298-8337). +static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node) { AstGenCtx* ag = gz->astgen; - const Ast* tree = ag->tree; - uint32_t ident_token = tree->nodes.main_tokens[node]; - - // Check for primitive types FIRST (AstGen.zig:8298-8338). 
+ uint32_t ident_token = ag->tree->nodes.main_tokens[node]; uint32_t tok_start = ag->tree->tokens.starts[ident_token]; const char* source = ag->tree->source; uint32_t tok_end = tok_start; @@ -993,6 +1742,10 @@ static uint32_t identifierExpr(GenZir* gz, ResultLoc rl, uint32_t node) { if (tok_len == 4 && memcmp(source+tok_start, "bool", 4) == 0) return ZIR_REF_BOOL_TYPE; if (tok_len == 4 && memcmp(source+tok_start, "void", 4) == 0) return ZIR_REF_VOID_TYPE; if (tok_len == 6 && memcmp(source+tok_start, "c_uint", 6) == 0) return ZIR_REF_C_UINT_TYPE; + if (tok_len == 4 && memcmp(source+tok_start, "true", 4) == 0) return ZIR_REF_BOOL_TRUE; + if (tok_len == 5 && memcmp(source+tok_start, "false", 5) == 0) return ZIR_REF_BOOL_FALSE; + if (tok_len == 4 && memcmp(source+tok_start, "null", 4) == 0) return ZIR_REF_NULL_VALUE; + if (tok_len == 9 && memcmp(source+tok_start, "undefined", 9) == 0) return ZIR_REF_UNDEF; // clang-format on // Integer type detection: u29, i13, etc. (AstGen.zig:8304-8336). @@ -1020,9 +1773,64 @@ static uint32_t identifierExpr(GenZir* gz, ResultLoc rl, uint32_t node) { return addInstruction(gz, ZIR_INST_INT_TYPE, data); } } + return ZIR_REF_NONE; +} + +static uint32_t identifierExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { + AstGenCtx* ag = gz->astgen; + uint32_t ident_token = ag->tree->nodes.main_tokens[node]; + + // Check for primitive types FIRST (AstGen.zig:8298-8338). + uint32_t prim = tryResolvePrimitiveIdent(gz, node); + if (prim != ZIR_REF_NONE) + return prim; + + // Scope chain walk (AstGen.zig:8340-8461). 
+ uint32_t name_str = identAsString(ag, ident_token); + for (Scope* s = scope; s != NULL;) { + switch (s->tag) { + case SCOPE_LOCAL_VAL: { + ScopeLocalVal* lv = (ScopeLocalVal*)s; + if (lv->name == name_str) + return rvalue(gz, rl, lv->inst, node); + s = lv->parent; + continue; + } + case SCOPE_LOCAL_PTR: { + ScopeLocalPtr* lp = (ScopeLocalPtr*)s; + if (lp->name == name_str) { + if (rl.tag == RL_REF) + return lp->ptr; + return addUnNode(gz, ZIR_INST_LOAD, lp->ptr, node); + } + s = lp->parent; + continue; + } + case SCOPE_GEN_ZIR: { + GenZir* gzs = (GenZir*)s; + s = gzs->parent; + continue; + } + case SCOPE_DEFER_NORMAL: + case SCOPE_DEFER_ERROR: { + ScopeDefer* sd = (ScopeDefer*)s; + s = sd->parent; + continue; + } + case SCOPE_LABEL: { + ScopeLabel* sl = (ScopeLabel*)s; + s = sl->parent; + continue; + } + case SCOPE_NAMESPACE: + case SCOPE_TOP: + goto decl_table; + } + } +decl_table: // Decl table lookup (AstGen.zig:8462-8520). - uint32_t name_str = identAsString(ag, ident_token); for (uint32_t i = 0; i < ag->decl_table_len; i++) { if (ag->decl_names[i] == name_str) { ZirInstTag itag @@ -1034,14 +1842,15 @@ static uint32_t identifierExpr(GenZir* gz, ResultLoc rl, uint32_t node) { } } - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } // --- fieldAccess (AstGen.zig:6154) --- // Simplified: emits field_val instruction with Field payload. -static uint32_t fieldAccessExpr(GenZir* gz, ResultLoc rl, uint32_t node) { +static uint32_t fieldAccessExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstData nd = tree->nodes.datas[node]; @@ -1056,7 +1865,15 @@ static uint32_t fieldAccessExpr(GenZir* gz, ResultLoc rl, uint32_t node) { // Evaluate the LHS object expression (AstGen.zig:6181). // For .ref rl, LHS is also evaluated with .ref (AstGen.zig:6161). ResultLoc lhs_rl = (rl.tag == RL_REF) ? 
RL_REF_VAL : RL_NONE_VAL; - uint32_t lhs = exprRl(gz, lhs_rl, object_node); + uint32_t lhs = exprRl(gz, scope, lhs_rl, object_node); + + // Emit dbg_stmt for the dot token (AstGen.zig:6183-6184). + advanceSourceCursorToMainToken(ag, node); + { + uint32_t line = ag->source_line - gz->decl_line; + uint32_t column = ag->source_column; + emitDbgStmt(gz, line, column); + } // Emit field_val instruction with Field payload (AstGen.zig:6186-6189). ensureExtraCapacity(ag, 2); @@ -1065,18 +1882,22 @@ static uint32_t fieldAccessExpr(GenZir* gz, ResultLoc rl, uint32_t node) { ag->extra[ag->extra_len++] = str_index; // Field.field_name_start // .ref → field_ptr, else → field_val (AstGen.zig:6160-6164). - ZirInstTag tag + ZirInstTag ftag = (rl.tag == RL_REF) ? ZIR_INST_FIELD_PTR : ZIR_INST_FIELD_VAL; ZirInstData data; data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; data.pl_node.payload_index = payload_index; - return addInstruction(gz, tag, data); + uint32_t access = addInstruction(gz, ftag, data); + // For ref, return directly; otherwise apply rvalue (AstGen.zig:6161-6164). + if (rl.tag == RL_REF) + return access; + return rvalue(gz, rl, access, node); } // --- ptrType (AstGen.zig:3833) --- // Simplified: handles []const T and []T slice types. -static uint32_t ptrTypeExpr(GenZir* gz, uint32_t node) { +static uint32_t ptrTypeExpr(GenZir* gz, Scope* scope, uint32_t node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstNodeTag tag = tree->nodes.tags[node]; @@ -1116,8 +1937,8 @@ static uint32_t ptrTypeExpr(GenZir* gz, uint32_t node) { child_type_node = nd.rhs; } - // Evaluate element type. - uint32_t elem_type = exprRl(gz, RL_NONE_VAL, child_type_node); + // Evaluate element type (AstGen.zig ptrType uses typeExpr). + uint32_t elem_type = typeExpr(gz, scope, child_type_node); // Build PtrType payload: { elem_type, src_node }. 
ensureExtraCapacity(ag, 2); @@ -1141,20 +1962,28 @@ static uint32_t ptrTypeExpr(GenZir* gz, uint32_t node) { // --- arrayType (AstGen.zig:940) --- -static uint32_t arrayTypeExpr(GenZir* gz, uint32_t node) { +static uint32_t arrayTypeExpr(GenZir* gz, Scope* scope, uint32_t node) { AstGenCtx* ag = gz->astgen; - AstData nd = ag->tree->nodes.datas[node]; + const Ast* tree = ag->tree; + AstData nd = tree->nodes.datas[node]; // data.lhs = length expr node, data.rhs = element type node. - uint32_t len = exprRl(gz, RL_NONE_VAL, nd.lhs); - uint32_t elem_type = exprRl(gz, RL_NONE_VAL, nd.rhs); + // Check for `_` identifier → compile error (AstGen.zig:3950-3953). + if (tree->nodes.tags[nd.lhs] == AST_NODE_IDENTIFIER + && isUnderscoreIdent(tree, nd.lhs)) { + SET_ERROR(ag); + return ZIR_REF_VOID_VALUE; + } + uint32_t len = comptimeExpr(gz, scope, nd.lhs); + uint32_t elem_type = typeExpr(gz, scope, nd.rhs); return addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, node, len, elem_type); } // --- arrayInitExpr (AstGen.zig:1431) --- // Simplified: handles typed array init with inferred [_] length. -static uint32_t arrayInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { +static uint32_t arrayInitExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstNodeTag tag = tree->nodes.tags[node]; @@ -1191,12 +2020,12 @@ static uint32_t arrayInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { break; } default: - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } if (type_expr_node == 0 || elem_count == 0) { - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } @@ -1207,63 +2036,59 @@ static uint32_t arrayInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { uint32_t elem_type_node = type_nd.rhs; // Check if elem_count is `_` identifier. 
- if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER) { - uint32_t id_tok = tree->nodes.main_tokens[elem_count_node]; - uint32_t id_start = tree->tokens.starts[id_tok]; - if (tree->source[id_start] == '_' - && (id_start + 1 >= tree->source_len - || !((tree->source[id_start + 1] >= 'a' - && tree->source[id_start + 1] <= 'z') - || (tree->source[id_start + 1] >= 'A' - && tree->source[id_start + 1] <= 'Z') - || tree->source[id_start + 1] == '_'))) { - // Inferred length: addInt(elem_count) (AstGen.zig:1452). - uint32_t len_inst = addInt(gz, elem_count); - uint32_t elem_type = exprRl(gz, RL_NONE_VAL, elem_type_node); - uint32_t array_type_inst = addPlNodeBin(gz, - ZIR_INST_ARRAY_TYPE, type_expr_node, len_inst, elem_type); + if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER + && isUnderscoreIdent(tree, elem_count_node)) { + // Inferred length: addInt(elem_count) (AstGen.zig:1452). + uint32_t len_inst = addInt(gz, elem_count); + uint32_t elem_type + = exprRl(gz, scope, RL_NONE_VAL, elem_type_node); + uint32_t array_type_inst = addPlNodeBin( + gz, ZIR_INST_ARRAY_TYPE, type_expr_node, len_inst, elem_type); - // arrayInitExprTyped (AstGen.zig:1507/1509). - bool is_ref = (rl.tag == RL_REF); - // Build MultiOp payload: operands_len, then type + elements. - uint32_t operands_len = elem_count + 1; // +1 for type - ensureExtraCapacity(ag, 1 + operands_len); - uint32_t payload_index = ag->extra_len; - ag->extra[ag->extra_len++] = operands_len; - ag->extra[ag->extra_len++] = array_type_inst; // type ref - for (uint32_t i = 0; i < elem_count; i++) { - uint32_t elem_ref = exprRl(gz, RL_NONE_VAL, elements[i]); - ag->extra[ag->extra_len++] = elem_ref; - } - ZirInstTag init_tag - = is_ref ? ZIR_INST_ARRAY_INIT_REF : ZIR_INST_ARRAY_INIT; - ZirInstData data; - data.pl_node.src_node - = (int32_t)node - (int32_t)gz->decl_node_index; - data.pl_node.payload_index = payload_index; - return addInstruction(gz, init_tag, data); + // arrayInitExprTyped (AstGen.zig:1507/1509). 
+ bool is_ref = (rl.tag == RL_REF); + // Build MultiOp payload: operands_len, then type + elements. + uint32_t operands_len = elem_count + 1; // +1 for type + ensureExtraCapacity(ag, 1 + operands_len); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = operands_len; + ag->extra[ag->extra_len++] = array_type_inst; // type ref + for (uint32_t i = 0; i < elem_count; i++) { + uint32_t elem_ref + = exprRl(gz, scope, RL_NONE_VAL, elements[i]); + ag->extra[ag->extra_len++] = elem_ref; } + ZirInstTag init_tag + = is_ref ? ZIR_INST_ARRAY_INIT_REF : ZIR_INST_ARRAY_INIT; + ZirInstData data; + data.pl_node.src_node + = (int32_t)node - (int32_t)gz->decl_node_index; + data.pl_node.payload_index = payload_index; + return addInstruction(gz, init_tag, data); } } // Non-inferred length: evaluate type normally. - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } // --- simpleBinOp (AstGen.zig:2204) --- -static uint32_t simpleBinOp(GenZir* gz, uint32_t node, ZirInstTag op_tag) { +static uint32_t simpleBinOp( + GenZir* gz, Scope* scope, uint32_t node, ZirInstTag op_tag) { AstGenCtx* ag = gz->astgen; AstData nd = ag->tree->nodes.datas[node]; - uint32_t lhs = exprRl(gz, RL_NONE_VAL, nd.lhs); - uint32_t rhs = exprRl(gz, RL_NONE_VAL, nd.rhs); + uint32_t lhs = exprRl(gz, scope, RL_NONE_VAL, nd.lhs); + uint32_t rhs = exprRl(gz, scope, RL_NONE_VAL, nd.rhs); return addPlNodeBin(gz, op_tag, node, lhs, rhs); } // --- multilineStringLiteral (AstGen.zig:8645) --- // Port of strLitNodeAsString for multiline strings. -static uint32_t multilineStringLiteral(GenZir* gz, uint32_t node) { +static uint32_t multilineStringLiteral( + GenZir* gz, Scope* scope, uint32_t node) { + (void)scope; AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstData nd = tree->nodes.datas[node]; @@ -1308,26 +2133,65 @@ static uint32_t multilineStringLiteral(GenZir* gz, uint32_t node) { // --- ret (AstGen.zig:8119) --- // Simplified: no defer handling. 
-static uint32_t retExpr(GenZir* gz, uint32_t node) { +static uint32_t retExpr(GenZir* gz, Scope* scope, uint32_t node) { const AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; + // Ensure debug line/column information is emitted for this return + // expression (AstGen.zig:8141-8144). if (!gz->is_comptime) { emitDbgNode(gz, node); } + uint32_t ret_lc_line = ag->source_line - gz->decl_line; + uint32_t ret_lc_column = ag->source_column; AstData nd = tree->nodes.datas[node]; uint32_t operand_node = nd.lhs; // optional if (operand_node == 0) { - // Void return (AstGen.zig:8155). + // Void return (AstGen.zig:8148-8156). + // Restore error trace unconditionally (AstGen.zig:8153). + ZirInstData rdata; + rdata.un_node.operand = ZIR_REF_NONE; + rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + addInstruction( + gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata); addUnNode(gz, ZIR_INST_RET_NODE, ZIR_REF_VOID_VALUE, node); return ZIR_REF_UNREACHABLE_VALUE; } - // Evaluate operand (simplified: no coercion to fn_ret_ty yet). - uint32_t operand = expr(gz, operand_node); + // Evaluate operand with fn_ret_ty as result type (AstGen.zig:8178-8186). + ResultLoc ret_rl = RL_NONE_VAL; + if (ag->fn_ret_ty != 0) { + ret_rl.tag = RL_COERCED_TY; + ret_rl.data = ag->fn_ret_ty; + } + uint32_t operand = exprRl(gz, scope, ret_rl, operand_node); + // Emit RESTORE_ERR_RET_INDEX based on nodeMayEvalToError + // (AstGen.zig:8188-8220). + int eval_to_err = nodeMayEvalToError(tree, operand_node); + if (eval_to_err == EVAL_TO_ERROR_NEVER) { + // Returning non-error: pop error trace unconditionally + // (AstGen.zig:8193-8194). 
+ ZirInstData rdata; + rdata.un_node.operand = ZIR_REF_NONE; + rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + addInstruction( + gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata); + } else if (eval_to_err == EVAL_TO_ERROR_MAYBE) { + // May be an error: conditionally pop based on value + // (AstGen.zig:8216-8217). + ZirInstData rdata; + rdata.un_node.operand = operand; + rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + addInstruction(gz, ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY, rdata); + } + // .always: no restore needed (error stays on trace) + + // Emit dbg_stmt back at return keyword for error return tracing + // (AstGen.zig:8196). + emitDbgStmt(gz, ret_lc_line, ret_lc_column); addUnNode(gz, ZIR_INST_RET_NODE, operand, node); return ZIR_REF_UNREACHABLE_VALUE; } @@ -1342,7 +2206,7 @@ typedef struct { uint32_t direct; // for direct calls: ref to callee } Callee; -static Callee calleeExpr(GenZir* gz, uint32_t fn_expr_node) { +static Callee calleeExpr(GenZir* gz, Scope* scope, uint32_t fn_expr_node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstNodeTag tag = tree->nodes.tags[fn_expr_node]; @@ -1353,9 +2217,16 @@ static Callee calleeExpr(GenZir* gz, uint32_t fn_expr_node) { uint32_t field_ident = nd.rhs; uint32_t str_index = identAsString(ag, field_ident); // Evaluate object with .ref rl (AstGen.zig:10207). - uint32_t lhs = exprRl(gz, RL_REF_VAL, object_node); + uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, object_node); - emitDbgNode(gz, fn_expr_node); + // Advance to main token (the `.` dot) — not first token + // (AstGen.zig:10209). + advanceSourceCursorToMainToken(ag, fn_expr_node); + { + uint32_t line = ag->source_line - gz->decl_line; + uint32_t column = ag->source_column; + emitDbgStmt(gz, line, column); + } Callee c; c.is_field = true; @@ -1368,14 +2239,14 @@ static Callee calleeExpr(GenZir* gz, uint32_t fn_expr_node) { // Default: direct call (AstGen.zig:10235). 
Callee c; c.is_field = false; - c.direct = expr(gz, fn_expr_node); + c.direct = expr(gz, scope, fn_expr_node); c.obj_ptr = 0; c.field_name_start = 0; return c; } // --- callExpr (AstGen.zig:10058) --- -static uint32_t callExpr(GenZir* gz, uint32_t node) { +static uint32_t callExpr(GenZir* gz, Scope* scope, uint32_t node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstNodeTag tag = tree->nodes.tags[node]; @@ -1412,18 +2283,18 @@ static uint32_t callExpr(GenZir* gz, uint32_t node) { break; } default: - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } - Callee callee = calleeExpr(gz, fn_expr_node); + Callee callee = calleeExpr(gz, scope, fn_expr_node); - // dbg_stmt before call (AstGen.zig:10082). + // dbg_stmt before call (AstGen.zig:10078-10083). { advanceSourceCursor(ag, tree->tokens.starts[lparen_tok]); uint32_t line = ag->source_line - gz->decl_line; uint32_t column = ag->source_column; - emitDbgStmt(gz, line, column); + emitDbgStmtForceCurrentIndex(gz, line, column); } // Reserve instruction slot for call (AstGen.zig:10093). @@ -1442,21 +2313,27 @@ static uint32_t callExpr(GenZir* gz, uint32_t node) { uint32_t arg_lengths_start = ag->extra_len; ag->extra_len += args_len; - for (uint32_t i = 0; i < args_len; i++) { - GenZir arg_block = makeSubBlock(gz); - uint32_t arg_ref = expr(&arg_block, args[i]); + // call_inst ref reused for param type (AstGen.zig:10107). + uint32_t call_inst = call_index + ZIR_REF_START_INDEX; + ResultLoc arg_rl = { .tag = RL_COERCED_TY, .data = call_inst }; - // break_inline with param_node src (AstGen.zig:10107). + for (uint32_t i = 0; i < args_len; i++) { + GenZir arg_block = makeSubBlock(gz, scope); + uint32_t arg_ref + = exprRl(&arg_block, &arg_block.base, arg_rl, args[i]); + + // break_inline with param_node src (AstGen.zig:10108). 
int32_t param_src = (int32_t)args[i] - (int32_t)arg_block.decl_node_index; makeBreakInline(&arg_block, call_index, arg_ref, param_src); - // Copy arg_block body to extra. - uint32_t body_len = gzInstructionsLen(&arg_block); + // Copy arg_block body to extra (with ref_table fixups). + uint32_t raw_body_len = gzInstructionsLen(&arg_block); const uint32_t* body = gzInstructionsSlice(&arg_block); - ensureExtraCapacity(ag, body_len); - for (uint32_t j = 0; j < body_len; j++) { - ag->extra[ag->extra_len++] = body[j]; + uint32_t fixup_len = countBodyLenAfterFixups(ag, body, raw_body_len); + ensureExtraCapacity(ag, fixup_len); + for (uint32_t j = 0; j < raw_body_len; j++) { + appendPossiblyRefdBodyInst(ag, body[j]); } // Record cumulative body length (AstGen.zig:10113). ag->extra[arg_lengths_start + i] @@ -1471,9 +2348,10 @@ static uint32_t callExpr(GenZir* gz, uint32_t node) { uint32_t payload_index = ag->extra_len; ag->extra[ag->extra_len++] = callee.obj_ptr; ag->extra[ag->extra_len++] = callee.field_name_start; - // flags: pop_error_return_trace=true, modifier=auto, args_len - uint32_t flags = (1u << 0) // pop_error_return_trace - | ((args_len & 0x1FFFFFFFu) << 3); // packed_modifier = auto (0) + // Flags layout (packed): modifier:u3, ensure_result_used:bool, + // pop_error_return_trace:bool, args_len:u27. 
+ uint32_t flags = (1u << 4) // pop_error_return_trace = true + | ((args_len & 0x7FFFFFFu) << 5); // args_len ag->extra[ag->extra_len++] = flags; ag->inst_tags[call_index] = ZIR_INST_FIELD_CALL; ag->inst_datas[call_index].pl_node.src_node @@ -1484,9 +2362,10 @@ static uint32_t callExpr(GenZir* gz, uint32_t node) { ensureExtraCapacity(ag, 2); uint32_t payload_index = ag->extra_len; ag->extra[ag->extra_len++] = callee.direct; - // flags: pop_error_return_trace=true, modifier=auto, args_len - uint32_t flags = (1u << 0) // pop_error_return_trace - | ((args_len & 0x1FFFFFFFu) << 3); // packed_modifier = auto (0) + // Flags layout (packed): modifier:u3, ensure_result_used:bool, + // pop_error_return_trace:bool, args_len:u27. + uint32_t flags = (1u << 4) // pop_error_return_trace = true + | ((args_len & 0x7FFFFFFu) << 5); // args_len ag->extra[ag->extra_len++] = flags; ag->inst_tags[call_index] = ZIR_INST_CALL; ag->inst_datas[call_index].pl_node.src_node @@ -1499,7 +2378,8 @@ static uint32_t callExpr(GenZir* gz, uint32_t node) { // --- structInitExpr (AstGen.zig:1674) --- // Simplified: handles .{} (empty tuple), .{.a = b} (anon init). 
-static uint32_t structInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { +static uint32_t structInitExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; AstNodeTag tag = tree->nodes.tags[node]; @@ -1542,14 +2422,30 @@ static uint32_t structInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { } break; } + case AST_NODE_STRUCT_INIT: + case AST_NODE_STRUCT_INIT_COMMA: { + type_expr_node = nd.lhs; + uint32_t extra_idx = nd.rhs; + uint32_t range_start = tree->extra_data.arr[extra_idx]; + uint32_t range_end = tree->extra_data.arr[extra_idx + 1]; + fields = tree->extra_data.arr + range_start; + fields_len = range_end - range_start; + break; + } default: - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } if (type_expr_node == 0 && fields_len == 0) { - // .{} with rl.none/ref → empty_tuple (AstGen.zig:1694). - (void)rl; + // .{} — depends on result location (AstGen.zig:1687-1698). + if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY) { + return addUnNode( + gz, ZIR_INST_STRUCT_INIT_EMPTY_RESULT, rl.data, node); + } + if (rl.tag == RL_DISCARD) { + return ZIR_REF_VOID_VALUE; + } return ZIR_REF_EMPTY_TUPLE; } @@ -1570,7 +2466,7 @@ static uint32_t structInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { // field name is 2 tokens before the field init's first token. uint32_t name_token = firstToken(tree, field_init) - 2; uint32_t str_index = identAsString(ag, name_token); - uint32_t init_ref = expr(gz, field_init); + uint32_t init_ref = expr(gz, scope, field_init); ag->extra[items_start + i * 2] = str_index; ag->extra[items_start + i * 2 + 1] = init_ref; } @@ -1581,18 +2477,87 @@ static uint32_t structInitExpr(GenZir* gz, ResultLoc rl, uint32_t node) { // Typed init: evaluate type, emit struct_init_empty or struct_init. if (type_expr_node != 0 && fields_len == 0) { - uint32_t ty_inst = expr(gz, type_expr_node); + // Check for [_]T{} pattern (AstGen.zig:1707-1753). 
+ AstNodeTag type_tag = tree->nodes.tags[type_expr_node]; + if (type_tag == AST_NODE_ARRAY_TYPE + || type_tag == AST_NODE_ARRAY_TYPE_SENTINEL) { + AstData type_nd = tree->nodes.datas[type_expr_node]; + uint32_t elem_count_node = type_nd.lhs; + if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER + && isUnderscoreIdent(tree, elem_count_node)) { + // Inferred length with 0 fields → length 0. + if (type_tag == AST_NODE_ARRAY_TYPE) { + uint32_t elem_type + = exprRl(gz, scope, RL_NONE_VAL, type_nd.rhs); + uint32_t array_type_inst + = addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, type_expr_node, + ZIR_REF_ZERO_USIZE, elem_type); + return addUnNode( + gz, ZIR_INST_STRUCT_INIT_EMPTY, array_type_inst, node); + } + // ARRAY_TYPE_SENTINEL: extra[rhs] = sentinel, extra[rhs+1] + // = elem_type + uint32_t sentinel_node = tree->extra_data.arr[type_nd.rhs]; + uint32_t elem_type_node + = tree->extra_data.arr[type_nd.rhs + 1]; + uint32_t elem_type + = exprRl(gz, scope, RL_NONE_VAL, elem_type_node); + uint32_t sentinel = comptimeExpr(gz, scope, sentinel_node); + uint32_t array_type_inst = addPlNodeTriple(gz, + ZIR_INST_ARRAY_TYPE_SENTINEL, type_expr_node, + ZIR_REF_ZERO_USIZE, elem_type, sentinel); + return addUnNode( + gz, ZIR_INST_STRUCT_INIT_EMPTY, array_type_inst, node); + } + } + uint32_t ty_inst = typeExpr(gz, scope, type_expr_node); return addUnNode(gz, ZIR_INST_STRUCT_INIT_EMPTY, ty_inst, node); } - // TODO: typed struct init with fields. - ag->has_compile_errors = true; + // Typed struct init with fields (AstGen.zig:1808-1818). + if (type_expr_node != 0 && fields_len > 0) { + uint32_t ty_inst = typeExpr(gz, scope, type_expr_node); + addUnNode(gz, ZIR_INST_VALIDATE_STRUCT_INIT_TY, ty_inst, node); + + // structInitExprTyped (AstGen.zig:1896-1931). + // StructInit payload: abs_node, abs_line, fields_len. 
+ ensureExtraCapacity(ag, 3 + fields_len * 2); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = node; // abs_node + ag->extra[ag->extra_len++] = ag->source_line; // abs_line + ag->extra[ag->extra_len++] = fields_len; + // Reserve space for field items (field_type + init each). + uint32_t items_start = ag->extra_len; + ag->extra_len += fields_len * 2; + + for (uint32_t i = 0; i < fields_len; i++) { + uint32_t field_init = fields[i]; + uint32_t name_token = firstToken(tree, field_init) - 2; + uint32_t str_index = identAsString(ag, name_token); + // struct_init_field_type (AstGen.zig:1918-1921). + uint32_t field_ty_inst + = addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_TYPE, field_init, + ty_inst, str_index); + // Evaluate init (coerced_ty in upstream = no explicit coercion). + uint32_t init_ref = expr(gz, scope, field_init); + ag->extra[items_start + i * 2] + = field_ty_inst - ZIR_REF_START_INDEX; // .toIndex() + ag->extra[items_start + i * 2 + 1] = init_ref; + } + + bool is_ref = (rl.tag == RL_REF); + ZirInstTag init_tag + = is_ref ? ZIR_INST_STRUCT_INIT_REF : ZIR_INST_STRUCT_INIT; + return addPlNodePayloadIndex(gz, init_tag, node, payload_index); + } + + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } // --- tryExpr (AstGen.zig:5957) --- // Simplified: no defer handling. -static uint32_t tryExpr(GenZir* gz, uint32_t node) { +static uint32_t tryExpr(GenZir* gz, Scope* scope, uint32_t node) { AstGenCtx* ag = gz->astgen; AstData nd = ag->tree->nodes.datas[node]; uint32_t operand_node = nd.lhs; @@ -1600,20 +2565,25 @@ static uint32_t tryExpr(GenZir* gz, uint32_t node) { if (!gz->is_comptime) { emitDbgNode(gz, node); } + uint32_t try_lc_line = ag->source_line - gz->decl_line; + uint32_t try_lc_column = ag->source_column; // Evaluate operand (AstGen.zig:6001). - uint32_t operand = expr(gz, operand_node); + uint32_t operand = expr(gz, scope, operand_node); // Create try block instruction (AstGen.zig:6007). 
uint32_t try_inst = makeBlockInst(ag, ZIR_INST_TRY, gz, node); gzAppendInstruction(gz, try_inst); // Else scope: extract error code, return it (AstGen.zig:6012-6025). - GenZir else_scope = makeSubBlock(gz); + GenZir else_scope = makeSubBlock(gz, scope); uint32_t err_code = addUnNode(&else_scope, ZIR_INST_ERR_UNION_CODE, operand, node); + // Emit dbg_stmt at try keyword for error return tracing (AstGen.zig:6020). + emitDbgStmt(&else_scope, try_lc_line, try_lc_column); + // ret_node with error code (AstGen.zig:6021). addUnNode(&else_scope, ZIR_INST_RET_NODE, err_code, node); @@ -1623,21 +2593,75 @@ static uint32_t tryExpr(GenZir* gz, uint32_t node) { return try_inst + ZIR_REF_START_INDEX; // toRef() } -// Mirrors expr (AstGen.zig:634) — main expression dispatcher. -static uint32_t exprRl(GenZir* gz, ResultLoc rl, uint32_t node) { +// --- boolBinOp (AstGen.zig:6274) --- +// Short-circuiting boolean and/or. + +static uint32_t boolBinOp( + GenZir* gz, Scope* scope, uint32_t node, ZirInstTag zir_tag) { AstGenCtx* ag = gz->astgen; + AstData nd = ag->tree->nodes.datas[node]; + uint32_t lhs_node = nd.lhs; + uint32_t rhs_node = nd.rhs; + + // Evaluate LHS (AstGen.zig:6285). + uint32_t lhs = expr(gz, scope, lhs_node); + + // Reserve the bool_br instruction (payload set later) + // (AstGen.zig:6286). + uint32_t bool_br = reserveInstructionIndex(ag); + gzAppendInstruction(gz, bool_br); + + // Evaluate RHS in sub-block (AstGen.zig:6288-6293). + GenZir rhs_scope = makeSubBlock(gz, scope); + uint32_t rhs = expr(&rhs_scope, &rhs_scope.base, rhs_node); + + if (!ag->has_compile_errors) { + // break_inline from rhs to bool_br (AstGen.zig:6292). + makeBreakInline(&rhs_scope, bool_br, rhs, + (int32_t)rhs_node - (int32_t)rhs_scope.decl_node_index); + } + + // setBoolBrBody (AstGen.zig:6294, 11929-11944). 
+ uint32_t raw_body_len = gzInstructionsLen(&rhs_scope); + const uint32_t* body = gzInstructionsSlice(&rhs_scope); + uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len); + ensureExtraCapacity(ag, 2 + body_len); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = lhs; // BoolBr.lhs + ag->extra[ag->extra_len++] = body_len; // BoolBr.body_len + for (uint32_t i = 0; i < raw_body_len; i++) + appendPossiblyRefdBodyInst(ag, body[i]); + gzUnstack(&rhs_scope); + + // Fill in the bool_br instruction. + ag->inst_tags[bool_br] = zir_tag; + ag->inst_datas[bool_br].pl_node.src_node + = (int32_t)node - (int32_t)gz->decl_node_index; + ag->inst_datas[bool_br].pl_node.payload_index = payload_index; + + return bool_br + ZIR_REF_START_INDEX; +} + +// Mirrors expr (AstGen.zig:634) — main expression dispatcher. +static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { + AstGenCtx* ag = gz->astgen; + if (node == 0) { + SET_ERROR(ag); + return ZIR_REF_VOID_VALUE; + } AstNodeTag tag = ag->tree->nodes.tags[node]; + AstData nd = ag->tree->nodes.datas[node]; switch (tag) { case AST_NODE_NUMBER_LITERAL: - return numberLiteral(ag, node); + return rvalue(gz, rl, numberLiteral(gz, node), node); case AST_NODE_BUILTIN_CALL_TWO: case AST_NODE_BUILTIN_CALL_TWO_COMMA: - return builtinCall(gz, node); + return rvalue(gz, rl, builtinCall(gz, scope, node), node); case AST_NODE_FIELD_ACCESS: - return fieldAccessExpr(gz, rl, node); + return fieldAccessExpr(gz, scope, rl, node); case AST_NODE_IDENTIFIER: - return identifierExpr(gz, rl, node); + return identifierExpr(gz, scope, rl, node); case AST_NODE_STRING_LITERAL: { // Mirrors stringLiteral (AstGen.zig:8626). 
uint32_t str_lit_token = ag->tree->nodes.main_tokens[node]; @@ -1646,55 +2670,84 @@ static uint32_t exprRl(GenZir* gz, ResultLoc rl, uint32_t node) { ZirInstData data; data.str.start = str_index; data.str.len = str_len; - return addInstruction(gz, ZIR_INST_STR, data); + uint32_t str_result = addInstruction(gz, ZIR_INST_STR, data); + return rvalue(gz, rl, str_result, node); } - // address_of (AstGen.zig:953): evaluate operand with .ref rl. + // address_of (AstGen.zig:953-960): evaluate operand with .ref rl. case AST_NODE_ADDRESS_OF: { uint32_t operand_node = ag->tree->nodes.datas[node].lhs; - return exprRl(gz, RL_REF_VAL, operand_node); + // Check for result type to emit validate_ref_ty (AstGen.zig:954-956). + uint32_t res_ty = rlResultType(gz, rl, node); + ResultLoc operand_rl; + if (res_ty != 0) { + addUnTok(gz, ZIR_INST_VALIDATE_REF_TY, res_ty, + firstToken(ag->tree, node)); + operand_rl = RL_REF_VAL; // simplified: skip ref_coerced_ty + } else { + operand_rl = RL_REF_VAL; + } + uint32_t result = exprRl(gz, scope, operand_rl, operand_node); + return rvalue(gz, rl, result, node); } // ptr_type (AstGen.zig:1077-1081). case AST_NODE_PTR_TYPE_ALIGNED: case AST_NODE_PTR_TYPE_SENTINEL: case AST_NODE_PTR_TYPE: case AST_NODE_PTR_TYPE_BIT_RANGE: - return ptrTypeExpr(gz, node); + return rvalue(gz, rl, ptrTypeExpr(gz, scope, node), node); // array_type (AstGen.zig:940). case AST_NODE_ARRAY_TYPE: - return arrayTypeExpr(gz, node); + return rvalue(gz, rl, arrayTypeExpr(gz, scope, node), node); // array_init variants (AstGen.zig:836-856). case AST_NODE_ARRAY_INIT: case AST_NODE_ARRAY_INIT_COMMA: case AST_NODE_ARRAY_INIT_ONE: case AST_NODE_ARRAY_INIT_ONE_COMMA: - return arrayInitExpr(gz, rl, node); + return arrayInitExpr(gz, scope, rl, node); // array_cat (AstGen.zig:772): ++ binary operator. 
case AST_NODE_ARRAY_CAT: - return simpleBinOp(gz, node, ZIR_INST_ARRAY_CAT); + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ARRAY_CAT), node); // grouped_expression (AstGen.zig:1100): passthrough. case AST_NODE_GROUPED_EXPRESSION: - return exprRl(gz, rl, ag->tree->nodes.datas[node].lhs); - // unreachable_literal (AstGen.zig:1012). - case AST_NODE_UNREACHABLE_LITERAL: + return exprRl(gz, scope, rl, ag->tree->nodes.datas[node].lhs); + // unreachable_literal (AstGen.zig:846-854). + case AST_NODE_UNREACHABLE_LITERAL: { + emitDbgNode(gz, node); + ZirInstData udata; + memset(&udata, 0, sizeof(udata)); + udata.unreachable_data.src_node + = (int32_t)node - (int32_t)gz->decl_node_index; + addInstruction(gz, ZIR_INST_UNREACHABLE, udata); return ZIR_REF_UNREACHABLE_VALUE; + } // enum_literal (AstGen.zig:993). case AST_NODE_ENUM_LITERAL: { uint32_t ident_token = ag->tree->nodes.main_tokens[node]; uint32_t str_index = identAsString(ag, ident_token); - return addStrTok(gz, ZIR_INST_ENUM_LITERAL, str_index, ident_token); + // If result type available, emit decl_literal (AstGen.zig:993-1003). + uint32_t res_ty = rlResultType(gz, rl, node); + if (res_ty != 0) { + uint32_t res = addPlNodeBin( + gz, ZIR_INST_DECL_LITERAL, node, res_ty, str_index); + return rvalue(gz, rl, res, node); + } + return rvalue(gz, rl, + addStrTok(gz, ZIR_INST_ENUM_LITERAL, str_index, ident_token), + node); } // multiline_string_literal (AstGen.zig:8645). case AST_NODE_MULTILINE_STRING_LITERAL: - return multilineStringLiteral(gz, node); + return rvalue(gz, rl, multilineStringLiteral(gz, scope, node), node); // return (AstGen.zig:856). case AST_NODE_RETURN: - return retExpr(gz, node); + return retExpr(gz, scope, node); // call (AstGen.zig:783-790). case AST_NODE_CALL_ONE: case AST_NODE_CALL_ONE_COMMA: case AST_NODE_CALL: case AST_NODE_CALL_COMMA: - return callExpr(gz, node); + return rvalue(gz, rl, callExpr(gz, scope, node), node); // struct_init (AstGen.zig:836-839). 
case AST_NODE_STRUCT_INIT_DOT_TWO: case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: @@ -1702,18 +2755,1518 @@ static uint32_t exprRl(GenZir* gz, ResultLoc rl, uint32_t node) { case AST_NODE_STRUCT_INIT_DOT_COMMA: case AST_NODE_STRUCT_INIT_ONE: case AST_NODE_STRUCT_INIT_ONE_COMMA: - return structInitExpr(gz, rl, node); + case AST_NODE_STRUCT_INIT: + case AST_NODE_STRUCT_INIT_COMMA: + return structInitExpr(gz, scope, rl, node); + // container_decl (AstGen.zig:1083-1098). + case AST_NODE_CONTAINER_DECL: + case AST_NODE_CONTAINER_DECL_TRAILING: + case AST_NODE_CONTAINER_DECL_TWO: + case AST_NODE_CONTAINER_DECL_TWO_TRAILING: + case AST_NODE_CONTAINER_DECL_ARG: + case AST_NODE_CONTAINER_DECL_ARG_TRAILING: + case AST_NODE_TAGGED_UNION: + case AST_NODE_TAGGED_UNION_TRAILING: + case AST_NODE_TAGGED_UNION_TWO: + case AST_NODE_TAGGED_UNION_TWO_TRAILING: + case AST_NODE_TAGGED_UNION_ENUM_TAG: + case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: + return rvalue(gz, rl, containerDecl(gz, scope, node), node); // try (AstGen.zig:831). case AST_NODE_TRY: - return tryExpr(gz, node); + return rvalue(gz, rl, tryExpr(gz, scope, node), node); + // Comparison operators (AstGen.zig:714-726). + case AST_NODE_EQUAL_EQUAL: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_EQ), node); + case AST_NODE_BANG_EQUAL: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_NEQ), node); + case AST_NODE_LESS_THAN: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_LT), node); + case AST_NODE_GREATER_THAN: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_GT), node); + case AST_NODE_LESS_OR_EQUAL: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_LTE), node); + case AST_NODE_GREATER_OR_EQUAL: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_GTE), node); + // Arithmetic (AstGen.zig:656-698). 
+ case AST_NODE_ADD: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADD), node); + case AST_NODE_SUB: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUB), node); + case AST_NODE_MUL: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MUL), node); + case AST_NODE_DIV: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_DIV), node); + case AST_NODE_MOD: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MOD), node); + // Bitwise (AstGen.zig:700-712). + case AST_NODE_BIT_AND: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_BIT_AND), node); + case AST_NODE_BIT_OR: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_BIT_OR), node); + case AST_NODE_BIT_XOR: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_XOR), node); + case AST_NODE_SHL: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SHL), node); + case AST_NODE_SHR: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SHR), node); + // Boolean operators (AstGen.zig:728-731) — special: boolBinOp. + case AST_NODE_BOOL_AND: + return rvalue( + gz, rl, boolBinOp(gz, scope, node, ZIR_INST_BOOL_BR_AND), node); + case AST_NODE_BOOL_OR: + return rvalue( + gz, rl, boolBinOp(gz, scope, node, ZIR_INST_BOOL_BR_OR), node); + // Unary operators (AstGen.zig:919-938). + case AST_NODE_BOOL_NOT: + return rvalue(gz, rl, + addUnNode(gz, ZIR_INST_BOOL_NOT, expr(gz, scope, nd.lhs), node), + node); + case AST_NODE_BIT_NOT: + return rvalue(gz, rl, + addUnNode(gz, ZIR_INST_BIT_NOT, expr(gz, scope, nd.lhs), node), + node); + case AST_NODE_NEGATION: + return rvalue(gz, rl, + addUnNode(gz, ZIR_INST_NEGATE, expr(gz, scope, nd.lhs), node), + node); + case AST_NODE_NEGATION_WRAP: + return rvalue(gz, rl, + addUnNode(gz, ZIR_INST_NEGATE_WRAP, expr(gz, scope, nd.lhs), node), + node); + // deref (AstGen.zig:942-951). 
+ case AST_NODE_DEREF: { + uint32_t lhs = expr(gz, scope, nd.lhs); + addUnNode(gz, ZIR_INST_VALIDATE_DEREF, lhs, node); + if (rl.tag == RL_REF) + return lhs; + return rvalue(gz, rl, addUnNode(gz, ZIR_INST_LOAD, lhs, node), node); + } + // optional_type (AstGen.zig:961-964). + case AST_NODE_OPTIONAL_TYPE: + return rvalue(gz, rl, + addUnNode( + gz, ZIR_INST_OPTIONAL_TYPE, expr(gz, scope, nd.lhs), node), + node); + // unwrap_optional (AstGen.zig:966-985). + case AST_NODE_UNWRAP_OPTIONAL: { + uint32_t lhs = expr(gz, scope, nd.lhs); + return rvalue(gz, rl, + addUnNode(gz, ZIR_INST_OPTIONAL_PAYLOAD_SAFE, lhs, node), node); + } + // error_union type (AstGen.zig:987-990). + case AST_NODE_ERROR_UNION: + return rvalue(gz, rl, + simpleBinOp(gz, scope, node, ZIR_INST_ERROR_UNION_TYPE), node); + // char_literal (AstGen.zig:8662-8675). + case AST_NODE_CHAR_LITERAL: { + uint32_t main_tok = ag->tree->nodes.main_tokens[node]; + uint32_t tok_start = ag->tree->tokens.starts[main_tok]; + // Parse the character after the opening quote. + char ch = ag->tree->source[tok_start + 1]; + return rvalue(gz, rl, addInt(gz, (uint64_t)(uint8_t)ch), node); + } + // arrayAccess (AstGen.zig:6192-6221). + case AST_NODE_ARRAY_ACCESS: { + if (rl.tag == RL_REF) { + uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs); + advanceSourceCursorToMainToken(ag, node); + uint32_t rhs = expr(gz, scope, nd.rhs); + emitDbgStmt( + gz, ag->source_line - gz->decl_line, ag->source_column); + return addPlNodeBin(gz, ZIR_INST_ELEM_PTR_NODE, node, lhs, rhs); + } + uint32_t lhs = expr(gz, scope, nd.lhs); + advanceSourceCursorToMainToken(ag, node); + uint32_t rhs = expr(gz, scope, nd.rhs); + emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column); + return rvalue(gz, rl, + addPlNodeBin(gz, ZIR_INST_ELEM_VAL_NODE, node, lhs, rhs), node); + } + // slice (AstGen.zig:882-939). 
+ case AST_NODE_SLICE_OPEN: { + uint32_t lhs = expr(gz, scope, nd.lhs); + uint32_t start = expr(gz, scope, nd.rhs); + return rvalue(gz, rl, + addPlNodeBin(gz, ZIR_INST_SLICE_START, node, lhs, start), node); + } + case AST_NODE_SLICE: { + // Slice[rhs]: { start, end } + const Ast* stree = ag->tree; + uint32_t lhs = expr(gz, scope, nd.lhs); + uint32_t start_node = stree->extra_data.arr[nd.rhs]; + uint32_t end_node = stree->extra_data.arr[nd.rhs + 1]; + uint32_t start_ref = expr(gz, scope, start_node); + uint32_t end_ref = expr(gz, scope, end_node); + ensureExtraCapacity(ag, 3); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = lhs; + ag->extra[ag->extra_len++] = start_ref; + ag->extra[ag->extra_len++] = end_ref; + ZirInstData data; + data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + data.pl_node.payload_index = payload_index; + return rvalue( + gz, rl, addInstruction(gz, ZIR_INST_SLICE_END, data), node); + } + case AST_NODE_SLICE_SENTINEL: { + // SliceSentinel[rhs]: { start, end, sentinel } + const Ast* stree = ag->tree; + uint32_t lhs = expr(gz, scope, nd.lhs); + uint32_t start_node = stree->extra_data.arr[nd.rhs]; + uint32_t end_node = stree->extra_data.arr[nd.rhs + 1]; + uint32_t sentinel_node = stree->extra_data.arr[nd.rhs + 2]; + uint32_t start_ref = expr(gz, scope, start_node); + uint32_t end_ref = expr(gz, scope, end_node); + uint32_t sentinel_ref = expr(gz, scope, sentinel_node); + ensureExtraCapacity(ag, 4); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = lhs; + ag->extra[ag->extra_len++] = start_ref; + ag->extra[ag->extra_len++] = end_ref; + ag->extra[ag->extra_len++] = sentinel_ref; + ZirInstData data; + data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index; + data.pl_node.payload_index = payload_index; + return rvalue( + gz, rl, addInstruction(gz, ZIR_INST_SLICE_SENTINEL, data), node); + } + // orelse (AstGen.zig:6031-6142). 
+ case AST_NODE_ORELSE: + return rvalue(gz, rl, orelseCatchExpr(gz, scope, node, false), node); + // catch (AstGen.zig:6031-6142). + case AST_NODE_CATCH: + return rvalue(gz, rl, orelseCatchExpr(gz, scope, node, true), node); + // Block expressions (AstGen.zig:984-992). + case AST_NODE_BLOCK_TWO: + case AST_NODE_BLOCK_TWO_SEMICOLON: + case AST_NODE_BLOCK: + case AST_NODE_BLOCK_SEMICOLON: + return blockExprExpr(gz, scope, rl, node); + // Anonymous array init (AstGen.zig:1119-1127). + case AST_NODE_ARRAY_INIT_DOT_TWO: + case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: + case AST_NODE_ARRAY_INIT_DOT: + case AST_NODE_ARRAY_INIT_DOT_COMMA: + return arrayInitDotExpr(gz, scope, rl, node); + // if (AstGen.zig:1013-1024). + case AST_NODE_IF_SIMPLE: + case AST_NODE_IF: + return rvalue(gz, rl, ifExpr(gz, scope, node), node); + // for (AstGen.zig:1043-1060). + case AST_NODE_FOR_SIMPLE: + case AST_NODE_FOR: + return rvalue(gz, rl, forExpr(gz, scope, node), node); + // Merge error sets (AstGen.zig:787). + case AST_NODE_MERGE_ERROR_SETS: + return rvalue(gz, rl, + simpleBinOp(gz, scope, node, ZIR_INST_MERGE_ERROR_SETS), node); + // Wrapping arithmetic. + case AST_NODE_ADD_WRAP: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADDWRAP), node); + case AST_NODE_SUB_WRAP: + return rvalue( + gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUBWRAP), node); + // break (AstGen.zig:2358). + case AST_NODE_BREAK: { + // break :label value + // lhs = OptionalTokenIndex to label (UINT32_MAX if none), + // rhs = node index for value (0 if none) + uint32_t value_node = nd.rhs; + uint32_t value_ref = ZIR_REF_VOID_VALUE; + if (value_node != 0) + value_ref = expr(gz, scope, value_node); + + // Find target block via scope chain (AstGen.zig:2359-2460). + uint32_t label_tok = nd.lhs; + if (label_tok != UINT32_MAX) { + // Labeled break: walk scope chain for ScopeLabel. 
+ uint32_t label_name = identAsString(ag, label_tok); + for (Scope* s = scope; s != NULL;) { + if (s->tag == SCOPE_LABEL) { + ScopeLabel* sl = (ScopeLabel*)s; + if (sl->label_name == label_name) { + addBreak(gz, ZIR_INST_BREAK, sl->block_inst, value_ref, + (int32_t)node - (int32_t)gz->decl_node_index); + return ZIR_REF_UNREACHABLE_VALUE; + } + s = sl->parent; + } else if (s->tag == SCOPE_GEN_ZIR) { + s = ((GenZir*)s)->parent; + } else if (s->tag == SCOPE_LOCAL_VAL) { + s = ((ScopeLocalVal*)s)->parent; + } else if (s->tag == SCOPE_LOCAL_PTR) { + s = ((ScopeLocalPtr*)s)->parent; + } else if (s->tag == SCOPE_DEFER_NORMAL + || s->tag == SCOPE_DEFER_ERROR) { + s = ((ScopeDefer*)s)->parent; + } else { + break; + } + } + } else { + // Unlabeled break: find innermost GenZir with break_block + // (AstGen.zig:2435-2460). + for (Scope* s = scope; s != NULL;) { + if (s->tag == SCOPE_GEN_ZIR) { + GenZir* gz2 = (GenZir*)s; + if (gz2->break_block != UINT32_MAX) { + addBreak(gz, ZIR_INST_BREAK, gz2->break_block, + value_ref, + (int32_t)node - (int32_t)gz->decl_node_index); + return ZIR_REF_UNREACHABLE_VALUE; + } + s = gz2->parent; + } else if (s->tag == SCOPE_LOCAL_VAL) { + s = ((ScopeLocalVal*)s)->parent; + } else if (s->tag == SCOPE_LOCAL_PTR) { + s = ((ScopeLocalPtr*)s)->parent; + } else if (s->tag == SCOPE_DEFER_NORMAL + || s->tag == SCOPE_DEFER_ERROR) { + s = ((ScopeDefer*)s)->parent; + } else if (s->tag == SCOPE_LABEL) { + s = ((ScopeLabel*)s)->parent; + } else { + break; + } + } + } + SET_ERROR(ag); + return ZIR_REF_UNREACHABLE_VALUE; + } + // continue (AstGen.zig:2246-2340). + case AST_NODE_CONTINUE: { + // Walk scope chain to find GenZir with continue_block. 
+ for (Scope* s = scope; s != NULL;) { + if (s->tag == SCOPE_GEN_ZIR) { + GenZir* gz2 = (GenZir*)s; + if (gz2->continue_block != UINT32_MAX) { + addBreak(gz, ZIR_INST_BREAK, gz2->continue_block, + ZIR_REF_VOID_VALUE, + (int32_t)node - (int32_t)gz->decl_node_index); + return ZIR_REF_UNREACHABLE_VALUE; + } + s = gz2->parent; + } else if (s->tag == SCOPE_LOCAL_VAL) { + s = ((ScopeLocalVal*)s)->parent; + } else if (s->tag == SCOPE_LOCAL_PTR) { + s = ((ScopeLocalPtr*)s)->parent; + } else if (s->tag == SCOPE_DEFER_NORMAL + || s->tag == SCOPE_DEFER_ERROR) { + s = ((ScopeDefer*)s)->parent; + } else if (s->tag == SCOPE_LABEL) { + s = ((ScopeLabel*)s)->parent; + } else { + break; + } + } + SET_ERROR(ag); + return ZIR_REF_UNREACHABLE_VALUE; + } + // comptime (AstGen.zig:1104-1105). + case AST_NODE_COMPTIME: { + // comptimeExprAst / comptimeExpr2 (AstGen.zig:2104, 1982). + uint32_t body_node = nd.lhs; + + // If already comptime, just pass through (AstGen.zig:1990-1992). + if (gz->is_comptime) + return exprRl(gz, scope, rl, body_node); + + // Create comptime block (AstGen.zig:2078-2098). + uint32_t block_inst + = makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node); + GenZir block_scope = makeSubBlock(gz, scope); + block_scope.is_comptime = true; + + // Transform RL to type-only (AstGen.zig:2084-2090). + // Runtime-to-comptime boundary: can't pass runtime pointers. + ResultLoc ty_only_rl; + uint32_t res_ty = rlResultType(gz, rl, node); + if (res_ty != 0) + ty_only_rl = (ResultLoc) { + .tag = RL_COERCED_TY, .data = res_ty, .src_node = 0 + }; + else + ty_only_rl = RL_NONE_VAL; + + uint32_t result = exprRl(&block_scope, scope, ty_only_rl, body_node); + addBreak(&block_scope, ZIR_INST_BREAK_INLINE, block_inst, result, + (int32_t)body_node - (int32_t)gz->decl_node_index); + setBlockBody(ag, &block_scope, block_inst); + gzAppendInstruction(gz, block_inst); + + // Apply rvalue to handle RL_PTR etc (AstGen.zig:2098). 
+ return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node); + } + // switch (AstGen.zig:1072-1078). + case AST_NODE_SWITCH: + case AST_NODE_SWITCH_COMMA: + return switchExpr(gz, scope, rl, node); + // while (AstGen.zig:1037-1042). + case AST_NODE_WHILE_SIMPLE: + case AST_NODE_WHILE_CONT: + case AST_NODE_WHILE: + return rvalue(gz, rl, whileExpr(gz, scope, node), node); + // error_value (AstGen.zig:1005-1010). + case AST_NODE_ERROR_VALUE: { + uint32_t error_token = nd.rhs; + uint32_t str = identAsString(ag, error_token); + return rvalue(gz, rl, + addStrTok(gz, ZIR_INST_ERROR_VALUE, str, error_token), node); + } + // error_set_decl (AstGen.zig:1131-1140). + case AST_NODE_ERROR_SET_DECL: { + // TODO: proper error set, for now just emit a placeholder. + SET_ERROR(ag); + return ZIR_REF_VOID_VALUE; + } + // assign in expr context (AstGen.zig:1011-1014). + case AST_NODE_ASSIGN: + assignStmt(gz, scope, node); + return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node); + // Compound assignment operators (AstGen.zig:685-744). 
+ case AST_NODE_ASSIGN_ADD: + assignOp(gz, scope, node, ZIR_INST_ADD); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_SUB: + assignOp(gz, scope, node, ZIR_INST_SUB); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_MUL: + assignOp(gz, scope, node, ZIR_INST_MUL); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_DIV: + assignOp(gz, scope, node, ZIR_INST_DIV); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_MOD: + assignOp(gz, scope, node, ZIR_INST_MOD_REM); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_BIT_AND: + assignOp(gz, scope, node, ZIR_INST_BIT_AND); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_BIT_OR: + assignOp(gz, scope, node, ZIR_INST_BIT_OR); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_BIT_XOR: + assignOp(gz, scope, node, ZIR_INST_XOR); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_ADD_WRAP: + assignOp(gz, scope, node, ZIR_INST_ADDWRAP); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_SUB_WRAP: + assignOp(gz, scope, node, ZIR_INST_SUBWRAP); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_MUL_WRAP: + assignOp(gz, scope, node, ZIR_INST_MULWRAP); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_ADD_SAT: + assignOp(gz, scope, node, ZIR_INST_ADD_SAT); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_SUB_SAT: + assignOp(gz, scope, node, ZIR_INST_SUB_SAT); + return ZIR_REF_VOID_VALUE; + case AST_NODE_ASSIGN_MUL_SAT: + assignOp(gz, scope, node, ZIR_INST_MUL_SAT); + return ZIR_REF_VOID_VALUE; default: - ag->has_compile_errors = true; + SET_ERROR(ag); return ZIR_REF_VOID_VALUE; } } -static uint32_t expr(GenZir* gz, uint32_t node) { - return exprRl(gz, RL_NONE_VAL, node); +static uint32_t expr(GenZir* gz, Scope* scope, uint32_t node) { + return exprRl(gz, scope, RL_NONE_VAL, node); +} + +// --- blockExprExpr (AstGen.zig:2388-2536) --- +// Handles block expressions (labeled and unlabeled). +// Unlabeled blocks just execute statements and return void. +// Labeled blocks (blk: { ... 
break :blk val; }) need a block instruction. + +static uint32_t blockExprExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { + (void)rl; + AstGenCtx* ag = gz->astgen; + const Ast* tree = ag->tree; + AstNodeTag tag = tree->nodes.tags[node]; + AstData nd = tree->nodes.datas[node]; + + // Extract statements. + uint32_t stmt_buf[2]; + const uint32_t* statements = NULL; + uint32_t stmt_count = 0; + + switch (tag) { + case AST_NODE_BLOCK_TWO: + case AST_NODE_BLOCK_TWO_SEMICOLON: { + uint32_t idx = 0; + if (nd.lhs != 0) + stmt_buf[idx++] = nd.lhs; + if (nd.rhs != 0) + stmt_buf[idx++] = nd.rhs; + statements = stmt_buf; + stmt_count = idx; + break; + } + case AST_NODE_BLOCK: + case AST_NODE_BLOCK_SEMICOLON: { + uint32_t start = nd.lhs; + uint32_t end = nd.rhs; + statements = tree->extra_data.arr + start; + stmt_count = end - start; + break; + } + default: + SET_ERROR(ag); + return ZIR_REF_VOID_VALUE; + } + + // Check if labeled (AstGen.zig:2397-2402). + // A labeled block has: identifier colon before the lbrace. + uint32_t lbrace = tree->nodes.main_tokens[node]; + bool is_labeled + = (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON + && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER); + + if (!is_labeled) { + if (!gz->is_comptime) { + // Non-comptime unlabeled block (AstGen.zig:2404-2425). + // Create block_inst FIRST, add to gz, then process body. + uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node); + gzAppendInstruction(gz, block_inst); + + GenZir block_scope = makeSubBlock(gz, scope); + blockExprStmts( + &block_scope, &block_scope.base, statements, stmt_count); + + if (!endsWithNoReturn(&block_scope)) { + // restore_err_ret_index on gz (AstGen.zig:2420). + ZirInstData rdata; + rdata.un_node.operand = block_inst + ZIR_REF_START_INDEX; + rdata.un_node.src_node + = (int32_t)node - (int32_t)gz->decl_node_index; + addInstruction( + gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata); + // break on block_scope (AstGen.zig:2422). 
+ addBreak(&block_scope, ZIR_INST_BREAK, block_inst, + ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE); + } + setBlockBody(ag, &block_scope, block_inst); + } else { + // Comptime unlabeled block: inline statements + // (AstGen.zig:2426-2429). + GenZir sub_gz = makeSubBlock(gz, scope); + blockExprStmts(&sub_gz, &sub_gz.base, statements, stmt_count); + } + return ZIR_REF_VOID_VALUE; + } + + // Labeled block (AstGen.zig:2466-2536). + // Create block instruction. + uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node); + + GenZir block_scope = makeSubBlock(gz, scope); + + // Create label scope so break :label can find the block_inst. + // These fields are read by breakExpr via scope chain walk. + uint32_t label_token = lbrace - 2; + ScopeLabel label_scope; + label_scope.base.tag = SCOPE_LABEL; + // cppcheck-suppress unreadVariable + label_scope.parent = &block_scope.base; + // cppcheck-suppress unreadVariable + label_scope.label_name = identAsString(ag, label_token); + // cppcheck-suppress unreadVariable + label_scope.block_inst = block_inst; + + // Process statements with label scope. + blockExprStmts(&block_scope, &label_scope.base, statements, stmt_count); + + // If we reach here without a break, the block evaluates to void. + uint32_t gz_len = gzInstructionsLen(&block_scope); + bool has_noreturn = false; + if (gz_len > 0) { + uint32_t last_inst = gzInstructionsSlice(&block_scope)[gz_len - 1]; + if (ag->inst_tags[last_inst] == ZIR_INST_BREAK + || ag->inst_tags[last_inst] == ZIR_INST_BREAK_INLINE) { + has_noreturn = true; + } + } + if (!has_noreturn) { + addBreak(&block_scope, ZIR_INST_BREAK, block_inst, ZIR_REF_VOID_VALUE, + AST_NODE_OFFSET_NONE); + } + + setBlockBody(ag, &block_scope, block_inst); + gzAppendInstruction(gz, block_inst); + return block_inst + ZIR_REF_START_INDEX; +} + +// --- arrayInitDotExpr (AstGen.zig:1576-1595) --- +// Handles anonymous array init: `.{a, b, c}`. +// Emits array_init_anon instruction with MultiOp payload. 
+ +static uint32_t arrayInitDotExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { + AstGenCtx* ag = gz->astgen; + const Ast* tree = ag->tree; + AstNodeTag tag = tree->nodes.tags[node]; + AstData nd = tree->nodes.datas[node]; + + // Extract elements. + uint32_t elem_buf[2]; + const uint32_t* elements = NULL; + uint32_t elem_count = 0; + + switch (tag) { + case AST_NODE_ARRAY_INIT_DOT_TWO: + case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: { + uint32_t idx = 0; + if (nd.lhs != 0) + elem_buf[idx++] = nd.lhs; + if (nd.rhs != 0) + elem_buf[idx++] = nd.rhs; + elements = elem_buf; + elem_count = idx; + break; + } + case AST_NODE_ARRAY_INIT_DOT: + case AST_NODE_ARRAY_INIT_DOT_COMMA: { + uint32_t start = nd.lhs; + uint32_t end = nd.rhs; + elements = tree->extra_data.arr + start; + elem_count = end - start; + break; + } + default: + SET_ERROR(ag); + return ZIR_REF_VOID_VALUE; + } + + // Dispatch based on RL (AstGen.zig:1515-1572). + switch (rl.tag) { + case RL_NONE: { + // arrayInitExprAnon (AstGen.zig:1576-1595). + ensureExtraCapacity(ag, 1 + elem_count); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = elem_count; + uint32_t extra_start = ag->extra_len; + ag->extra_len += elem_count; + for (uint32_t i = 0; i < elem_count; i++) { + uint32_t elem_ref = expr(gz, scope, elements[i]); + ag->extra[extra_start + i] = elem_ref; + } + return addPlNodePayloadIndex( + gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index); + } + case RL_TY: + case RL_COERCED_TY: { + // validate_array_init_result_ty + arrayInitExprTyped + // (AstGen.zig:1534-1539). + uint32_t result_ty = rl.data; + // Emit ArrayInit { ty, init_count } payload for + // validate_array_init_result_ty. 
+ ensureExtraCapacity(ag, 2); + uint32_t val_payload = ag->extra_len; + ag->extra[ag->extra_len++] = result_ty; + ag->extra[ag->extra_len++] = elem_count; + addPlNodePayloadIndex( + gz, ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY, node, val_payload); + + // arrayInitExprTyped (AstGen.zig:1598-1642) with elem_ty=none. + uint32_t operands_len = elem_count + 1; // +1 for type + ensureExtraCapacity(ag, 1 + operands_len); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = operands_len; + ag->extra[ag->extra_len++] = result_ty; + uint32_t extra_start = ag->extra_len; + ag->extra_len += elem_count; + for (uint32_t i = 0; i < elem_count; i++) { + // array_init_elem_type (AstGen.zig:1626-1632). + uint32_t elem_ty = addPlNodeBin( + gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, elements[i], result_ty, i); + ResultLoc elem_rl + = { .tag = RL_COERCED_TY, .data = elem_ty, .src_node = 0 }; + uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]); + ag->extra[extra_start + i] = elem_ref; + } + return addPlNodePayloadIndex( + gz, ZIR_INST_ARRAY_INIT, node, payload_index); + } + case RL_INFERRED_PTR: { + // arrayInitExprAnon + rvalue (AstGen.zig:1545-1551). + ensureExtraCapacity(ag, 1 + elem_count); + uint32_t payload_index = ag->extra_len; + ag->extra[ag->extra_len++] = elem_count; + uint32_t extra_start = ag->extra_len; + ag->extra_len += elem_count; + for (uint32_t i = 0; i < elem_count; i++) { + uint32_t elem_ref = expr(gz, scope, elements[i]); + ag->extra[extra_start + i] = elem_ref; + } + uint32_t result = addPlNodePayloadIndex( + gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index); + return rvalue(gz, rl, result, node); + } + case RL_DISCARD: { + // Evaluate and discard each element (AstGen.zig:1517-1522). + for (uint32_t i = 0; i < elem_count; i++) { + exprRl(gz, scope, RL_DISCARD_VAL, elements[i]); + } + return ZIR_REF_VOID_VALUE; + } + case RL_REF: { + // arrayInitExprAnon + ref (AstGen.zig:1523-1526). 
        // Anonymous array init: write elem_count followed by one ref per
        // element into `extra`. The element slots are reserved up front so
        // nested expr() calls (which may also append to `extra`) cannot
        // interleave with this payload; indices stay valid across growth.
        ensureExtraCapacity(ag, 1 + elem_count);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = elem_count;
        uint32_t extra_start = ag->extra_len;
        ag->extra_len += elem_count;
        for (uint32_t i = 0; i < elem_count; i++) {
            uint32_t elem_ref = expr(gz, scope, elements[i]);
            ag->extra[extra_start + i] = elem_ref;
        }
        uint32_t result = addPlNodePayloadIndex(
            gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
        return rvalue(gz, rl, result, node);
    }
    case RL_PTR:
        // TODO: arrayInitExprPtr (AstGen.zig:1541-1543).
        // For now, fall through to anon + rvalue.
        break;
    }

    // Fallback: anon init + rvalue (handles RL_PTR for now).
    ensureExtraCapacity(ag, 1 + elem_count);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = elem_count;
    uint32_t extra_start = ag->extra_len;
    ag->extra_len += elem_count;
    for (uint32_t i = 0; i < elem_count; i++) {
        uint32_t elem_ref = expr(gz, scope, elements[i]);
        ag->extra[extra_start + i] = elem_ref;
    }
    uint32_t result = addPlNodePayloadIndex(
        gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
    return rvalue(gz, rl, result, node);
}

// --- ifExpr (AstGen.zig:6300-6528) ---
// Handles if and if_simple expressions.
// Pattern: block_scope with condbr → then/else branches → setCondBrPayload.
//
// Emission order matters: the condition and condbr go into block_scope,
// which is unstacked into block_inst before the then/else sub-scopes are
// populated; setCondBrPayload finally moves both branch bodies into the
// condbr's extra payload.

static uint32_t ifExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];

    uint32_t cond_node = nd.lhs;
    uint32_t then_node, else_node;

    if (tag == AST_NODE_IF_SIMPLE) {
        then_node = nd.rhs;
        else_node = 0;
    } else {
        // AST_NODE_IF: rhs is index into extra → If{then_expr, else_expr}
        then_node = tree->extra_data.arr[nd.rhs];
        else_node = tree->extra_data.arr[nd.rhs + 1];
    }

    // Detect payload capture: if (cond) |x| (AstGen.zig Ast.fullIf).
    // payload_pipe = lastToken(cond_expr) + 2; if pipe → payload_token + 1.
    uint32_t payload_token = 0; // 0 = no payload
    uint32_t last_cond_tok = lastToken(tree, cond_node);
    uint32_t pipe_tok = last_cond_tok + 2;
    if (pipe_tok < tree->tokens.len
        && tree->tokens.tags[pipe_tok] == TOKEN_PIPE) {
        payload_token = pipe_tok + 1; // identifier token
    }

    // Detect error token: then_expr else |e| (AstGen.zig Ast.fullIf).
    uint32_t error_token = 0;
    if (else_node != 0) {
        uint32_t else_tok = lastToken(tree, then_node) + 1; // "else" keyword
        if (else_tok + 1 < tree->tokens.len
            && tree->tokens.tags[else_tok + 1] == TOKEN_PIPE) {
            error_token = else_tok + 2;
        }
    }

    // Create block_scope (AstGen.zig:6326-6328).
    GenZir block_scope = makeSubBlock(gz, scope);

    // Evaluate condition (AstGen.zig:6335-6363).
    uint32_t cond_inst; // the value (optional/err-union/bool)
    uint32_t bool_bit; // the boolean for condbr
    if (error_token != 0) {
        // Error union condition: if (err_union) |val| else |err|.
        cond_inst = expr(&block_scope, &block_scope.base, cond_node);
        bool_bit = addUnNode(
            &block_scope, ZIR_INST_IS_NON_ERR, cond_inst, cond_node);
    } else if (payload_token != 0) {
        // Optional condition: if (optional) |val|.
        cond_inst = expr(&block_scope, &block_scope.base, cond_node);
        bool_bit = addUnNode(
            &block_scope, ZIR_INST_IS_NON_NULL, cond_inst, cond_node);
    } else {
        // Bool condition (AstGen.zig:6356-6362).
        // NOTE(review): upstream evaluates this with a coerced bool-type
        // result location; here the raw expr result is used directly —
        // confirm the bool coercion is handled downstream.
        cond_inst = expr(&block_scope, &block_scope.base, cond_node);
        bool_bit = cond_inst;
    }

    uint32_t condbr = addCondBr(&block_scope, node);
    uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
    setBlockBody(ag, &block_scope, block_inst);
    gzAppendInstruction(gz, block_inst);

    // Then branch (AstGen.zig:6372-6441).
    GenZir then_scope = makeSubBlock(gz, scope);
    Scope* then_sub_scope = &then_scope.base;
    ScopeLocalVal payload_val_scope;
    memset(&payload_val_scope, 0, sizeof(payload_val_scope));

    if (error_token != 0 && payload_token != 0) {
        // Error union with payload: unwrap payload (AstGen.zig:6379-6407).
        uint32_t payload_inst = addUnNode(&then_scope,
            ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE, cond_inst, then_node);
        uint32_t ident_name = identAsString(ag, payload_token);
        payload_val_scope = (ScopeLocalVal) {
            .base = { .tag = SCOPE_LOCAL_VAL },
            .parent = &then_scope.base,
            .gen_zir = &then_scope,
            .inst = payload_inst,
            .token_src = payload_token,
            .name = ident_name,
        };
        addDbgVar(&then_scope, ZIR_INST_DBG_VAR_VAL, ident_name, payload_inst);
        then_sub_scope = &payload_val_scope.base;
    } else if (payload_token != 0) {
        // Optional with payload: unwrap optional (AstGen.zig:6408-6431).
        uint32_t payload_inst = addUnNode(&then_scope,
            ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE, cond_inst, then_node);
        uint32_t ident_name = identAsString(ag, payload_token);
        payload_val_scope = (ScopeLocalVal) {
            .base = { .tag = SCOPE_LOCAL_VAL },
            .parent = &then_scope.base,
            .gen_zir = &then_scope,
            .inst = payload_inst,
            .token_src = payload_token,
            .name = ident_name,
        };
        addDbgVar(&then_scope, ZIR_INST_DBG_VAR_VAL, ident_name, payload_inst);
        then_sub_scope = &payload_val_scope.base;
    }

    // Use fullBodyExpr for then body (AstGen.zig:6437).
    uint32_t then_result
        = fullBodyExpr(&then_scope, then_sub_scope, then_node);
    if (!endsWithNoReturn(&then_scope)) {
        addBreak(&then_scope, ZIR_INST_BREAK, block_inst, then_result,
            (int32_t)then_node - (int32_t)gz->decl_node_index);
    }

    // Else branch (AstGen.zig:6443-6489).
    GenZir else_scope = makeSubBlock(gz, scope);

    // save_err_ret_index (AstGen.zig:6448-6449).
    bool do_err_trace = ag->fn_ret_ty != 0 && error_token != 0;
    if (do_err_trace && nodeMayAppendToErrorTrace(tree, cond_node))
        addSaveErrRetIndex(&else_scope, ZIR_REF_NONE);

    if (else_node != 0) {
        Scope* else_sub_scope = &else_scope.base;
        ScopeLocalVal error_val_scope;
        memset(&error_val_scope, 0, sizeof(error_val_scope));

        if (error_token != 0) {
            // Error capture: else |err| (AstGen.zig:6452-6475).
            uint32_t err_inst = addUnNode(
                &else_scope, ZIR_INST_ERR_UNION_CODE, cond_inst, cond_node);
            uint32_t err_name = identAsString(ag, error_token);
            error_val_scope = (ScopeLocalVal) {
                .base = { .tag = SCOPE_LOCAL_VAL },
                .parent = &else_scope.base,
                .gen_zir = &else_scope,
                .inst = err_inst,
                .token_src = error_token,
                .name = err_name,
            };
            addDbgVar(&else_scope, ZIR_INST_DBG_VAR_VAL, err_name, err_inst);
            else_sub_scope = &error_val_scope.base;
        }

        // Use fullBodyExpr for else body (AstGen.zig:6478).
        uint32_t else_result
            = fullBodyExpr(&else_scope, else_sub_scope, else_node);
        if (!endsWithNoReturn(&else_scope)) {
            addBreak(&else_scope, ZIR_INST_BREAK, block_inst, else_result,
                (int32_t)else_node - (int32_t)gz->decl_node_index);
        }
    } else {
        // No else: the if-expression yields void on the false path.
        addBreak(&else_scope, ZIR_INST_BREAK, block_inst, ZIR_REF_VOID_VALUE,
            AST_NODE_OFFSET_NONE);
    }

    // Wire up condbr (AstGen.zig:6491).
    setCondBrPayload(ag, condbr, bool_bit, &then_scope, &else_scope);

    return block_inst + ZIR_REF_START_INDEX;
}

// --- forExpr (AstGen.zig:6819-7125) ---
// Handles for_simple and for (multi-input).
// Supports both indexable and for_range inputs.

#define FOR_MAX_INPUTS 16

static uint32_t forExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];
    AstNodeTag node_tag = tree->nodes.tags[node];

    // Extract input nodes and body/else nodes.
    // FOR_SIMPLE: lhs = input node, rhs = body (Ast.zig:1960-1968).
    // FOR: lhs = extra_data index, rhs = packed AstFor (Ast.zig:1970-1981).
    uint32_t input_nodes[FOR_MAX_INPUTS];
    uint32_t num_inputs;
    uint32_t body_node;
    if (node_tag == AST_NODE_FOR_SIMPLE) {
        input_nodes[0] = nd.lhs;
        num_inputs = 1;
        body_node = nd.rhs;
    } else {
        uint32_t extra_idx = nd.lhs;
        AstFor for_data;
        // rhs holds a bit-packed AstFor; memcpy avoids aliasing issues.
        memcpy(&for_data, &nd.rhs, sizeof(AstFor));
        num_inputs = for_data.inputs;
        if (num_inputs == 0 || num_inputs > FOR_MAX_INPUTS) {
            SET_ERROR(ag);
            return ZIR_REF_VOID_VALUE;
        }
        for (uint32_t i = 0; i < num_inputs; i++)
            input_nodes[i] = tree->extra_data.arr[extra_idx + i];
        body_node = tree->extra_data.arr[extra_idx + num_inputs];
    }

    // Per-input arrays (AstGen.zig:6858-6862).
    // indexables[i] == ZIR_REF_NONE means "counter starting at zero":
    // the loop index is used directly as the capture value.
    uint32_t indexables[FOR_MAX_INPUTS];
    uint32_t lens[FOR_MAX_INPUTS][2]; // [ref0, ref1] per input

    // Allocate index counter (AstGen.zig:6865-6874).
    uint32_t index_ptr
        = addUnNode(gz, ZIR_INST_ALLOC, ZIR_REF_USIZE_TYPE, node);
    addPlNodeBin(gz, ZIR_INST_STORE_NODE, node, index_ptr, ZIR_REF_ZERO_USIZE);

    // Compute payload_token (AstGen.zig fullForComponents:2349-2350).
    // payload_token = lastToken(inputs[last]) + 3 + has_comma
    uint32_t last_cond_tok = lastToken(tree, input_nodes[num_inputs - 1]);
    bool has_comma = (last_cond_tok + 1 < tree->tokens.len
        && tree->tokens.tags[last_cond_tok + 1] == TOKEN_COMMA);
    uint32_t payload_token = last_cond_tok + 3 + (has_comma ? 1 : 0);

    // Process each input (AstGen.zig:6878-6925).
    uint32_t capture_token = payload_token;
    for (uint32_t i = 0; i < num_inputs; i++) {
        uint32_t input = input_nodes[i];
        // Advance capture_token past this capture's ident (+comma).
        bool capture_is_ref
            = (tree->tokens.tags[capture_token] == TOKEN_ASTERISK);
        uint32_t ident_tok = capture_token + (capture_is_ref ? 1u : 0u);
        capture_token = ident_tok + 2; // skip ident + comma/pipe

        emitDbgNode(gz, input);

        if (tree->nodes.tags[input] == AST_NODE_FOR_RANGE) {
            // Range input (AstGen.zig:6892-6916).
            AstData range_nd = tree->nodes.datas[input];
            uint32_t start_node = range_nd.lhs;
            uint32_t end_node = range_nd.rhs;

            // AstGen.zig:6897-6902: expr with .rl = .{ .ty = .usize_type }
            ResultLoc usize_rl
                = { .tag = RL_TY, .data = ZIR_REF_USIZE_TYPE, .src_node = 0 };
            uint32_t start_val = exprRl(gz, scope, usize_rl, start_node);

            uint32_t end_val = ZIR_REF_NONE;
            if (end_node != 0) {
                end_val = exprRl(gz, scope, usize_rl, end_node);
            }

            // Unbounded range (`a..`) contributes no length; for_len must
            // then derive the trip count from the other inputs.
            // NOTE(review): upstream rejects a for statement whose inputs
            // are all unbounded counters — no such validation is visible
            // here; confirm for_len handles the all-NONE case.
            if (end_val == ZIR_REF_NONE) {
                lens[i][0] = ZIR_REF_NONE;
                lens[i][1] = ZIR_REF_NONE;
            } else {
                lens[i][0] = start_val;
                lens[i][1] = end_val;
            }

            // Check if start is trivially zero.
            bool start_is_zero = false;
            if (tree->nodes.tags[start_node] == AST_NODE_NUMBER_LITERAL) {
                uint32_t tok = tree->nodes.main_tokens[start_node];
                uint32_t ts = tree->tokens.starts[tok];
                if (tree->source[ts] == '0'
                    && (ts + 1 >= tree->source_len
                        || tree->source[ts + 1] < '0'
                        || tree->source[ts + 1] > '9'))
                    start_is_zero = true;
            }
            indexables[i] = start_is_zero ? ZIR_REF_NONE : start_val;
        } else {
            // Regular indexable (AstGen.zig:6918-6923).
            uint32_t indexable = expr(gz, scope, input);
            indexables[i] = indexable;
            lens[i][0] = indexable;
            lens[i][1] = ZIR_REF_NONE;
        }
    }

    // Emit for_len as MultiOp (AstGen.zig:6933-6942).
    uint32_t len;
    {
        uint32_t operands_len = num_inputs * 2;
        ensureExtraCapacity(ag, 1 + operands_len);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = operands_len;
        for (uint32_t i = 0; i < num_inputs; i++) {
            ag->extra[ag->extra_len++] = lens[i][0];
            ag->extra[ag->extra_len++] = lens[i][1];
        }
        ZirInstData data;
        data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        data.pl_node.payload_index = payload_index;
        len = addInstruction(gz, ZIR_INST_FOR_LEN, data);
    }

    // Create loop (AstGen.zig:6944-6956).
    uint32_t loop_inst = makeBlockInst(ag, ZIR_INST_LOOP, gz, node);

    GenZir loop_scope = makeSubBlock(gz, scope);

    // Load index (AstGen.zig:6955-6956).
    uint32_t index = addUnNode(&loop_scope, ZIR_INST_LOAD, index_ptr, node);

    // Condition: index < len (AstGen.zig:6962).
    uint32_t cond
        = addPlNodeBin(&loop_scope, ZIR_INST_CMP_LT, node, index, len);

    // Create condbr + block (AstGen.zig:6967-6974).
    GenZir cond_scope = makeSubBlock(&loop_scope, &loop_scope.base);
    uint32_t condbr = addCondBr(&cond_scope, node);
    uint32_t cond_block = makeBlockInst(ag, ZIR_INST_BLOCK, &loop_scope, node);
    setBlockBody(ag, &cond_scope, cond_block);
    // break_block must be set before the body is generated so `break`
    // statements inside the body can resolve their target.
    // NOTE(review): upstream also sets loop_scope.continue_block to
    // cond_block here (AstGen.zig ~6951); it is never set in this port —
    // confirm `continue` inside a for body resolves correctly.
    loop_scope.break_block = loop_inst;
    gzAppendInstruction(&loop_scope, cond_block);

    // Then branch: loop body (AstGen.zig:6982-7065).
    GenZir then_scope = makeSubBlock(&loop_scope, &loop_scope.base);

    // Set up capture scopes for all inputs (AstGen.zig:6986-7045).
    // Discarded (`_`) captures leave their capture_scopes slot unused;
    // the scope chain links only the non-discarded entries.
    ScopeLocalVal capture_scopes[FOR_MAX_INPUTS];
    Scope* body_scope_parent = &then_scope.base;
    {
        capture_token = payload_token;
        for (uint32_t i = 0; i < num_inputs; i++) {
            uint32_t input = input_nodes[i];
            bool capture_is_ref
                = (tree->tokens.tags[capture_token] == TOKEN_ASTERISK);
            uint32_t ident_tok = capture_token + (capture_is_ref ? 1u : 0u);
            capture_token = ident_tok + 2;

            // Check if discard (AstGen.zig:6999).
            // A lone '_' (not followed by an identifier character).
            uint32_t ts = tree->tokens.starts[ident_tok];
            bool is_discard = (tree->source[ts] == '_'
                && (ts + 1 >= tree->source_len
                    || !((tree->source[ts + 1] >= 'a'
                             && tree->source[ts + 1] <= 'z')
                        || (tree->source[ts + 1] >= 'A'
                            && tree->source[ts + 1] <= 'Z')
                        || tree->source[ts + 1] == '_'
                        || (tree->source[ts + 1] >= '0'
                            && tree->source[ts + 1] <= '9'))));
            if (is_discard)
                continue;

            // Compute capture inst (AstGen.zig:7004-7028).
            uint32_t capture_inst;
            bool is_counter = (tree->nodes.tags[input] == AST_NODE_FOR_RANGE);

            if (indexables[i] == ZIR_REF_NONE) {
                // Start=0 counter: use index directly.
                capture_inst = index;
            } else if (is_counter) {
                // Counter with nonzero start: add.
                capture_inst = addPlNodeBin(
                    &then_scope, ZIR_INST_ADD, input, indexables[i], index);
            } else if (capture_is_ref) {
                // Indexable by ref: elem_ptr.
                capture_inst = addPlNodeBin(&then_scope, ZIR_INST_ELEM_PTR,
                    input, indexables[i], index);
            } else {
                // Indexable by val: elem_val.
                capture_inst = addPlNodeBin(&then_scope, ZIR_INST_ELEM_VAL,
                    input, indexables[i], index);
            }

            uint32_t name_str = identAsString(ag, ident_tok);
            capture_scopes[i] = (ScopeLocalVal) {
                .base = { .tag = SCOPE_LOCAL_VAL },
                .parent = body_scope_parent,
                .gen_zir = &then_scope,
                .inst = capture_inst,
                .token_src = ident_tok,
                .name = name_str,
            };
            // AstGen.zig:7040.
            addDbgVar(
                &then_scope, ZIR_INST_DBG_VAR_VAL, name_str, capture_inst);
            body_scope_parent = &capture_scopes[i].base;
        }
    }

    // Execute body (AstGen.zig:7047).
    fullBodyExpr(&then_scope, body_scope_parent, body_node);
    addBreak(&then_scope, ZIR_INST_BREAK, cond_block, ZIR_REF_VOID_VALUE,
        AST_NODE_OFFSET_NONE);

    // Else branch: break out of loop (AstGen.zig:7066-7091).
    GenZir else_scope = makeSubBlock(&loop_scope, &loop_scope.base);
    addBreak(&else_scope, ZIR_INST_BREAK, loop_inst, ZIR_REF_VOID_VALUE,
        AST_NODE_OFFSET_NONE);

    setCondBrPayload(ag, condbr, cond, &then_scope, &else_scope);

    // Increment index (AstGen.zig:7096-7113).
    uint32_t index_plus_one = addPlNodeBin(
        &loop_scope, ZIR_INST_ADD_UNSAFE, node, index, ZIR_REF_ONE_USIZE);
    addPlNodeBin(
        &loop_scope, ZIR_INST_STORE_NODE, node, index_ptr, index_plus_one);

    // Repeat (AstGen.zig:7112).
    {
        ZirInstData repeat_data;
        memset(&repeat_data, 0, sizeof(repeat_data));
        repeat_data.node = (int32_t)node - (int32_t)loop_scope.decl_node_index;
        addInstruction(&loop_scope, ZIR_INST_REPEAT, repeat_data);
    }

    setBlockBody(ag, &loop_scope, loop_inst);
    gzAppendInstruction(gz, loop_inst);

    return loop_inst + ZIR_REF_START_INDEX;
}

// --- orelseCatchExpr (AstGen.zig:6031-6142) ---
// Handles `lhs orelse rhs` and `lhs catch rhs`.

static uint32_t orelseCatchExpr(
    GenZir* gz, Scope* scope, uint32_t node, bool is_catch) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];

    // Error-return-trace bookkeeping only applies to `catch` inside a
    // function that can return an error (fn_ret_ty set).
    bool do_err_trace = is_catch && ag->fn_ret_ty != 0;

    // Create block_scope (AstGen.zig:6062-6063).
    GenZir block_scope = makeSubBlock(gz, scope);

    // Evaluate operand in block_scope (AstGen.zig:6074).
    uint32_t operand = expr(&block_scope, &block_scope.base, nd.lhs);

    // Check condition in block_scope (AstGen.zig:6075).
    ZirInstTag test_tag
        = is_catch ? ZIR_INST_IS_NON_ERR : ZIR_INST_IS_NON_NULL;
    uint32_t condition = addUnNode(&block_scope, test_tag, operand, node);

    // condbr in block_scope (AstGen.zig:6076).
    uint32_t condbr = addCondBr(&block_scope, node);

    // Create block in parent gz (AstGen.zig:6078-6081).
    uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
    setBlockBody(ag, &block_scope, block_inst);
    // block_scope unstacked now.
    gzAppendInstruction(gz, block_inst);

    // Then branch: unwrap payload (AstGen.zig:6083-6092).
    GenZir then_scope = makeSubBlock(&block_scope, scope);
    ZirInstTag unwrap_tag = is_catch ? ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE
                                     : ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE;
    uint32_t unwrapped = addUnNode(&then_scope, unwrap_tag, operand, node);
    addBreak(&then_scope, ZIR_INST_BREAK, block_inst, unwrapped,
        (int32_t)node - (int32_t)gz->decl_node_index);

    // Else branch: evaluate RHS (AstGen.zig:6094-6131).
    GenZir else_scope = makeSubBlock(&block_scope, scope);

    // save_err_ret_index (AstGen.zig:6099-6100).
    if (do_err_trace && nodeMayAppendToErrorTrace(tree, nd.lhs))
        addSaveErrRetIndex(&else_scope, ZIR_REF_NONE);

    uint32_t else_result = expr(&else_scope, &else_scope.base, nd.rhs);
    if (!endsWithNoReturn(&else_scope)) {
        addBreak(&else_scope, ZIR_INST_BREAK, block_inst, else_result,
            (int32_t)nd.rhs - (int32_t)gz->decl_node_index);
    }

    setCondBrPayload(ag, condbr, condition, &then_scope, &else_scope);

    return block_inst + ZIR_REF_START_INDEX;
}

// --- whileExpr (AstGen.zig:6529-6805) ---
// Handles while_simple.
// Structure: loop { cond_block { cond, condbr }, repeat }
// condbr → then { continue_block { body, break continue }, break cond }
// → else { break loop }

static uint32_t whileExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];

    // WHILE_SIMPLE: lhs = cond_expr, rhs = body.
    uint32_t cond_node = nd.lhs;
    uint32_t body_node = nd.rhs;

    // Create loop instruction (AstGen.zig:6562-6564).
    uint32_t loop_inst = makeBlockInst(ag, ZIR_INST_LOOP, gz, node);
    gzAppendInstruction(gz, loop_inst);

    GenZir loop_scope = makeSubBlock(gz, scope);

    // Evaluate condition in cond_scope (AstGen.zig:6571-6607).
    // NOTE(review): while_simple only — no payload/error captures here,
    // and the raw expr result is used as the condbr condition (see the
    // bool-coercion note on ifExpr).
    GenZir cond_scope = makeSubBlock(&loop_scope, &loop_scope.base);
    uint32_t cond = expr(&cond_scope, &cond_scope.base, cond_node);

    // Create condbr + cond_block (AstGen.zig:6609-6615).
    uint32_t condbr = addCondBr(&cond_scope, node);
    uint32_t cond_block = makeBlockInst(ag, ZIR_INST_BLOCK, &loop_scope, node);
    setBlockBody(ag, &cond_scope, cond_block); // unstacks cond_scope
    gzAppendInstruction(&loop_scope, cond_block);

    // Create continue_block (AstGen.zig:6694).
    uint32_t continue_block
        = makeBlockInst(ag, ZIR_INST_BLOCK, &loop_scope, node);

    // Add repeat to loop_scope (AstGen.zig:6696-6697).
    {
        ZirInstData repeat_data;
        memset(&repeat_data, 0, sizeof(repeat_data));
        repeat_data.node = (int32_t)node - (int32_t)loop_scope.decl_node_index;
        addInstruction(&loop_scope, ZIR_INST_REPEAT, repeat_data);
    }

    // Set loop body and configure break/continue (AstGen.zig:6699-6701).
    // break/continue targets are assigned before the body is generated so
    // that break/continue statements inside it can resolve through the
    // scope chain (continue_scope → then_scope → cond_scope → loop_scope).
    setBlockBody(ag, &loop_scope, loop_inst); // unstacks loop_scope
    loop_scope.break_block = loop_inst;
    loop_scope.continue_block = continue_block;

    // Stack then_scope (AstGen.zig:6708-6709).
    GenZir then_scope = makeSubBlock(gz, &cond_scope.base);

    // Add continue_block to then_scope (AstGen.zig:6716).
    gzAppendInstruction(&then_scope, continue_block);

    // Create continue_scope inside then_scope (AstGen.zig:6725).
    GenZir continue_scope = makeSubBlock(&then_scope, &then_scope.base);

    // Execute body (AstGen.zig:6727-6730).
    emitDbgNode(&continue_scope, body_node);
    fullBodyExpr(&continue_scope, &continue_scope.base, body_node);

    // Break continue_block if not noreturn (AstGen.zig:6733-6744).
    if (!endsWithNoReturn(&continue_scope)) {
        addBreak(&continue_scope, ZIR_INST_BREAK, continue_block,
            ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
    }
    setBlockBody(ag, &continue_scope, continue_block);

    // Break cond_block from then_scope (AstGen.zig:6746).
    addBreak(&then_scope, ZIR_INST_BREAK, cond_block, ZIR_REF_VOID_VALUE,
        AST_NODE_OFFSET_NONE);

    // Else scope: break loop with void (AstGen.zig:6785-6788).
    GenZir else_scope = makeSubBlock(gz, &cond_scope.base);
    addBreak(&else_scope, ZIR_INST_BREAK, loop_inst, ZIR_REF_VOID_VALUE,
        AST_NODE_OFFSET_NONE);

    // Wire up condbr (AstGen.zig:6795).
    setCondBrPayload(ag, condbr, cond, &then_scope, &else_scope);

    return loop_inst + ZIR_REF_START_INDEX;
}

// --- switchExpr (AstGen.zig:7625-8117) ---
// Handles switch and switch_comma expressions.
// Encoding: switch_block pl_node with SwitchBlock extra payload.
+ +static uint32_t switchExpr( + GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) { + AstGenCtx* ag = gz->astgen; + const Ast* tree = ag->tree; + AstData nd = tree->nodes.datas[node]; + + // AST_NODE_SWITCH: lhs = condition node, rhs = extra index for SubRange. + // SubRange[rhs] = { cases_start, cases_end }. + // Case nodes are at extra_data[cases_start..cases_end]. + uint32_t cond_node = nd.lhs; + uint32_t extra_idx = nd.rhs; + uint32_t cases_start = tree->extra_data.arr[extra_idx]; + uint32_t cases_end = tree->extra_data.arr[extra_idx + 1]; + const uint32_t* case_nodes_arr = tree->extra_data.arr + cases_start; + uint32_t case_count = cases_end - cases_start; + + // Save operand source location before evaluating (AstGen.zig:7774-7775). + advanceSourceCursorToNode(ag, cond_node); + uint32_t operand_lc_line = ag->source_line - gz->decl_line; + uint32_t operand_lc_col = ag->source_column; + + // Evaluate switch operand (AstGen.zig:7777). + uint32_t cond_ref = expr(gz, scope, cond_node); + + // --- First pass: categorize cases (AstGen.zig:7671-7762) --- + uint32_t scalar_cases_len = 0; + uint32_t multi_cases_len = 0; + bool has_else = false; + + for (uint32_t ci = 0; ci < case_count; ci++) { + uint32_t cn = case_nodes_arr[ci]; + AstNodeTag ct = tree->nodes.tags[cn]; + AstData cd = tree->nodes.datas[cn]; + + switch (ct) { + case AST_NODE_SWITCH_CASE_ONE: + case AST_NODE_SWITCH_CASE_INLINE_ONE: + if (cd.lhs == 0) + has_else = true; + else if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE) + multi_cases_len++; + else + scalar_cases_len++; + break; + case AST_NODE_SWITCH_CASE: + case AST_NODE_SWITCH_CASE_INLINE: + multi_cases_len++; + break; + default: + break; + } + } + + // Sema expects a dbg_stmt immediately before switch_block + // (AstGen.zig:7806). 
+ emitDbgStmtForceCurrentIndex(gz, operand_lc_line, operand_lc_col); + // --- Create switch_block instruction (AstGen.zig:7809) --- + uint32_t switch_inst = makeBlockInst(ag, ZIR_INST_SWITCH_BLOCK, gz, node); + + // --- Single-pass evaluation in source order (AstGen.zig:7849-8027) --- + // Case table + payload buffer pattern (like upstream scratch). + // Table layout: [else?] [scalar_0..N] [multi_0..N] + // Each entry points to the start of that case's data in the buffer. + uint32_t table_size + = (has_else ? 1 : 0) + scalar_cases_len + multi_cases_len; + uint32_t else_tbl = 0; + uint32_t scalar_tbl = (has_else ? 1 : 0); + uint32_t multi_tbl = scalar_tbl + scalar_cases_len; + + uint32_t pay_cap = table_size + case_count * 16; + uint32_t* pay = malloc(pay_cap * sizeof(uint32_t)); + uint32_t pay_len = table_size; + + uint32_t scalar_ci = 0; + uint32_t multi_ci = 0; + + for (uint32_t ci = 0; ci < case_count; ci++) { + uint32_t cn = case_nodes_arr[ci]; + AstNodeTag ct = tree->nodes.tags[cn]; + AstData cd = tree->nodes.datas[cn]; + uint32_t hdr = pay_len; + uint32_t prong_info_slot = 0; + + // Ensure capacity for items (generous estimate). + if (pay_len + 32 > pay_cap) { + pay_cap *= 2; + uint32_t* p = realloc(pay, pay_cap * sizeof(uint32_t)); + if (!p) + abort(); + pay = p; + } + + switch (ct) { + case AST_NODE_SWITCH_CASE_ONE: + case AST_NODE_SWITCH_CASE_INLINE_ONE: + if (cd.lhs == 0) { + // Else: [prong_info, body...] + pay[else_tbl] = hdr; + prong_info_slot = pay_len++; + } else if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE) { + // Single range → multi case: + // [items_len=0, ranges_len=1, prong_info, first, last] + pay[multi_tbl + multi_ci++] = hdr; + pay[pay_len++] = 0; + pay[pay_len++] = 1; + prong_info_slot = pay_len++; + AstData rng = tree->nodes.datas[cd.lhs]; + pay[pay_len++] = comptimeExpr(gz, scope, rng.lhs); + pay[pay_len++] = comptimeExpr(gz, scope, rng.rhs); + } else { + // Scalar: [item_ref, prong_info, body...] 
+ pay[scalar_tbl + scalar_ci++] = hdr; + pay[pay_len++] = comptimeExpr(gz, scope, cd.lhs); + prong_info_slot = pay_len++; + } + break; + case AST_NODE_SWITCH_CASE: + case AST_NODE_SWITCH_CASE_INLINE: { + // Multi-item: SubRange[lhs] of items, rhs = body. + pay[multi_tbl + multi_ci++] = hdr; + uint32_t ist = tree->extra_data.arr[cd.lhs]; + uint32_t ien = tree->extra_data.arr[cd.lhs + 1]; + uint32_t nitems = 0, nranges = 0; + for (uint32_t j = ist; j < ien; j++) { + if (tree->nodes.tags[tree->extra_data.arr[j]] + == AST_NODE_SWITCH_RANGE) + nranges++; + else + nitems++; + } + pay[pay_len++] = nitems; + pay[pay_len++] = nranges; + prong_info_slot = pay_len++; + // Non-range items. + for (uint32_t j = ist; j < ien; j++) { + uint32_t item = tree->extra_data.arr[j]; + if (tree->nodes.tags[item] != AST_NODE_SWITCH_RANGE) { + if (pay_len + 2 > pay_cap) { + pay_cap *= 2; + uint32_t* p = realloc(pay, pay_cap * sizeof(uint32_t)); + if (!p) + abort(); + pay = p; + } + pay[pay_len++] = comptimeExpr(gz, scope, item); + } + } + // Range pairs. + for (uint32_t j = ist; j < ien; j++) { + uint32_t item = tree->extra_data.arr[j]; + if (tree->nodes.tags[item] == AST_NODE_SWITCH_RANGE) { + AstData rng = tree->nodes.datas[item]; + if (pay_len + 2 > pay_cap) { + pay_cap *= 2; + uint32_t* p = realloc(pay, pay_cap * sizeof(uint32_t)); + if (!p) + abort(); + pay = p; + } + pay[pay_len++] = comptimeExpr(gz, scope, rng.lhs); + pay[pay_len++] = comptimeExpr(gz, scope, rng.rhs); + } + } + break; + } + default: + continue; + } + + // Evaluate body (AstGen.zig:7997-8026). + uint32_t body_node = cd.rhs; + GenZir case_scope = makeSubBlock(gz, scope); + + // save_err_ret_index (AstGen.zig:7524-7525). 
+ if (ag->fn_ret_ty != 0 && nodeMayAppendToErrorTrace(tree, cond_node)) + addSaveErrRetIndex(&case_scope, ZIR_REF_NONE); + + uint32_t result = exprRl(&case_scope, &case_scope.base, rl, body_node); + if (!endsWithNoReturn(&case_scope)) { + addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, result, + (int32_t)body_node - (int32_t)gz->decl_node_index); + } + uint32_t body_len = gzInstructionsLen(&case_scope); + const uint32_t* body = gzInstructionsSlice(&case_scope); + + pay[prong_info_slot] = body_len & 0x0FFFFFFFu; + + if (pay_len + body_len > pay_cap) { + while (pay_len + body_len > pay_cap) + pay_cap *= 2; + uint32_t* p = realloc(pay, pay_cap * sizeof(uint32_t)); + if (!p) + abort(); + pay = p; + } + for (uint32_t i = 0; i < body_len; i++) + pay[pay_len++] = body[i]; + gzUnstack(&case_scope); + } + + // --- Serialize to extra in payload order (AstGen.zig:8036-8110) --- + ensureExtraCapacity(ag, + 2 + (uint32_t)(multi_cases_len > 0 ? 1 : 0) + pay_len - table_size); + uint32_t payload_index = ag->extra_len; + + ag->extra[ag->extra_len++] = cond_ref; + + uint32_t bits = 0; + if (multi_cases_len > 0) + bits |= 1u; + if (has_else) + bits |= (1u << 1); + bits |= (scalar_cases_len & 0x1FFFFFFu) << 7; + ag->extra[ag->extra_len++] = bits; + + if (multi_cases_len > 0) + ag->extra[ag->extra_len++] = multi_cases_len; + + // Else prong. + if (has_else) { + uint32_t si = pay[else_tbl]; + uint32_t bl = pay[si] & 0x0FFFFFFFu; + for (uint32_t i = 0; i < 1 + bl; i++) + ag->extra[ag->extra_len++] = pay[si + i]; + } + // Scalar cases. + for (uint32_t i = 0; i < scalar_cases_len; i++) { + uint32_t si = pay[scalar_tbl + i]; + uint32_t bl = pay[si + 1] & 0x0FFFFFFFu; + for (uint32_t j = 0; j < 2 + bl; j++) + ag->extra[ag->extra_len++] = pay[si + j]; + } + // Multi cases. 
+ for (uint32_t i = 0; i < multi_cases_len; i++) { + uint32_t si = pay[multi_tbl + i]; + uint32_t ni = pay[si]; + uint32_t nr = pay[si + 1]; + uint32_t bl = pay[si + 2] & 0x0FFFFFFFu; + uint32_t total = 3 + ni + nr * 2 + bl; + for (uint32_t j = 0; j < total; j++) + ag->extra[ag->extra_len++] = pay[si + j]; + } + + free(pay); + + ag->inst_datas[switch_inst].pl_node.payload_index = payload_index; + gzAppendInstruction(gz, switch_inst); + + return switch_inst + ZIR_REF_START_INDEX; } // --- rvalue (AstGen.zig:11029) --- @@ -1752,6 +4305,26 @@ static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column) { addInstruction(gz, ZIR_INST_DBG_STMT, data); } +// Mirrors emitDbgStmtForceCurrentIndex (AstGen.zig:13739-13760). +static void emitDbgStmtForceCurrentIndex( + GenZir* gz, uint32_t line, uint32_t column) { + AstGenCtx* ag = gz->astgen; + uint32_t gz_len = gzInstructionsLen(gz); + if (gz_len > 0 + && gzInstructionsSlice(gz)[gz_len - 1] == ag->inst_len - 1) { + uint32_t last = ag->inst_len - 1; + if (ag->inst_tags[last] == ZIR_INST_DBG_STMT) { + ag->inst_datas[last].dbg_stmt.line = line; + ag->inst_datas[last].dbg_stmt.column = column; + return; + } + } + ZirInstData data; + data.dbg_stmt.line = line; + data.dbg_stmt.column = column; + addInstruction(gz, ZIR_INST_DBG_STMT, data); +} + static void emitDbgNode(GenZir* gz, uint32_t node) { if (gz->is_comptime) return; @@ -1765,9 +4338,9 @@ static void emitDbgNode(GenZir* gz, uint32_t node) { // --- assign (AstGen.zig:3434) --- // Handles `_ = expr` discard pattern. 
static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node) {
    emitDbgNode(gz, infix_node);
    const AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[infix_node];
// (hunk boundary: the lines deriving `lhs`, `rhs` and `ident_tok` from `nd`
// are not visible in this view)
        uint32_t tok_start = tree->tokens.starts[ident_tok];
        // A lone '_' LHS (no identifier character after it) is a discard.
        if (tree->source[tok_start] == '_'
            && (tok_start + 1 >= tree->source_len
                || !((tree->source[tok_start + 1] >= 'a'
                    && tree->source[tok_start + 1] <= 'z')
                    || (tree->source[tok_start + 1] >= 'A'
                    && tree->source[tok_start + 1] <= 'Z')
                    || tree->source[tok_start + 1] == '_'
                    || (tree->source[tok_start + 1] >= '0'
                    && tree->source[tok_start + 1] <= '9')))) {
            // Discard: evaluate RHS with .discard result location.
            uint32_t result = expr(gz, scope, rhs);
            rvalueDiscard(gz, result, rhs);
            return;
        }
    }

    // Non-discard assignment: evaluate LHS as lvalue, store RHS.
    // (AstGen.zig:3448-3452).
    {
        uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs);
        uint32_t rhs_val = expr(gz, scope, rhs);
        addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, rhs_val);
    }
}

// --- assignOp (AstGen.zig:3731) ---
// Handles compound assignment operators (+=, -=, *=, etc.).
// op_tag is the ZIR arithmetic instruction for the underlying operator.

static void assignOp(
    GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag) {
    emitDbgNode(gz, infix_node);
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;

    AstData nd = tree->nodes.datas[infix_node];
    uint32_t lhs_node = nd.lhs;
    uint32_t rhs_node = nd.rhs;

    // Evaluate LHS as lvalue pointer (AstGen.zig:3742).
    uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs_node);

    // Advance cursor for add/sub/mul/div/mod_rem (AstGen.zig:3744-3747).
    uint32_t cursor_line = 0, cursor_col = 0;
    bool need_dbg = false;
    if (op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB
        || op_tag == ZIR_INST_MUL || op_tag == ZIR_INST_DIV
        || op_tag == ZIR_INST_MOD_REM) {
        if (!gz->is_comptime) {
            advanceSourceCursorToMainToken(ag, infix_node);
        }
        // NOTE(review): in a comptime scope the cursor is not advanced, so
        // these capture whatever position the cursor was last left at —
        // confirm emitDbgStmt is a no-op for comptime GenZir.
        cursor_line = ag->source_line - gz->decl_line;
        cursor_col = ag->source_column;
        need_dbg = true;
    }

    // Load current value (AstGen.zig:3748).
    uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);

    // Determine RHS result type (AstGen.zig:3750-3766).
    // For += / -= a dedicated extended instruction computes the in-place
    // arithmetic result type; all other operators just use typeof(lhs).
    uint32_t rhs_res_ty;
    if (op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB) {
        // Emit inplace_arith_result_ty extended instruction.
        uint16_t inplace_op
            = (op_tag == ZIR_INST_ADD) ? 0 : 1; // add_eq=0, sub_eq=1
        ZirInstData ext_data;
        memset(&ext_data, 0, sizeof(ext_data));
        ext_data.extended.opcode = (uint16_t)ZIR_EXT_INPLACE_ARITH_RESULT_TY;
        ext_data.extended.small = inplace_op;
        ext_data.extended.operand = lhs;
        rhs_res_ty = addInstruction(gz, ZIR_INST_EXTENDED, ext_data)
            + ZIR_REF_START_INDEX;
    } else {
        rhs_res_ty = addUnNode(gz, ZIR_INST_TYPEOF, lhs, infix_node);
    }

    // Evaluate RHS with type coercion (AstGen.zig:3768).
    uint32_t rhs_raw = expr(gz, scope, rhs_node);
    uint32_t rhs
        = addPlNodeBin(gz, ZIR_INST_AS_NODE, rhs_node, rhs_res_ty, rhs_raw);

    // Emit debug statement for arithmetic ops (AstGen.zig:3770-3775).
    if (need_dbg) {
        emitDbgStmt(gz, cursor_line, cursor_col);
    }

    // Emit the operation (AstGen.zig:3776-3779).
    uint32_t result = addPlNodeBin(gz, op_tag, infix_node, lhs, rhs);

    // Store result back (AstGen.zig:3780-3783).
    addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result);
}

// --- nodeMayEvalToError (AstGen.zig:10340) ---
// Three-way result: 0=never, 1=always, 2=maybe.
+#define EVAL_TO_ERROR_NEVER 0 +#define EVAL_TO_ERROR_ALWAYS 1 +#define EVAL_TO_ERROR_MAYBE 2 + +static int nodeMayEvalToError(const Ast* tree, uint32_t node) { + uint32_t n = node; + while (true) { + AstNodeTag tag = tree->nodes.tags[n]; + switch (tag) { + case AST_NODE_ERROR_VALUE: + return EVAL_TO_ERROR_ALWAYS; + // These may evaluate to errors. + case AST_NODE_IDENTIFIER: + case AST_NODE_FIELD_ACCESS: + case AST_NODE_DEREF: + case AST_NODE_ARRAY_ACCESS: + case AST_NODE_WHILE_SIMPLE: + case AST_NODE_WHILE_CONT: + case AST_NODE_WHILE: + case AST_NODE_FOR_SIMPLE: + case AST_NODE_FOR: + case AST_NODE_IF_SIMPLE: + case AST_NODE_IF: + case AST_NODE_SWITCH: + case AST_NODE_SWITCH_COMMA: + case AST_NODE_CALL_ONE: + case AST_NODE_CALL_ONE_COMMA: + case AST_NODE_CALL: + case AST_NODE_CALL_COMMA: + case AST_NODE_ASM_SIMPLE: + case AST_NODE_ASM_LEGACY: + case AST_NODE_ASM: + case AST_NODE_CATCH: + case AST_NODE_ORELSE: + return EVAL_TO_ERROR_MAYBE; + // Forward to sub-expression. + case AST_NODE_TRY: + case AST_NODE_COMPTIME: + case AST_NODE_NOSUSPEND: + n = tree->nodes.datas[n].lhs; + continue; + case AST_NODE_GROUPED_EXPRESSION: + case AST_NODE_UNWRAP_OPTIONAL: + n = tree->nodes.datas[n].lhs; + continue; + // Labeled blocks may need a memory location. + case AST_NODE_BLOCK_TWO: + case AST_NODE_BLOCK_TWO_SEMICOLON: + case AST_NODE_BLOCK: + case AST_NODE_BLOCK_SEMICOLON: { + uint32_t lbrace = tree->nodes.main_tokens[n]; + if (lbrace > 0 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON) + return EVAL_TO_ERROR_MAYBE; + return EVAL_TO_ERROR_NEVER; + } + // Builtins: simplified — return maybe for safety. 
+ case AST_NODE_BUILTIN_CALL: + case AST_NODE_BUILTIN_CALL_COMMA: + case AST_NODE_BUILTIN_CALL_TWO: + case AST_NODE_BUILTIN_CALL_TWO_COMMA: + return EVAL_TO_ERROR_MAYBE; + // Everything else: .never + default: + return EVAL_TO_ERROR_NEVER; + } + } +} + +// --- nodeMayAppendToErrorTrace (AstGen.zig:10315) --- +// Returns true if the expression may append to the error return trace. +static bool nodeMayAppendToErrorTrace(const Ast* tree, uint32_t node) { + uint32_t n = node; + while (true) { + AstNodeTag tag = tree->nodes.tags[n]; + switch (tag) { + // These don't call runtime functions. + case AST_NODE_ERROR_VALUE: + case AST_NODE_IDENTIFIER: + case AST_NODE_COMPTIME: + return false; + // Forward to sub-expression. + case AST_NODE_TRY: + case AST_NODE_NOSUSPEND: + n = tree->nodes.datas[n].lhs; + continue; + case AST_NODE_GROUPED_EXPRESSION: + case AST_NODE_UNWRAP_OPTIONAL: + n = tree->nodes.datas[n].lhs; + continue; + // Anything else: check if it may eval to error. + default: + return nodeMayEvalToError(tree, node) != EVAL_TO_ERROR_NEVER; + } + } +} + +// --- addSaveErrRetIndex (AstGen.zig:12556) --- +// Emits SAVE_ERR_RET_INDEX instruction. +// operand is the init inst ref (or ZIR_REF_NONE for .always). +static void addSaveErrRetIndex(GenZir* gz, uint32_t operand) { + ZirInstData data; + data.save_err_ret_index.operand = operand; + data.save_err_ret_index._pad = 0; + addInstruction(gz, ZIR_INST_SAVE_ERR_RET_INDEX, data); +} + +// --- varDecl (AstGen.zig:3189) --- +// Handles local const/var declarations. Returns new scope with the variable. +// scope_out: set to new scope if variable is added; unchanged otherwise. 

// gz: block receiving the emitted ZIR; scope: current innermost scope.
// Exactly one of val_out (const rvalue path) / ptr_out (alloc paths) is
// initialized on success, and *scope_out is updated to point at it; on
// error, *scope_out is left unchanged.
static void varDecl(GenZir* gz, Scope* scope, uint32_t node,
    ScopeLocalVal* val_out, ScopeLocalPtr* ptr_out, Scope** scope_out) {
    AstGenCtx* ag = gz->astgen;
    emitDbgNode(gz, node); // AstGen.zig:3196
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];
    AstNodeTag tag = tree->nodes.tags[node];

    uint32_t mut_token = tree->nodes.main_tokens[node];
    uint32_t name_token = mut_token + 1;
    // Distinguish "const" from "var" by the first source byte of the
    // mutability keyword token.
    bool is_const = (tree->source[tree->tokens.starts[mut_token]] == 'c');

    uint32_t ident_name = identAsString(ag, name_token);

    // Extract type_node and init_node based on variant.
    uint32_t type_node = 0;
    uint32_t init_node = 0;

    if (tag == AST_NODE_SIMPLE_VAR_DECL) {
        // lhs = type (optional), rhs = init (optional).
        type_node = nd.lhs;
        init_node = nd.rhs;
    } else if (tag == AST_NODE_LOCAL_VAR_DECL) {
        // lhs = extra_data index, rhs = init.
        // extra: {type_node, align_node, addrspace_node, section_node}
        // Simplified: just extract type_node.
        uint32_t extra_idx = nd.lhs;
        type_node = tree->extra_data.arr[extra_idx]; // type_node
        init_node = nd.rhs;
    } else if (tag == AST_NODE_ALIGNED_VAR_DECL) {
        // lhs = align expr, rhs = init.
        // No type node in this variant.
        // NOTE(review): the align expression (lhs) is never evaluated or
        // encoded here — confirm this simplification is intentional.
        init_node = nd.rhs;
    } else {
        // global_var_decl or unknown — bail.
        SET_ERROR(ag);
        return;
    }

    if (init_node == 0) {
        // Variables must be initialized (AstGen.zig:3228).
        SET_ERROR(ag);
        return;
    }

    if (is_const) {
        // --- CONST path (AstGen.zig:3232-3340) ---
        if (!nodesNeedRlContains(ag, node)) {
            // Rvalue path (AstGen.zig:3246-3271).
            // Evaluate type annotation if present (AstGen.zig:3248).
            if (type_node != 0)
                (void)typeExpr(gz, scope, type_node);

            // Evaluate init expression (AstGen.zig:3259-3264).
            uint32_t init_ref = expr(gz, scope, init_node);

            if (ag->has_compile_errors)
                return;

            // validate_const (AstGen.zig:3266).
            addUnNode(gz, ZIR_INST_VALIDATE_CONST, init_ref, init_node);

            // dbg_var_val (AstGen.zig:3269).
            addDbgVar(gz, ZIR_INST_DBG_VAR_VAL, ident_name, init_ref);

            // save_err_ret_index (AstGen.zig:3259-3260).
            if (nodeMayAppendToErrorTrace(tree, init_node))
                addSaveErrRetIndex(gz, init_ref);

            // Create ScopeLocalVal (AstGen.zig:3276-3284).
            val_out->base.tag = SCOPE_LOCAL_VAL;
            val_out->parent = *scope_out;
            val_out->gen_zir = gz;
            val_out->inst = init_ref;
            val_out->token_src = name_token;
            val_out->name = ident_name;
            *scope_out = &val_out->base;
        } else {
            // Alloc path (AstGen.zig:3277-3340).
            // The init expression needs a result pointer (nodes_need_rl).
            bool is_comptime_init = gz->is_comptime
                || tree->nodes.tags[init_node] == AST_NODE_COMPTIME;

            uint32_t var_ptr;
            bool resolve_inferred;

            if (type_node != 0) {
                // Typed const: alloc (AstGen.zig:3280).
                uint32_t type_ref = typeExpr(gz, scope, type_node);
                var_ptr = addUnNode(gz, ZIR_INST_ALLOC, type_ref, node);
                resolve_inferred = false;
            } else {
                // Inferred type: alloc_inferred (AstGen.zig:3291-3296).
                ZirInstTag alloc_tag = is_comptime_init
                    ? ZIR_INST_ALLOC_INFERRED_COMPTIME
                    : ZIR_INST_ALLOC_INFERRED;
                ZirInstData adata;
                // src_node is stored relative to the declaration node.
                adata.node = (int32_t)node - (int32_t)gz->decl_node_index;
                var_ptr = addInstruction(gz, alloc_tag, adata);
                resolve_inferred = true;
            }

            // Evaluate init with RL pointing to alloc (AstGen.zig:3313-3316).
            ResultLoc init_rl;
            if (type_node != 0) {
                init_rl.tag = RL_PTR;
                init_rl.data = var_ptr;
                init_rl.src_node = node;
            } else {
                init_rl.tag = RL_INFERRED_PTR;
                init_rl.data = var_ptr;
                init_rl.src_node = 0;
            }
            uint32_t init_ref = exprRl(gz, scope, init_rl, init_node);

            if (ag->has_compile_errors)
                return;

            // save_err_ret_index (AstGen.zig:3320-3321).
            if (nodeMayAppendToErrorTrace(tree, init_node))
                addSaveErrRetIndex(gz, init_ref);

            // resolve_inferred_alloc or make_ptr_const (AstGen.zig:3323-3326).
            uint32_t const_ptr;
            if (resolve_inferred)
                const_ptr = addUnNode(
                    gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, var_ptr, node);
            else
                const_ptr
                    = addUnNode(gz, ZIR_INST_MAKE_PTR_CONST, var_ptr, node);

            // dbg_var_ptr (AstGen.zig:3328).
            addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, const_ptr);

            // Create ScopeLocalPtr (AstGen.zig:3330-3340).
            ptr_out->base.tag = SCOPE_LOCAL_PTR;
            ptr_out->parent = *scope_out;
            ptr_out->gen_zir = gz;
            ptr_out->ptr = const_ptr;
            ptr_out->token_src = name_token;
            ptr_out->name = ident_name;
            ptr_out->maybe_comptime = true;
            *scope_out = &ptr_out->base;
        }
    } else {
        // --- VAR path (AstGen.zig:3342-3416) ---

        uint32_t alloc_ref;
        bool resolve_inferred = false;

        if (type_node != 0) {
            // Typed var: alloc_mut (AstGen.zig:3361-3375).
            uint32_t type_ref = typeExpr(gz, scope, type_node);
            ZirInstTag alloc_tag = gz->is_comptime
                ? ZIR_INST_ALLOC_COMPTIME_MUT
                : ZIR_INST_ALLOC_MUT;
            alloc_ref = addUnNode(gz, alloc_tag, type_ref, node);
        } else {
            // Inferred type var: alloc_inferred_mut
            // (AstGen.zig:3384-3392).
            ZirInstTag alloc_tag = gz->is_comptime
                ? ZIR_INST_ALLOC_INFERRED_COMPTIME_MUT
                : ZIR_INST_ALLOC_INFERRED_MUT;
            ZirInstData adata;
            adata.node = (int32_t)node - (int32_t)gz->decl_node_index;
            alloc_ref = addInstruction(gz, alloc_tag, adata);
            resolve_inferred = true;
        }

        // Evaluate init with RL pointing to alloc (AstGen.zig:3395-3402).
        ResultLoc var_init_rl;
        if (type_node != 0) {
            var_init_rl.tag = RL_PTR;
            var_init_rl.data = alloc_ref;
            var_init_rl.src_node = node;
        } else {
            var_init_rl.tag = RL_INFERRED_PTR;
            var_init_rl.data = alloc_ref;
            var_init_rl.src_node = 0;
        }
        uint32_t init_ref = exprRl(gz, scope, var_init_rl, init_node);
        (void)init_ref; // stored through the result pointer; ref unused

        if (ag->has_compile_errors)
            return;

        // resolve_inferred_alloc if type was inferred
        // (AstGen.zig:3407-3408).
        uint32_t final_ptr = alloc_ref;
        if (resolve_inferred)
            final_ptr = addUnNode(
                gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, alloc_ref, node);

        // dbg_var_ptr (AstGen.zig:3411).
        addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, final_ptr);

        // Create ScopeLocalPtr (AstGen.zig:3413-3422).
        ptr_out->base.tag = SCOPE_LOCAL_PTR;
        ptr_out->parent = *scope_out;
        ptr_out->gen_zir = gz;
        ptr_out->ptr = final_ptr;
        ptr_out->token_src = name_token;
        ptr_out->name = ident_name;
        ptr_out->maybe_comptime = gz->is_comptime;
        *scope_out = &ptr_out->base;
    }
}

// --- addEnsureResult (AstGen.zig:2649) ---
// After evaluating an expression as a statement, optionally emits
// ensure_result_used. For call/field_call, sets flag in extra data instead.
// elide_check is true when the result is statically known to be void or
// noreturn (no check needed).
static void addEnsureResult(
    GenZir* gz, uint32_t maybe_unused_result, uint32_t statement) {
    AstGenCtx* ag = gz->astgen;
    bool elide_check;
    if (maybe_unused_result >= ZIR_REF_START_INDEX) {
        // Result refers to an actual instruction: dispatch on its tag.
        uint32_t inst = maybe_unused_result - ZIR_REF_START_INDEX;
        ZirInstTag tag = ag->inst_tags[inst];
        switch (tag) {
        // For call/field_call: set ensure_result_used flag (bit 3).
        case ZIR_INST_CALL: {
            uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
            ag->extra[pi + 1] |= (1u << 3); // ensure_result_used
            elide_check = true;
            break;
        }
        case ZIR_INST_FIELD_CALL: {
            uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
            ag->extra[pi + 2] |= (1u << 3); // ensure_result_used
            elide_check = true;
            break;
        }
        case ZIR_INST_BUILTIN_CALL: {
            uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
            ag->extra[pi + 1] |= (1u << 3); // ensure_result_used
            elide_check = true;
            break;
        }
        // Always noreturn → elide.
        case ZIR_INST_BREAK:
        case ZIR_INST_BREAK_INLINE:
        case ZIR_INST_CONDBR:
        case ZIR_INST_CONDBR_INLINE:
        case ZIR_INST_RET_NODE:
        case ZIR_INST_RET_LOAD:
        case ZIR_INST_RET_IMPLICIT:
        case ZIR_INST_RET_ERR_VALUE:
        case ZIR_INST_UNREACHABLE:
        case ZIR_INST_REPEAT:
        case ZIR_INST_REPEAT_INLINE:
        case ZIR_INST_PANIC:
        case ZIR_INST_TRAP:
        case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
        case ZIR_INST_SWITCH_CONTINUE:
            elide_check = true;
            break;
        // Always void → elide.
        case ZIR_INST_DBG_STMT:
        case ZIR_INST_DBG_VAR_PTR:
        case ZIR_INST_DBG_VAR_VAL:
        case ZIR_INST_ENSURE_RESULT_USED:
        case ZIR_INST_ENSURE_RESULT_NON_ERROR:
        case ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID:
        case ZIR_INST_EXPORT:
        case ZIR_INST_SET_EVAL_BRANCH_QUOTA:
        case ZIR_INST_ATOMIC_STORE:
        case ZIR_INST_STORE_NODE:
        case ZIR_INST_STORE_TO_INFERRED_PTR:
        case ZIR_INST_RESOLVE_INFERRED_ALLOC:
        case ZIR_INST_SET_RUNTIME_SAFETY:
        case ZIR_INST_MEMCPY:
        case ZIR_INST_MEMSET:
        case ZIR_INST_MEMMOVE:
        case ZIR_INST_VALIDATE_DEREF:
        case ZIR_INST_VALIDATE_DESTRUCTURE:
        case ZIR_INST_SAVE_ERR_RET_INDEX:
        case ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL:
        case ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY:
        case ZIR_INST_VALIDATE_STRUCT_INIT_TY:
        case ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY:
        case ZIR_INST_VALIDATE_PTR_STRUCT_INIT:
        case ZIR_INST_VALIDATE_ARRAY_INIT_TY:
        case ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY:
        case ZIR_INST_VALIDATE_PTR_ARRAY_INIT:
        case ZIR_INST_VALIDATE_REF_TY:
        case ZIR_INST_VALIDATE_CONST:
            elide_check = true;
            break;
        // Extended: check opcode.
        case ZIR_INST_EXTENDED: {
            uint32_t opcode = ag->inst_datas[inst].extended.opcode;
            elide_check = (opcode == ZIR_EXT_BREAKPOINT
                || opcode == ZIR_EXT_BRANCH_HINT
                || opcode == ZIR_EXT_SET_FLOAT_MODE
                || opcode == ZIR_EXT_DISABLE_INSTRUMENTATION
                || opcode == ZIR_EXT_DISABLE_INTRINSICS);
            break;
        }
        // Everything else: might produce non-void result → emit check.
        default:
            elide_check = false;
            break;
        }
    } else {
        // Named ref constant: only void/unreachable skip the check.
        elide_check = (maybe_unused_result == ZIR_REF_UNREACHABLE_VALUE
            || maybe_unused_result == ZIR_REF_VOID_VALUE);
    }
    if (!elide_check) {
        addUnNode(
            gz, ZIR_INST_ENSURE_RESULT_USED, maybe_unused_result, statement);
    }
}

// --- blockExprStmts (AstGen.zig:2538) ---
// Processes block statements sequentially, threading scope.
static void blockExprStmts(GenZir* gz, Scope* scope,
    const uint32_t* statements, uint32_t stmt_count) {
    AstGenCtx* ag = gz->astgen;
    // Stack-allocated scope storage for local variables and defers.
    // Max 64 local variable declarations and 64 defers per block.
    // NOTE(review): these arrays live on this frame's stack; the scope
    // chain built here must not outlive this call — confirm callers only
    // use it during statement processing.
    ScopeLocalVal val_scopes[64];
    ScopeLocalPtr ptr_scopes[64];
    ScopeDefer defer_scopes[64];
    uint32_t val_idx = 0;
    uint32_t ptr_idx = 0;
    uint32_t defer_idx = 0;
    Scope* cur_scope = scope;
    for (uint32_t i = 0; i < stmt_count; i++) {
        // Stop at the first error; later statements would see bogus state.
        if (ag->has_compile_errors)
            return;
        uint32_t stmt = statements[i];
        AstNodeTag tag = ag->tree->nodes.tags[stmt];
        switch (tag) {
        case AST_NODE_ASSIGN:
            assignStmt(gz, cur_scope, stmt);
            break;
        // Compound assignment operators (AstGen.zig:2588-2607).
        case AST_NODE_ASSIGN_ADD:
            assignOp(gz, cur_scope, stmt, ZIR_INST_ADD);
            break;
        case AST_NODE_ASSIGN_SUB:
            assignOp(gz, cur_scope, stmt, ZIR_INST_SUB);
            break;
        case AST_NODE_ASSIGN_MUL:
            assignOp(gz, cur_scope, stmt, ZIR_INST_MUL);
            break;
        case AST_NODE_ASSIGN_DIV:
            assignOp(gz, cur_scope, stmt, ZIR_INST_DIV);
            break;
        case AST_NODE_ASSIGN_MOD:
            assignOp(gz, cur_scope, stmt, ZIR_INST_MOD_REM);
            break;
        case AST_NODE_ASSIGN_BIT_AND:
            assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_AND);
            break;
        case AST_NODE_ASSIGN_BIT_OR:
            assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_OR);
            break;
        case AST_NODE_ASSIGN_BIT_XOR:
            assignOp(gz, cur_scope, stmt, ZIR_INST_XOR);
            break;
        case AST_NODE_ASSIGN_ADD_WRAP:
            assignOp(gz, cur_scope, stmt, ZIR_INST_ADDWRAP);
            break;
        case AST_NODE_ASSIGN_SUB_WRAP:
            assignOp(gz, cur_scope, stmt, ZIR_INST_SUBWRAP);
            break;
        case AST_NODE_ASSIGN_MUL_WRAP:
            assignOp(gz, cur_scope, stmt, ZIR_INST_MULWRAP);
            break;
        case AST_NODE_ASSIGN_ADD_SAT:
            assignOp(gz, cur_scope, stmt, ZIR_INST_ADD_SAT);
            break;
        case AST_NODE_ASSIGN_SUB_SAT:
            assignOp(gz, cur_scope, stmt, ZIR_INST_SUB_SAT);
            break;
        case AST_NODE_ASSIGN_MUL_SAT:
            assignOp(gz, cur_scope, stmt, ZIR_INST_MUL_SAT);
            break;
        case AST_NODE_SIMPLE_VAR_DECL:
        case AST_NODE_LOCAL_VAR_DECL:
        case AST_NODE_ALIGNED_VAR_DECL:
            // NOTE(review): requiring BOTH val_idx < 64 AND ptr_idx < 64
            // is stricter than necessary — varDecl consumes only one of
            // the two slots; confirm this over-approximation is intended.
            if (val_idx < 64 && ptr_idx < 64) {
                varDecl(gz, cur_scope, stmt, &val_scopes[val_idx],
                    &ptr_scopes[ptr_idx], &cur_scope);
                // Check which one was used: if scope now points to
                // val_scopes[val_idx], advance val_idx; same for ptr.
                if (cur_scope == &val_scopes[val_idx].base)
                    val_idx++;
                else if (cur_scope == &ptr_scopes[ptr_idx].base)
                    ptr_idx++;
            } else {
                SET_ERROR(ag);
            }
            break;
        // defer/errdefer (AstGen.zig:2580-2581).
        case AST_NODE_DEFER:
        case AST_NODE_ERRDEFER: {
            if (defer_idx >= 64) {
                SET_ERROR(ag);
                break;
            }
            ScopeTag scope_tag = (tag == AST_NODE_DEFER) ? SCOPE_DEFER_NORMAL
                                                         : SCOPE_DEFER_ERROR;
            // Create sub-block for defer body (AstGen.zig:3123-3126).
            GenZir defer_gen = makeSubBlock(gz, cur_scope);

            // Evaluate deferred expression (AstGen.zig:3165).
            // DEFER: lhs is the deferred expression, rhs = 0.
            // ERRDEFER: lhs is optional error capture token, rhs is expr.
            // NOTE(review): the errdefer error-capture token (dnd.lhs) is
            // never bound into scope here — |err| captures will not
            // resolve; confirm whether this simplification is acceptable.
            AstData dnd = ag->tree->nodes.datas[stmt];
            uint32_t expr_node;
            if (tag == AST_NODE_DEFER) {
                expr_node = dnd.lhs;
            } else {
                expr_node = dnd.rhs;
            }
            // NOTE(review): result of the defer body is discarded with no
            // ensure_result_used — confirm against AstGen.zig:3165.
            expr(&defer_gen, &defer_gen.base, expr_node);

            // Add break_inline at end (AstGen.zig:3167).
            addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0, ZIR_REF_VOID_VALUE,
                AST_NODE_OFFSET_NONE);

            // Write body to extra (AstGen.zig:3173-3175).
            uint32_t raw_body_len = gzInstructionsLen(&defer_gen);
            const uint32_t* body = gzInstructionsSlice(&defer_gen);
            uint32_t extra_index = ag->extra_len;
            uint32_t fixup_len
                = countBodyLenAfterFixups(ag, body, raw_body_len);
            ensureExtraCapacity(ag, fixup_len);
            for (uint32_t b = 0; b < raw_body_len; b++)
                appendPossiblyRefdBodyInst(ag, body[b]);
            gzUnstack(&defer_gen);

            // Create scope (AstGen.zig:3179-3185).
            defer_scopes[defer_idx] = (ScopeDefer) {
                .base = { .tag = scope_tag },
                .parent = cur_scope,
                .index = extra_index,
                .len = fixup_len,
            };
            cur_scope = &defer_scopes[defer_idx].base;
            defer_idx++;
            break;
        }
        // while/for as statements (AstGen.zig:2605-2610).
        // These do NOT get emitDbgNode; they emit their own dbg_stmt.
        case AST_NODE_WHILE_SIMPLE:
        case AST_NODE_WHILE_CONT:
        case AST_NODE_WHILE:
            (void)whileExpr(gz, cur_scope, stmt);
            break;
        case AST_NODE_FOR_SIMPLE:
        case AST_NODE_FOR:
            (void)forExpr(gz, cur_scope, stmt);
            break;
        default: {
            // Expression statement (AstGen.zig:2627 unusedResultExpr).
            emitDbgNode(gz, stmt);
            uint32_t result = expr(gz, cur_scope, stmt);
            addEnsureResult(gz, result, stmt);
            break;
        }
        }
    }
}

// --- fullBodyExpr (AstGen.zig:2358) ---
// Processes a body expression. If it's an unlabeled block, processes
// statements inline without creating a BLOCK instruction (unlike blockExprExpr
// which wraps in BLOCK). Returns the result ref.
static uint32_t fullBodyExpr(GenZir* gz, Scope* scope, uint32_t node) {
    const Ast* tree = gz->astgen->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    // NOTE(review): the middle of this function (the switch on `tag` that
    // extracts `statements`/`stmt_count` for the block_* variants) is not
    // visible in this diff chunk — the surrounding unedited lines are
    // preserved as-is.
        break;
    }
    default:
        // Not a block — treat as single expression (AstGen.zig:2369).
        return expr(gz, scope, node);
    }

    // Check if labeled (AstGen.zig:2373-2377).
    uint32_t lbrace = tree->nodes.main_tokens[node];
    bool is_labeled
        = (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
            && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER);
    if (is_labeled) {
        // Labeled blocks need a proper block instruction.
        return blockExprExpr(gz, scope, RL_NONE_VAL, node);
    }

    // Unlabeled block: process statements inline (AstGen.zig:2380-2383).
    GenZir sub_gz = makeSubBlock(gz, scope);
    blockExprStmts(&sub_gz, &sub_gz.base, statements, stmt_count);
    return ZIR_REF_VOID_VALUE;
}

// --- lastToken (Ast.zig:874) ---
// (function definition continues in the following, unedited lines)
    // Binary ops: recurse into RHS (Ast.zig:893-948).
case AST_NODE_ASSIGN: + case AST_NODE_ADD: + case AST_NODE_SUB: + case AST_NODE_MUL: + case AST_NODE_DIV: + case AST_NODE_MOD: + case AST_NODE_BIT_AND: + case AST_NODE_BIT_OR: + case AST_NODE_BIT_XOR: + case AST_NODE_SHL: + case AST_NODE_SHR: + case AST_NODE_ARRAY_CAT: + case AST_NODE_ARRAY_MULT: + case AST_NODE_ADD_WRAP: + case AST_NODE_SUB_WRAP: + case AST_NODE_ADD_SAT: + case AST_NODE_SUB_SAT: + case AST_NODE_MUL_WRAP: + case AST_NODE_MUL_SAT: + case AST_NODE_MERGE_ERROR_SETS: + case AST_NODE_EQUAL_EQUAL: + case AST_NODE_BANG_EQUAL: + case AST_NODE_LESS_THAN: + case AST_NODE_GREATER_THAN: + case AST_NODE_LESS_OR_EQUAL: + case AST_NODE_GREATER_OR_EQUAL: + case AST_NODE_BOOL_AND: + case AST_NODE_BOOL_OR: + case AST_NODE_ORELSE: + case AST_NODE_CATCH: + case AST_NODE_ERROR_UNION: + case AST_NODE_SHL_SAT: n = nd.rhs; continue; @@ -1888,6 +5139,16 @@ static uint32_t lastToken(const Ast* tree, uint32_t node) { n = nd.rhs; continue; + // defer: recurse into body (lhs) (Ast.zig:951). + case AST_NODE_DEFER: + n = nd.lhs; + continue; + + // errdefer: recurse into body (rhs) (Ast.zig:950). + case AST_NODE_ERRDEFER: + n = nd.rhs; + continue; + // block (Ast.zig:1085): end_offset += 1 (rbrace), recurse into last. case AST_NODE_BLOCK: { uint32_t start = nd.lhs; @@ -1969,14 +5230,506 @@ static uint32_t lastToken(const Ast* tree, uint32_t node) { continue; } + // Unary ops: recurse into lhs (Ast.zig:895-910). + case AST_NODE_BOOL_NOT: + case AST_NODE_BIT_NOT: + case AST_NODE_NEGATION: + case AST_NODE_NEGATION_WRAP: + case AST_NODE_ADDRESS_OF: + case AST_NODE_TRY: + case AST_NODE_AWAIT: + case AST_NODE_OPTIONAL_TYPE: + case AST_NODE_COMPTIME: + case AST_NODE_NOSUSPEND: + case AST_NODE_RESUME: + n = nd.lhs; + continue; + + // return: optional operand (Ast.zig:998-1002). + case AST_NODE_RETURN: + if (nd.lhs != 0) { + n = nd.lhs; + continue; + } + return tree->nodes.main_tokens[n] + end_offset; + + // deref: main_token is the dot, +1 for '*' (Ast.zig:974). 
+ case AST_NODE_DEREF: + return tree->nodes.main_tokens[n] + 1 + end_offset; + + // unwrap_optional: +1 for '?' (Ast.zig:971). + case AST_NODE_UNWRAP_OPTIONAL: + return tree->nodes.main_tokens[n] + 1 + end_offset; + + // for_range: recurse into rhs if present, else lhs. + case AST_NODE_FOR_RANGE: + if (nd.rhs != 0) { + n = nd.rhs; + } else { + // Unbounded range: last token is the '..' operator. + // main_token + 1 (the second dot of ..) + return tree->nodes.main_tokens[n] + 1 + end_offset; + } + continue; + // Terminals: return main_token + end_offset (Ast.zig:988-996). case AST_NODE_NUMBER_LITERAL: case AST_NODE_STRING_LITERAL: case AST_NODE_IDENTIFIER: + case AST_NODE_ENUM_LITERAL: + case AST_NODE_CHAR_LITERAL: + case AST_NODE_UNREACHABLE_LITERAL: + case AST_NODE_ANYFRAME_LITERAL: + case AST_NODE_ERROR_VALUE: return tree->nodes.main_tokens[n] + end_offset; - // field_access: return data.rhs (the field token) + end_offset - // (Ast.zig:979-982). + // call_one: recurse into lhs, +1 for ')'. + case AST_NODE_CALL_ONE: + end_offset += 1; // rparen + if (nd.rhs != 0) { + n = nd.rhs; + } else { + n = nd.lhs; + } + continue; + case AST_NODE_CALL_ONE_COMMA: + end_offset += 2; // comma + rparen + if (nd.rhs != 0) { + n = nd.rhs; + } else { + n = nd.lhs; + } + continue; + + // array_access: end_offset += 1 (rbracket), recurse rhs. + case AST_NODE_ARRAY_ACCESS: + end_offset += 1; + n = nd.rhs; + continue; + + // simple_var_decl: recurse into init/type (Ast.zig:1169-1178). + case AST_NODE_SIMPLE_VAR_DECL: + if (nd.rhs != 0) { + n = nd.rhs; // init expr + } else if (nd.lhs != 0) { + n = nd.lhs; // type expr + } else { + end_offset += 1; // from mut token to name + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + + // aligned_var_decl: recurse into init/align (Ast.zig:1180-1187). 
+ case AST_NODE_ALIGNED_VAR_DECL: + if (nd.rhs != 0) { + n = nd.rhs; // init expr + } else { + end_offset += 1; // rparen + n = nd.lhs; // align expr + } + continue; + + // local_var_decl (Ast.zig:1209-1217). + case AST_NODE_LOCAL_VAR_DECL: + if (nd.rhs != 0) { + n = nd.rhs; // init expr + } else { + // extra[lhs] has align_node + end_offset += 1; // rparen + n = tree->extra_data.arr[nd.lhs]; // align_node + } + continue; + + // global_var_decl (Ast.zig:1189-1207). + case AST_NODE_GLOBAL_VAR_DECL: + if (nd.rhs != 0) { + n = nd.rhs; // init expr + } else { + // extra[lhs] = {type_node, align_node, ...} + // complex; approximate by using main_token + end_offset += 1; + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + + // slice_open: end_offset += 2 (ellipsis2 + rbracket), recurse rhs + // (Ast.zig:1245-1248). + case AST_NODE_SLICE_OPEN: + end_offset += 2; + n = nd.rhs; + continue; + + // grouped_expression: end_offset += 1 (rparen), recurse lhs. + case AST_NODE_GROUPED_EXPRESSION: + end_offset += 1; + n = nd.lhs; + continue; + + // if_simple: recurse into body (rhs) (Ast.zig:942). + case AST_NODE_IF_SIMPLE: + case AST_NODE_WHILE_SIMPLE: + case AST_NODE_FOR_SIMPLE: + case AST_NODE_FN_DECL: + case AST_NODE_ARRAY_TYPE: + n = nd.rhs; + continue; + + // if: recurse into else_expr (Ast.zig:1295). + case AST_NODE_IF: { + // If[rhs]: { then_expr, else_expr } + n = tree->extra_data.arr[nd.rhs + 1]; // else_expr + continue; + } + + // while: recurse into else_expr (Ast.zig:1290). + case AST_NODE_WHILE: { + // While[rhs]: { cont_expr, then_expr, else_expr } + n = tree->extra_data.arr[nd.rhs + 2]; // else_expr + continue; + } + + // while_cont: recurse into then_expr (Ast.zig:943-like). + case AST_NODE_WHILE_CONT: { + // WhileCont[rhs]: { cont_expr, then_expr } + n = tree->extra_data.arr[nd.rhs + 1]; // then_expr + continue; + } + + // switch: recurse into last case (Ast.zig:1031-1041). 
+ case AST_NODE_SWITCH: { + uint32_t ei = nd.rhs; + uint32_t cs = tree->extra_data.arr[ei]; + uint32_t ce = tree->extra_data.arr[ei + 1]; + if (cs == ce) { + end_offset += 3; // rparen, lbrace, rbrace + n = nd.lhs; + } else { + end_offset += 1; // rbrace + n = tree->extra_data.arr[ce - 1]; + } + continue; + } + case AST_NODE_SWITCH_COMMA: { + uint32_t ei = nd.rhs; + uint32_t cs = tree->extra_data.arr[ei]; + uint32_t ce = tree->extra_data.arr[ei + 1]; + assert(cs != ce); + end_offset += 2; // comma + rbrace + n = tree->extra_data.arr[ce - 1]; + continue; + } + + // switch_case_one: recurse into rhs (body) (Ast.zig:942). + case AST_NODE_SWITCH_CASE_ONE: + case AST_NODE_SWITCH_CASE_INLINE_ONE: + case AST_NODE_SWITCH_CASE: + case AST_NODE_SWITCH_CASE_INLINE: + n = nd.rhs; + continue; + + // switch_range: recurse into rhs (Ast.zig: binary op pattern). + case AST_NODE_SWITCH_RANGE: + n = nd.rhs; + continue; + + // struct_init_one: recurse into field if present, +1. + case AST_NODE_STRUCT_INIT_ONE: + end_offset += 1; // rbrace + if (nd.rhs != 0) { + n = nd.rhs; + } else { + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + case AST_NODE_STRUCT_INIT_ONE_COMMA: + end_offset += 2; // comma + rbrace + n = nd.rhs; + continue; + + // struct_init_dot_two: similar to block_two. + case AST_NODE_STRUCT_INIT_DOT_TWO: + if (nd.rhs != 0) { + end_offset += 1; + n = nd.rhs; + } else if (nd.lhs != 0) { + end_offset += 1; + n = nd.lhs; + } else { + end_offset += 1; // rbrace + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: + end_offset += 2; + if (nd.rhs != 0) { + n = nd.rhs; + } else { + n = nd.lhs; + } + continue; + + // struct_init_dot: SubRange pattern. + case AST_NODE_STRUCT_INIT_DOT: + assert(nd.lhs != nd.rhs); + end_offset += 1; + n = tree->extra_data.arr[nd.rhs - 1]; + continue; + + // struct_init: node_and_extra SubRange pattern. 
+ case AST_NODE_STRUCT_INIT: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 1; + n = tree->extra_data.arr[se - 1]; + continue; + } + + // call: SubRange pattern. + case AST_NODE_CALL: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 1; + n = tree->extra_data.arr[se - 1]; + continue; + } + case AST_NODE_CALL_COMMA: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 2; + n = tree->extra_data.arr[se - 1]; + continue; + } + + // fn_proto_simple: recurse into rhs (return type). + case AST_NODE_FN_PROTO_SIMPLE: + case AST_NODE_FN_PROTO_ONE: + case AST_NODE_FN_PROTO_MULTI: + case AST_NODE_FN_PROTO: + n = nd.rhs; + continue; + + // error_set_decl: rhs is the closing rbrace token. + case AST_NODE_ERROR_SET_DECL: + return nd.rhs + end_offset; + + // ptr_type variants: recurse into rhs (child type). + case AST_NODE_PTR_TYPE_ALIGNED: + case AST_NODE_PTR_TYPE_SENTINEL: + case AST_NODE_PTR_TYPE: + case AST_NODE_PTR_TYPE_BIT_RANGE: + n = nd.rhs; + continue; + + // container_decl: extra_range pattern. + case AST_NODE_CONTAINER_DECL: + case AST_NODE_TAGGED_UNION: + assert(nd.lhs != nd.rhs); + end_offset += 1; + n = tree->extra_data.arr[nd.rhs - 1]; + continue; + case AST_NODE_CONTAINER_DECL_TRAILING: + case AST_NODE_TAGGED_UNION_TRAILING: + assert(nd.lhs != nd.rhs); + end_offset += 2; + n = tree->extra_data.arr[nd.rhs - 1]; + continue; + + // container_decl_two: like block_two. 
+ case AST_NODE_CONTAINER_DECL_TWO: + case AST_NODE_TAGGED_UNION_TWO: + if (nd.rhs != 0) { + end_offset += 1; + n = nd.rhs; + } else if (nd.lhs != 0) { + end_offset += 1; + n = nd.lhs; + } else { + end_offset += 2; // lbrace + rbrace + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + case AST_NODE_CONTAINER_DECL_TWO_TRAILING: + case AST_NODE_TAGGED_UNION_TWO_TRAILING: + end_offset += 2; + if (nd.rhs != 0) { + n = nd.rhs; + } else { + n = nd.lhs; + } + continue; + + // container_decl_arg: node_and_extra SubRange. + case AST_NODE_CONTAINER_DECL_ARG: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + if (si == se) { + end_offset += 3; // rparen + lbrace + rbrace + n = nd.lhs; + } else { + end_offset += 1; + n = tree->extra_data.arr[se - 1]; + } + continue; + } + case AST_NODE_CONTAINER_DECL_ARG_TRAILING: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 2; + n = tree->extra_data.arr[se - 1]; + continue; + } + + // slice: extra data pattern. + case AST_NODE_SLICE: { + // Slice[rhs]: { start, end } + end_offset += 1; + n = tree->extra_data.arr[nd.rhs + 1]; // end + continue; + } + case AST_NODE_SLICE_SENTINEL: { + // SliceSentinel[rhs]: { start, end, sentinel } + end_offset += 1; + n = tree->extra_data.arr[nd.rhs + 2]; // sentinel + continue; + } + + // array_type_sentinel: extra data. + case AST_NODE_ARRAY_TYPE_SENTINEL: { + // ArrayTypeSentinel[rhs]: { sentinel, elem_type } + n = tree->extra_data.arr[nd.rhs + 1]; // elem_type + continue; + } + + // multiline_string_literal: main_token + end_offset. + case AST_NODE_MULTILINE_STRING_LITERAL: + return nd.rhs + end_offset; + + // break/continue (Ast.zig:1275-1283). 
+ case AST_NODE_BREAK: + case AST_NODE_CONTINUE: + if (nd.rhs != 0) { + n = nd.rhs; // optional rhs expression + } else if (nd.lhs != 0) { + return nd.lhs + end_offset; // label token + } else { + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + + // array_init_one: end_offset += 1 (rbrace), recurse rhs + // (Ast.zig:1224-1230). + case AST_NODE_ARRAY_INIT_ONE: + end_offset += 1; + n = nd.rhs; + continue; + + case AST_NODE_ARRAY_INIT_ONE_COMMA: + end_offset += 2; // comma + rbrace + n = nd.rhs; + continue; + + // struct_init_dot_comma: SubRange pattern. + case AST_NODE_STRUCT_INIT_DOT_COMMA: + assert(nd.lhs != nd.rhs); + end_offset += 2; // comma + rbrace + n = tree->extra_data.arr[nd.rhs - 1]; + continue; + + // struct_init_comma: node_and_extra SubRange. + case AST_NODE_STRUCT_INIT_COMMA: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 2; + n = tree->extra_data.arr[se - 1]; + continue; + } + + // array_init variants. + case AST_NODE_ARRAY_INIT: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 1; + n = tree->extra_data.arr[se - 1]; + continue; + } + case AST_NODE_ARRAY_INIT_COMMA: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 2; + n = tree->extra_data.arr[se - 1]; + continue; + } + + // array_init_dot variants. 
+ case AST_NODE_ARRAY_INIT_DOT_TWO: + if (nd.rhs != 0) { + end_offset += 1; + n = nd.rhs; + } else if (nd.lhs != 0) { + end_offset += 1; + n = nd.lhs; + } else { + end_offset += 1; + return tree->nodes.main_tokens[n] + end_offset; + } + continue; + case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: + end_offset += 2; + if (nd.rhs != 0) { + n = nd.rhs; + } else { + n = nd.lhs; + } + continue; + case AST_NODE_ARRAY_INIT_DOT: + assert(nd.lhs != nd.rhs); + end_offset += 1; + n = tree->extra_data.arr[nd.rhs - 1]; + continue; + case AST_NODE_ARRAY_INIT_DOT_COMMA: + assert(nd.lhs != nd.rhs); + end_offset += 2; + n = tree->extra_data.arr[nd.rhs - 1]; + continue; + + // builtin_call (Ast.zig:1083-1105). + case AST_NODE_BUILTIN_CALL: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 1; + n = tree->extra_data.arr[se - 1]; + continue; + } + case AST_NODE_BUILTIN_CALL_COMMA: { + uint32_t si = tree->extra_data.arr[nd.rhs]; + uint32_t se = tree->extra_data.arr[nd.rhs + 1]; + assert(si != se); + end_offset += 2; + n = tree->extra_data.arr[se - 1]; + continue; + } + + // for (Ast.zig:1300-1303): complex extra data. + case AST_NODE_FOR: { + // lhs = span.start (extra_data index), + // rhs = packed(inputs:u31, has_else:u1 at bit 31). + // extra[lhs..] = input nodes, then_body, [else_body]. + uint32_t span_start = nd.lhs; + uint32_t for_packed = nd.rhs; + uint32_t inputs = for_packed & 0x7FFFFFFFu; + bool has_else = (for_packed >> 31) != 0; + uint32_t last_idx = span_start + inputs + (has_else ? 1 : 0); + n = tree->extra_data.arr[last_idx]; + continue; + } default: // Fallback: return main_token + end_offset. @@ -1985,13 +5738,60 @@ static uint32_t lastToken(const Ast* tree, uint32_t node) { } } +// --- addParam (AstGen.zig:12390) --- +// Creates a param instruction with pl_tok data and type body in extra. 

// gz: enclosing scope that receives the param instruction.
// param_gz: sub-block whose instructions form the parameter's type body;
//           it is consumed (unstacked) by this call.
// Returns the new instruction's index.
static uint32_t addParam(GenZir* gz, GenZir* param_gz, ZirInstTag tag,
    uint32_t abs_tok_index, uint32_t name) {
    AstGenCtx* ag = gz->astgen;

    uint32_t body_len = gzInstructionsLen(param_gz);
    const uint32_t* param_body = gzInstructionsSlice(param_gz);

    // Param payload: name, type{body_len:u31|is_generic:u1}
    // NOTE(review): the body is copied raw here, without the
    // countBodyLenAfterFixups()/appendPossiblyRefdBodyInst() pass used for
    // defer and function bodies — confirm param type bodies can never
    // contain deferred REF instructions.
    ensureExtraCapacity(ag, 2 + body_len);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = name;
    ag->extra[ag->extra_len++] = body_len & 0x7FFFFFFFu; // is_generic = false
    for (uint32_t i = 0; i < body_len; i++) {
        ag->extra[ag->extra_len++] = param_body[i];
    }
    gzUnstack(param_gz);

    // Emit the param instruction. Appended manually (not via
    // addInstruction) so the pl_tok data can be filled in one place.
    ensureInstCapacity(ag, 1);
    uint32_t idx = ag->inst_len;
    ag->inst_tags[idx] = tag;
    ZirInstData data;
    // src_tok is stored relative to the block's base token.
    data.pl_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index);
    data.pl_tok.payload_index = payload_index;
    ag->inst_datas[idx] = data;
    ag->inst_len++;
    gzAppendInstruction(gz, idx);
    return idx;
}

// --- addDbgVar (AstGen.zig:13196) ---
// Emits a dbg_var_val/dbg_var_ptr instruction naming a local variable.
// No-op inside comptime scopes (no runtime debug info there).
static void addDbgVar(
    GenZir* gz, ZirInstTag tag, uint32_t name, uint32_t inst) {
    if (gz->is_comptime)
        return;
    ZirInstData data;
    data.str_op.str = name;
    data.str_op.operand = inst;
    addInstruction(gz, tag, data);
}

// --- addFunc (AstGen.zig:12023) ---
// Handles non-fancy func/func_inferred instructions.
// ret_body/ret_body_len: instructions for the return type sub-block (may be
// 0). ret_ref: if ret_body_len==0, the return type as a simple Ref.
static uint32_t addFunc(GenZir* gz, uint32_t src_node, uint32_t block_node, - uint32_t param_block, uint32_t ret_ref, const uint32_t* body, - uint32_t body_len, uint32_t lbrace_line, uint32_t lbrace_column) { + uint32_t param_block, uint32_t ret_ref, const uint32_t* ret_body, + uint32_t ret_body_len, const uint32_t* body, uint32_t body_len, + uint32_t lbrace_line, uint32_t lbrace_column, bool is_inferred_error) { AstGenCtx* ag = gz->astgen; const Ast* tree = ag->tree; uint32_t rbrace_tok = lastToken(tree, block_node); @@ -2002,29 +5802,36 @@ static uint32_t addFunc(GenZir* gz, uint32_t src_node, uint32_t block_node, // Build Func payload (Zir.Inst.Func: ret_ty, param_block, body_len). // (AstGen.zig:12187-12194) - uint32_t ret_body_len; - if (ret_ref == ZIR_REF_NONE) { - ret_body_len = 0; // void return + uint32_t ret_ty_packed_len; + if (ret_body_len > 0) { + ret_ty_packed_len = ret_body_len; // body-based return type + } else if (ret_ref != ZIR_REF_NONE) { + ret_ty_packed_len = 1; // simple Ref } else { - ret_body_len = 1; // simple Ref + ret_ty_packed_len = 0; // void return } // Pack RetTy: body_len:u31 | is_generic:bool(u1) = just body_len. - uint32_t ret_ty_packed = ret_body_len & 0x7FFFFFFFu; // is_generic=false + uint32_t ret_ty_packed + = ret_ty_packed_len & 0x7FFFFFFFu; // is_generic=false - ensureExtraCapacity(ag, 3 + 1 + body_len + 7); + uint32_t fixup_body_len = countBodyLenAfterFixups(ag, body, body_len); + ensureExtraCapacity(ag, 3 + ret_ty_packed_len + fixup_body_len + 7); uint32_t payload_index = ag->extra_len; ag->extra[ag->extra_len++] = ret_ty_packed; // Func.ret_ty ag->extra[ag->extra_len++] = param_block; // Func.param_block - ag->extra[ag->extra_len++] = body_len; // Func.body_len + ag->extra[ag->extra_len++] = fixup_body_len; // Func.body_len - // Trailing ret_ty ref (if ret_body_len == 1). - if (ret_ref != ZIR_REF_NONE) { + // Trailing ret_ty: either body instructions or a single ref. 
+ if (ret_body_len > 0) { + for (uint32_t i = 0; i < ret_body_len; i++) + ag->extra[ag->extra_len++] = ret_body[i]; + } else if (ret_ref != ZIR_REF_NONE) { ag->extra[ag->extra_len++] = ret_ref; } - // Body instructions. + // Body instructions (with ref_table fixups). for (uint32_t i = 0; i < body_len; i++) { - ag->extra[ag->extra_len++] = body[i]; + appendPossiblyRefdBodyInst(ag, body[i]); } // SrcLocs (AstGen.zig:12098-12106). @@ -2032,17 +5839,19 @@ static uint32_t addFunc(GenZir* gz, uint32_t src_node, uint32_t block_node, ag->extra[ag->extra_len++] = lbrace_line; ag->extra[ag->extra_len++] = rbrace_line; ag->extra[ag->extra_len++] = columns; - // proto_hash (4 words): zero for tests. + // proto_hash (4 words): zero for now. ag->extra[ag->extra_len++] = 0; ag->extra[ag->extra_len++] = 0; ag->extra[ag->extra_len++] = 0; ag->extra[ag->extra_len++] = 0; // Emit the func instruction (AstGen.zig:12220-12226). + ZirInstTag tag + = is_inferred_error ? ZIR_INST_FUNC_INFERRED : ZIR_INST_FUNC; ZirInstData data; data.pl_node.src_node = (int32_t)src_node - (int32_t)gz->decl_node_index; data.pl_node.payload_index = payload_index; - return addInstruction(gz, ZIR_INST_FUNC, data); + return addInstruction(gz, tag, data); } // --- testDecl (AstGen.zig:4708) --- @@ -2083,20 +5892,26 @@ static void testDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, // Set up decl_block GenZir (AstGen.zig:4735-4743). GenZir decl_block; memset(&decl_block, 0, sizeof(decl_block)); + decl_block.base.tag = SCOPE_GEN_ZIR; + decl_block.parent = NULL; decl_block.astgen = ag; decl_block.decl_node_index = node; decl_block.decl_line = decl_line; decl_block.is_comptime = true; decl_block.instructions_top = ag->scratch_inst_len; + decl_block.break_block = UINT32_MAX; // Set up fn_block GenZir (AstGen.zig:4837-4845). 
GenZir fn_block; memset(&fn_block, 0, sizeof(fn_block)); + fn_block.base.tag = SCOPE_GEN_ZIR; + fn_block.parent = &decl_block.base; fn_block.astgen = ag; fn_block.decl_node_index = node; fn_block.decl_line = decl_line; fn_block.is_comptime = false; fn_block.instructions_top = ag->scratch_inst_len; + fn_block.break_block = UINT32_MAX; // Compute lbrace source location (AstGen.zig:4860-4862). advanceSourceCursorToNode(ag, body_node); @@ -2104,29 +5919,26 @@ static void testDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, uint32_t lbrace_column = ag->source_column; // Process test body (AstGen.zig:4864). - fullBodyExpr(&fn_block, body_node); + fullBodyExpr(&fn_block, &fn_block.base, body_node); // If we hit unimplemented features, bail out. if (ag->has_compile_errors) return; - // Add restore_err_ret_index_unconditional (AstGen.zig:4868). - { + // Add restore_err_ret_index + ret_implicit (AstGen.zig:4865-4871). + if (!endsWithNoReturn(&fn_block)) { ZirInstData rdata; rdata.un_node.operand = ZIR_REF_NONE; // .none for .ret rdata.un_node.src_node = (int32_t)node - (int32_t)fn_block.decl_node_index; addInstruction( &fn_block, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata); - } - // Add ret_implicit (AstGen.zig:4871). - { uint32_t body_last_tok = lastToken(tree, body_node); - ZirInstData rdata; - rdata.un_tok.operand = ZIR_REF_VOID_VALUE; - rdata.un_tok.src_tok = tokenIndexToRelative(&fn_block, body_last_tok); - addInstruction(&fn_block, ZIR_INST_RET_IMPLICIT, rdata); + ZirInstData rdata2; + rdata2.un_tok.operand = ZIR_REF_VOID_VALUE; + rdata2.un_tok.src_tok = tokenIndexToRelative(&fn_block, body_last_tok); + addInstruction(&fn_block, ZIR_INST_RET_IMPLICIT, rdata2); } // Read fn_block body before unstacking (AstGen.zig:4874). @@ -2139,8 +5951,8 @@ static void testDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, // Create func instruction (AstGen.zig:4874-4897). 
uint32_t func_ref = addFunc(&decl_block, node, body_node, decl_inst, - ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE, fn_body, fn_body_len, - lbrace_line, lbrace_column); + ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE, NULL, 0, fn_body, fn_body_len, + lbrace_line, lbrace_column, false); // break_inline returning func to declaration (AstGen.zig:4899). makeBreakInline(&decl_block, decl_inst, func_ref, AST_NODE_OFFSET_NONE); @@ -2153,8 +5965,8 @@ static void testDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, (void)gz; } -// --- fnDecl (AstGen.zig:4067) --- -// Simplified: handles non-extern function declarations with bodies. +// --- fnDecl (AstGen.zig:4067) / fnDeclInner (AstGen.zig:4228) --- +// Handles non-extern function declarations with bodies, including params. static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, uint32_t* decl_idx, uint32_t node) { @@ -2187,27 +5999,20 @@ static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, uint32_t decl_line = ag->source_line; uint32_t decl_column = ag->source_column; - // Determine return type (AstGen.zig:4133-4135). - // For fn_proto_simple: return_type is in data. - // Simplified: detect !void vs void from source. + // Save source cursor for restoring after ret_gz (AstGen.zig:4387-4388). + uint32_t saved_source_offset = ag->source_offset; + uint32_t saved_source_line = ag->source_line; + uint32_t saved_source_column = ag->source_column; + AstNodeTag proto_tag = tree->nodes.tags[proto_node]; - bool is_inferred_error = false; - - // Look for the return type node. - // For fn_proto_simple: data.lhs = param (optional), data.rhs = - // return_type. For fn_proto_one: data = {extra, return_type}. Simplified: - // check if return type token starts with '!'. 
AstData proto_data = tree->nodes.datas[proto_node]; - uint32_t return_type_node = 0; - if (proto_tag == AST_NODE_FN_PROTO_SIMPLE) { - return_type_node = proto_data.rhs; - } else if (proto_tag == AST_NODE_FN_PROTO_ONE) { - return_type_node = proto_data.rhs; - } else if (proto_tag == AST_NODE_FN_PROTO_MULTI - || proto_tag == AST_NODE_FN_PROTO) { - return_type_node = proto_data.rhs; - } + // Extract return type node (rhs for all fn_proto variants). + uint32_t return_type_node = proto_data.rhs; + + // Detect inferred error set: token before return type is '!' + // (AstGen.zig:4249-4251). + bool is_inferred_error = false; if (return_type_node != 0) { uint32_t ret_first_tok = firstToken(tree, return_type_node); if (ret_first_tok > 0) { @@ -2218,81 +6023,305 @@ static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, } } - // value_gz for fnDeclInner (AstGen.zig:4194-4201). - GenZir value_gz; - memset(&value_gz, 0, sizeof(value_gz)); - value_gz.astgen = ag; - value_gz.decl_node_index = proto_node; - value_gz.decl_line = decl_line; - value_gz.is_comptime = true; - value_gz.instructions_top = ag->scratch_inst_len; + // Extract param type nodes from proto variant (AstGen.zig:4253-4254). + uint32_t param_nodes_buf[1]; // buffer for fn_proto_simple/fn_proto_one + const uint32_t* param_nodes = NULL; + uint32_t params_len = 0; - // fnDeclInner creates the func instruction. - // Simplified: creates fn_block, processes body, adds func instruction. - GenZir fn_block; - memset(&fn_block, 0, sizeof(fn_block)); - fn_block.astgen = ag; - fn_block.decl_node_index = proto_node; - fn_block.decl_line = decl_line; - fn_block.is_comptime = false; - fn_block.instructions_top = ag->scratch_inst_len; + if (proto_tag == AST_NODE_FN_PROTO_SIMPLE) { + // data.lhs = optional param node, data.rhs = return type. 
+ if (proto_data.lhs != 0) { + param_nodes_buf[0] = proto_data.lhs; + param_nodes = param_nodes_buf; + params_len = 1; + } + } else if (proto_tag == AST_NODE_FN_PROTO_ONE) { + // data.lhs = extra_data index → AstFnProtoOne. + uint32_t extra_idx = proto_data.lhs; + uint32_t param + = tree->extra_data.arr[extra_idx]; // AstFnProtoOne.param + if (param != 0) { + param_nodes_buf[0] = param; + param_nodes = param_nodes_buf; + params_len = 1; + } + } else if (proto_tag == AST_NODE_FN_PROTO_MULTI) { + // data.lhs = extra_data index → SubRange{start, end}. + uint32_t extra_idx = proto_data.lhs; + uint32_t range_start = tree->extra_data.arr[extra_idx]; + uint32_t range_end = tree->extra_data.arr[extra_idx + 1]; + param_nodes = tree->extra_data.arr + range_start; + params_len = range_end - range_start; + } else if (proto_tag == AST_NODE_FN_PROTO) { + // data.lhs = extra_data index → AstFnProto{params_start, params_end, + // ...}. + uint32_t extra_idx = proto_data.lhs; + uint32_t pstart = tree->extra_data.arr[extra_idx]; // params_start + uint32_t pend = tree->extra_data.arr[extra_idx + 1]; // params_end + param_nodes = tree->extra_data.arr + pstart; + params_len = pend - pstart; + } - // Process function body (AstGen.zig:4358). + // decl_gz (called value_gz in caller, decl_gz in fnDeclInner) + // (AstGen.zig:4194-4201). + GenZir decl_gz; + memset(&decl_gz, 0, sizeof(decl_gz)); + decl_gz.base.tag = SCOPE_GEN_ZIR; + decl_gz.parent = NULL; + decl_gz.astgen = ag; + decl_gz.decl_node_index = proto_node; + decl_gz.decl_line = decl_line; + decl_gz.is_comptime = true; + decl_gz.instructions_top = ag->scratch_inst_len; + decl_gz.break_block = UINT32_MAX; + + // --- Parameter iteration (AstGen.zig:4260-4363) --- + // Walk params, creating param instructions and ScopeLocalVal entries. + // We keep param scopes on the C stack (max 32 params like upstream). 
+ Scope* params_scope = &decl_gz.base; + ScopeLocalVal param_scopes[32]; + uint32_t param_scope_count = 0; + + for (uint32_t param_i = 0; param_i < params_len; param_i++) { + uint32_t param_type_node = param_nodes[param_i]; + + // Find param name token by scanning backwards from firstToken of + // type expression (mirrors FnProto.Iterator.next, Ast.zig:2687). + // Layout: [comptime] [name] [:] type_expr + // So: type_first_tok - 1 is ':', type_first_tok - 2 is name. + uint32_t type_first_tok = firstToken(tree, param_type_node); + uint32_t name_token = 0; // 0 = no name found + bool is_comptime_param = false; + if (type_first_tok >= 2 + && tree->tokens.tags[type_first_tok - 1] == TOKEN_COLON) { + // Named parameter: name is at type_first_tok - 2. + uint32_t maybe_name = type_first_tok - 2; + uint32_t name_start = tree->tokens.starts[maybe_name]; + char ch = tree->source[name_start]; + if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') + || ch == '_' || ch == '@') { + // Could be name or comptime/noalias keyword. + if (name_start + 8 <= tree->source_len + && memcmp(tree->source + name_start, "comptime", 8) == 0) { + is_comptime_param = true; + } else if (name_start + 7 <= tree->source_len + && memcmp(tree->source + name_start, "noalias", 7) == 0) { + // noalias keyword, not a name. + } else { + name_token = maybe_name; + // Check for preceding comptime keyword. + if (maybe_name > 0) { + uint32_t prev = maybe_name - 1; + uint32_t prev_start = tree->tokens.starts[prev]; + if (prev_start + 8 <= tree->source_len + && memcmp(tree->source + prev_start, "comptime", 8) + == 0) + is_comptime_param = true; + } + } + } + } + + // Evaluate param type expression in a sub-block + // (AstGen.zig:4333-4337). 
+ GenZir param_gz = makeSubBlock(&decl_gz, params_scope); + uint32_t param_type_ref + = expr(¶m_gz, params_scope, param_type_node); + + if (ag->has_compile_errors) + return; + + // The break_inline target is the param instruction we're about to + // create (AstGen.zig:4336-4337). + uint32_t param_inst_expected = ag->inst_len + 1; + // +1 because: the break_inline is emitted first (uses inst_len), + // then addParam emits the param instruction at inst_len. + // Actually, addParam emits the param after break_inline. The + // break_inline's block_inst field should point to the param inst. + // We know it will be at ag->inst_len after the break_inline. + makeBreakInline(¶m_gz, param_inst_expected, param_type_ref, + (int32_t)param_type_node - (int32_t)param_gz.decl_node_index); + + // Determine param name string. + uint32_t param_name_str = 0; // NullTerminatedString.empty + if (name_token != 0) { + uint32_t name_start = tree->tokens.starts[name_token]; + char nch = tree->source[name_start]; + // Skip "_" params (AstGen.zig:4285-4286). + if (nch == '_') { + uint32_t next_start = tree->tokens.starts[name_token + 1]; + if (next_start == name_start + 1) { + // Single underscore: empty name. + param_name_str = 0; + } else { + param_name_str = identAsString(ag, name_token); + } + } else { + param_name_str = identAsString(ag, name_token); + } + } + + // Create param instruction (AstGen.zig:4341-4343). + ZirInstTag param_tag + = is_comptime_param ? ZIR_INST_PARAM_COMPTIME : ZIR_INST_PARAM; + uint32_t name_tok_for_src = name_token != 0 + ? name_token + : tree->nodes.main_tokens[param_type_node]; + uint32_t param_inst = addParam( + &decl_gz, ¶m_gz, param_tag, name_tok_for_src, param_name_str); + (void)param_inst_expected; + + // Create ScopeLocalVal for this param (AstGen.zig:4349-4359). 
+ if (param_name_str != 0 && param_scope_count < 32) { + ScopeLocalVal* lv = ¶m_scopes[param_scope_count++]; + lv->base.tag = SCOPE_LOCAL_VAL; + lv->parent = params_scope; + lv->gen_zir = &decl_gz; + lv->inst = param_inst + ZIR_REF_START_INDEX; // toRef() + lv->token_src = name_token; + lv->name = param_name_str; + params_scope = &lv->base; + } + } + + // --- Return type (AstGen.zig:4369-4383) --- + GenZir ret_gz = makeSubBlock(&decl_gz, params_scope); + uint32_t ret_ref = ZIR_REF_NONE; + if (return_type_node != 0) { + ret_ref = expr(&ret_gz, params_scope, return_type_node); + if (ag->has_compile_errors) + return; + // If ret_gz produced instructions, add break_inline + // (AstGen.zig:4377-4381). + if (gzInstructionsLen(&ret_gz) > 0) { + // break_inline targets the func instruction (which doesn't + // exist yet). We use 0 as placeholder and patch later. + makeBreakInline(&ret_gz, 0, ret_ref, 0); + } + } + // Map void_type → .none (AstGen.zig:12054). + if (ret_ref == ZIR_REF_VOID_TYPE) + ret_ref = ZIR_REF_NONE; + + uint32_t ret_body_len = gzInstructionsLen(&ret_gz); + // Copy ret_body before unstacking: body_gz reuses the same scratch area. + uint32_t* ret_body = NULL; + if (ret_body_len > 0) { + ret_body = malloc(ret_body_len * sizeof(uint32_t)); + if (!ret_body) + abort(); + memcpy(ret_body, gzInstructionsSlice(&ret_gz), + ret_body_len * sizeof(uint32_t)); + } + gzUnstack(&ret_gz); + + // Restore source cursor (AstGen.zig:4387-4388). + ag->source_offset = saved_source_offset; + ag->source_line = saved_source_line; + ag->source_column = saved_source_column; + + // --- Body (AstGen.zig:4415-4424) --- + GenZir body_gz; + memset(&body_gz, 0, sizeof(body_gz)); + body_gz.base.tag = SCOPE_GEN_ZIR; + body_gz.parent = params_scope; + body_gz.astgen = ag; + body_gz.decl_node_index = proto_node; + body_gz.decl_line = decl_line; + body_gz.is_comptime = false; + body_gz.instructions_top = ag->scratch_inst_len; + + // Set fn_ret_ty for the body (AstGen.zig:4449-4455). 
+ uint32_t prev_fn_ret_ty = ag->fn_ret_ty; + if (is_inferred_error || ret_ref == ZIR_REF_NONE) { + // Non-void non-trivial return type: emit ret_type instruction. + if (ret_body_len > 0 || is_inferred_error) { + ZirInstData rtdata; + memset(&rtdata, 0, sizeof(rtdata)); + rtdata.node = (int32_t)node - (int32_t)body_gz.decl_node_index; + ag->fn_ret_ty + = addInstruction(&body_gz, ZIR_INST_RET_TYPE, rtdata); + } else { + ag->fn_ret_ty = ret_ref; // void + } + } else { + // ret_ref is a simple ref (not void, not inferred error). + // Still need ret_type instruction if it resolved to an inst. + if (ret_ref >= ZIR_REF_START_INDEX) { + ZirInstData rtdata; + memset(&rtdata, 0, sizeof(rtdata)); + rtdata.node = (int32_t)node - (int32_t)body_gz.decl_node_index; + ag->fn_ret_ty + = addInstruction(&body_gz, ZIR_INST_RET_TYPE, rtdata); + } else { + ag->fn_ret_ty = ret_ref; + } + } + + // Process function body (AstGen.zig:4461-4465). advanceSourceCursorToNode(ag, body_node); uint32_t lbrace_line = ag->source_line - decl_line; uint32_t lbrace_column = ag->source_column; - fullBodyExpr(&fn_block, body_node); + fullBodyExpr(&body_gz, params_scope, body_node); - if (ag->has_compile_errors) + ag->fn_ret_ty = prev_fn_ret_ty; + + if (ag->has_compile_errors) { + free(ret_body); return; + } - // Add implicit return at end of function body. - // restore_err_ret_index is always added (AstGen.zig:4365-4368). - { + // Add implicit return at end of function body + // (AstGen.zig:4465-4871). 
+ if (!endsWithNoReturn(&body_gz)) { ZirInstData rdata; rdata.un_node.operand = ZIR_REF_NONE; rdata.un_node.src_node - = (int32_t)node - (int32_t)fn_block.decl_node_index; + = (int32_t)node - (int32_t)body_gz.decl_node_index; addInstruction( - &fn_block, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata); - } - { + &body_gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata); + uint32_t body_last_tok = lastToken(tree, body_node); - ZirInstData rdata; - rdata.un_tok.operand = ZIR_REF_VOID_VALUE; - rdata.un_tok.src_tok = tokenIndexToRelative(&fn_block, body_last_tok); - addInstruction(&fn_block, ZIR_INST_RET_IMPLICIT, rdata); + ZirInstData rdata2; + rdata2.un_tok.operand = ZIR_REF_VOID_VALUE; + rdata2.un_tok.src_tok = tokenIndexToRelative(&body_gz, body_last_tok); + addInstruction(&body_gz, ZIR_INST_RET_IMPLICIT, rdata2); } - // Create func instruction (AstGen.zig:4396). - uint32_t func_ref; - // Read fn_block body before unstacking (upstream unstacks inside addFunc). - const uint32_t* fn_body = gzInstructionsSlice(&fn_block); - uint32_t fn_body_len = gzInstructionsLen(&fn_block); - gzUnstack(&fn_block); + // Read body before unstacking (AstGen.zig:12215-12218). + const uint32_t* fn_body = gzInstructionsSlice(&body_gz); + uint32_t fn_body_len = gzInstructionsLen(&body_gz); + gzUnstack(&body_gz); - if (is_inferred_error) { - func_ref = addFunc(&value_gz, node, body_node, decl_inst, ZIR_REF_NONE, - fn_body, fn_body_len, lbrace_line, lbrace_column); - // Patch the tag to func_inferred. - ag->inst_tags[func_ref - ZIR_REF_START_INDEX] = ZIR_INST_FUNC_INFERRED; - } else { - // void return: ret_ref = .none means void. - func_ref = addFunc(&value_gz, node, body_node, decl_inst, ZIR_REF_NONE, - fn_body, fn_body_len, lbrace_line, lbrace_column); + // Create func instruction (AstGen.zig:4476-4494). 
+ uint32_t func_ref = addFunc(&decl_gz, node, body_node, decl_inst, ret_ref, + ret_body, ret_body_len, fn_body, fn_body_len, lbrace_line, + lbrace_column, is_inferred_error); + + // Patch ret_body break_inline to point to func instruction + // (AstGen.zig:12199-12202). + if (ret_body_len > 0) { + uint32_t break_inst = ret_body[ret_body_len - 1]; + // The break_inline payload is at payload_index; block_inst is at + // offset 1 in the Break struct. + uint32_t break_payload + = ag->inst_datas[break_inst].break_data.payload_index; + ag->extra[break_payload + 1] = func_ref - ZIR_REF_START_INDEX; } + free(ret_body); - // break_inline returning func to declaration. - makeBreakInline(&value_gz, decl_inst, func_ref, AST_NODE_OFFSET_NONE); + // break_inline returning func to declaration + // (AstGen.zig:4495). + makeBreakInline(&decl_gz, decl_inst, func_ref, AST_NODE_OFFSET_NONE); // setDeclaration (AstGen.zig:4208-4225). DeclFlagsId decl_id = is_pub ? DECL_ID_PUB_CONST_SIMPLE : DECL_ID_CONST_SIMPLE; uint32_t name_str = identAsString(ag, fn_name_token); setDeclaration(ag, decl_inst, decl_line, decl_column, decl_id, name_str, - gzInstructionsSlice(&value_gz), gzInstructionsLen(&value_gz)); - gzUnstack(&value_gz); + gzInstructionsSlice(&decl_gz), gzInstructionsLen(&decl_gz)); + gzUnstack(&decl_gz); (void)gz; } @@ -2314,6 +6343,8 @@ static void comptimeDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, // Value sub-block (AstGen.zig:4675-4686). GenZir value_gz; memset(&value_gz, 0, sizeof(value_gz)); + value_gz.base.tag = SCOPE_GEN_ZIR; + value_gz.parent = NULL; value_gz.astgen = ag; value_gz.decl_node_index = node; value_gz.decl_line = decl_line; @@ -2351,6 +6382,8 @@ static void globalVarDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, // Set up init sub-block (AstGen.zig:4610). 
GenZir init_gz; memset(&init_gz, 0, sizeof(init_gz)); + init_gz.base.tag = SCOPE_GEN_ZIR; + init_gz.parent = NULL; init_gz.astgen = ag; init_gz.decl_node_index = node; init_gz.instructions_top = ag->scratch_inst_len; @@ -2364,10 +6397,10 @@ static void globalVarDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, uint32_t init_ref; if (init_node != UINT32_MAX) { - init_ref = expr(&init_gz, init_node); + init_ref = expr(&init_gz, &init_gz.base, init_node); } else { // extern variable: no init. Not handled yet. - ag->has_compile_errors = true; + SET_ERROR(ag); init_ref = ZIR_REF_VOID_VALUE; } @@ -2386,9 +6419,69 @@ static void globalVarDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts, (void)gz; } +// --- containerDecl (AstGen.zig:5468) --- +// Handles container declarations as expressions (struct{}, enum{}, etc.). + +static uint32_t containerDecl(GenZir* gz, Scope* scope, uint32_t node) { + AstGenCtx* ag = gz->astgen; + const Ast* tree = ag->tree; + AstNodeTag tag = tree->nodes.tags[node]; + AstData nd = tree->nodes.datas[node]; + + // Extract members based on node type (Ast.zig:2459-2470). + uint32_t members_buf[2]; + const uint32_t* members; + uint32_t members_len; + + switch (tag) { + case AST_NODE_CONTAINER_DECL_TWO: + case AST_NODE_CONTAINER_DECL_TWO_TRAILING: + case AST_NODE_TAGGED_UNION_TWO: + case AST_NODE_TAGGED_UNION_TWO_TRAILING: { + // lhs and rhs are optional member nodes (0 = none). + members_len = 0; + if (nd.lhs != 0) + members_buf[members_len++] = nd.lhs; + if (nd.rhs != 0) + members_buf[members_len++] = nd.rhs; + members = members_buf; + break; + } + case AST_NODE_CONTAINER_DECL: + case AST_NODE_CONTAINER_DECL_TRAILING: + case AST_NODE_TAGGED_UNION: + case AST_NODE_TAGGED_UNION_TRAILING: { + // extra_data[lhs..rhs] contains members. 
+ members = tree->extra_data.arr + nd.lhs; + members_len = nd.rhs - nd.lhs; + break; + } + case AST_NODE_CONTAINER_DECL_ARG: + case AST_NODE_CONTAINER_DECL_ARG_TRAILING: + case AST_NODE_TAGGED_UNION_ENUM_TAG: + case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: { + // lhs is arg node, rhs is extra index → SubRange(start, end). + uint32_t start = tree->extra_data.arr[nd.rhs]; + uint32_t end = tree->extra_data.arr[nd.rhs + 1]; + members = tree->extra_data.arr + start; + members_len = end - start; + break; + } + default: + SET_ERROR(ag); + return ZIR_REF_VOID_VALUE; + } + + // For now, only handle struct containers (AstGen.zig:5481-5496). + // TODO: handle union/enum/opaque. + uint32_t decl_inst = structDeclInner(ag, gz, node, members, members_len); + (void)scope; + return decl_inst + ZIR_REF_START_INDEX; +} + // --- structDeclInner (AstGen.zig:4926) --- -static void structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node, +static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node, const uint32_t* members, uint32_t members_len) { uint32_t decl_inst = reserveInstructionIndex(ag); @@ -2397,7 +6490,7 @@ static void structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node, StructDeclSmall small; memset(&small, 0, sizeof(small)); setStruct(ag, decl_inst, node, small, 0, 0, 0); - return; + return decl_inst; } // Non-empty container (AstGen.zig:4973-5189). 
@@ -2432,11 +6525,19 @@ static void structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node, case AST_NODE_FN_DECL: fnDecl(ag, gz, wip_decl_insts, &decl_idx, member_node); break; - // TODO: AST_NODE_GLOBAL_VAR_DECL, AST_NODE_LOCAL_VAR_DECL, - // AST_NODE_ALIGNED_VAR_DECL, - // AST_NODE_FN_PROTO_*, container fields + case AST_NODE_USINGNAMESPACE: + case AST_NODE_GLOBAL_VAR_DECL: + case AST_NODE_LOCAL_VAR_DECL: + case AST_NODE_ALIGNED_VAR_DECL: + globalVarDecl(ag, gz, wip_decl_insts, &decl_idx, member_node); + break; + case AST_NODE_CONTAINER_FIELD_INIT: + case AST_NODE_CONTAINER_FIELD_ALIGN: + case AST_NODE_CONTAINER_FIELD: + // Struct fields — skip for now (counted but not emitted). + break; default: - ag->has_compile_errors = true; + SET_ERROR(ag); break; } } @@ -2454,6 +6555,1179 @@ static void structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node, } free(wip_decl_insts); + return decl_inst; +} + +// --- AstRlAnnotate (AstRlAnnotate.zig) --- +// Pre-pass to determine which AST nodes need result locations. + +typedef struct { + bool have_type; + bool have_ptr; +} RlResultInfo; + +#define RL_RI_NONE ((RlResultInfo) { false, false }) +#define RL_RI_TYPED_PTR ((RlResultInfo) { true, true }) +#define RL_RI_INFERRED_PTR ((RlResultInfo) { false, true }) +#define RL_RI_TYPE_ONLY ((RlResultInfo) { true, false }) + +// Block for label tracking (AstRlAnnotate.zig:56-62). +typedef struct RlBlock { + struct RlBlock* parent; + uint32_t label_token; // UINT32_MAX = no label + bool is_loop; + RlResultInfo ri; + bool consumes_res_ptr; +} RlBlock; + +static void nodesNeedRlAdd(AstGenCtx* ag, uint32_t node) { + if (ag->nodes_need_rl_len >= ag->nodes_need_rl_cap) { + uint32_t new_cap + = ag->nodes_need_rl_cap == 0 ? 
16 : ag->nodes_need_rl_cap * 2; + ag->nodes_need_rl + = realloc(ag->nodes_need_rl, new_cap * sizeof(uint32_t)); + ag->nodes_need_rl_cap = new_cap; + } + ag->nodes_need_rl[ag->nodes_need_rl_len++] = node; +} + +static bool nodesNeedRlContains(const AstGenCtx* ag, uint32_t node) { + for (uint32_t i = 0; i < ag->nodes_need_rl_len; i++) { + if (ag->nodes_need_rl[i] == node) + return true; + } + return false; +} + +// Compare two identifier tokens by their source text. +static bool rlTokenIdentEqual( + const Ast* tree, uint32_t tok_a, uint32_t tok_b) { + const char* src = tree->source; + uint32_t a_start = tree->tokens.starts[tok_a]; + uint32_t b_start = tree->tokens.starts[tok_b]; + for (uint32_t i = 0;; i++) { + char ca = src[a_start + i]; + char cb = src[b_start + i]; + bool a_id = (ca >= 'a' && ca <= 'z') || (ca >= 'A' && ca <= 'Z') + || (ca >= '0' && ca <= '9') || ca == '_'; + bool b_id = (cb >= 'a' && cb <= 'z') || (cb >= 'A' && cb <= 'Z') + || (cb >= '0' && cb <= '9') || cb == '_'; + if (!a_id && !b_id) + return true; + if (!a_id || !b_id) + return false; + if (ca != cb) + return false; + } +} + +// Forward declarations. +static bool rlExpr( + AstGenCtx* ag, uint32_t node, RlBlock* block, RlResultInfo ri); +static void rlContainerDecl(AstGenCtx* ag, RlBlock* block, uint32_t node); +static bool rlBlockExpr(AstGenCtx* ag, RlBlock* parent_block, RlResultInfo ri, + uint32_t node, const uint32_t* stmts, uint32_t count); +static bool rlBuiltinCall(AstGenCtx* ag, RlBlock* block, uint32_t node, + const uint32_t* args, uint32_t nargs); + +// containerDecl (AstRlAnnotate.zig:89-127). +static void rlContainerDecl(AstGenCtx* ag, RlBlock* block, uint32_t node) { + const Ast* tree = ag->tree; + AstNodeTag tag = tree->nodes.tags[node]; + AstData nd = tree->nodes.datas[node]; + + // Extract arg and members depending on variant. + // All container decls: recurse arg with type_only, members with none. + // (The keyword type — struct/union/enum/opaque — doesn't matter for RL.) 
+ uint32_t member_buf[2]; + const uint32_t* members = NULL; + uint32_t members_len = 0; + uint32_t arg_node = 0; // 0 = no arg + + switch (tag) { + case AST_NODE_CONTAINER_DECL_TWO: + case AST_NODE_CONTAINER_DECL_TWO_TRAILING: + case AST_NODE_TAGGED_UNION_TWO: + case AST_NODE_TAGGED_UNION_TWO_TRAILING: { + uint32_t idx = 0; + if (nd.lhs != 0) + member_buf[idx++] = nd.lhs; + if (nd.rhs != 0) + member_buf[idx++] = nd.rhs; + members = member_buf; + members_len = idx; + break; + } + case AST_NODE_CONTAINER_DECL: + case AST_NODE_CONTAINER_DECL_TRAILING: + case AST_NODE_TAGGED_UNION: + case AST_NODE_TAGGED_UNION_TRAILING: + members = tree->extra_data.arr + nd.lhs; + members_len = nd.rhs - nd.lhs; + break; + case AST_NODE_CONTAINER_DECL_ARG: + case AST_NODE_CONTAINER_DECL_ARG_TRAILING: + case AST_NODE_TAGGED_UNION_ENUM_TAG: + case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: { + arg_node = nd.lhs; + uint32_t extra_idx = nd.rhs; + uint32_t start = tree->extra_data.arr[extra_idx]; + uint32_t end = tree->extra_data.arr[extra_idx + 1]; + members = tree->extra_data.arr + start; + members_len = end - start; + break; + } + default: + return; + } + + if (arg_node != 0) + (void)rlExpr(ag, arg_node, block, RL_RI_TYPE_ONLY); + for (uint32_t i = 0; i < members_len; i++) + (void)rlExpr(ag, members[i], block, RL_RI_NONE); +} + +// blockExpr (AstRlAnnotate.zig:787-814). 
+static bool rlBlockExpr(AstGenCtx* ag, RlBlock* parent_block, RlResultInfo ri, + uint32_t node, const uint32_t* stmts, uint32_t count) { + const Ast* tree = ag->tree; + uint32_t lbrace = tree->nodes.main_tokens[node]; + bool is_labeled + = (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON + && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER); + + if (is_labeled) { + RlBlock new_block; + new_block.parent = parent_block; + new_block.label_token = lbrace - 2; + new_block.is_loop = false; + new_block.ri = ri; + new_block.consumes_res_ptr = false; + for (uint32_t i = 0; i < count; i++) + (void)rlExpr(ag, stmts[i], &new_block, RL_RI_NONE); + if (new_block.consumes_res_ptr) + nodesNeedRlAdd(ag, node); + return new_block.consumes_res_ptr; + } else { + for (uint32_t i = 0; i < count; i++) + (void)rlExpr(ag, stmts[i], parent_block, RL_RI_NONE); + return false; + } +} + +// builtinCall (AstRlAnnotate.zig:816-1100). +// Simplified: no builtin currently consumes its result location, +// so we just recurse into all args with RL_RI_NONE. +static bool rlBuiltinCall(AstGenCtx* ag, RlBlock* block, uint32_t node, + const uint32_t* args, uint32_t nargs) { + (void)node; + for (uint32_t i = 0; i < nargs; i++) + (void)rlExpr(ag, args[i], block, RL_RI_NONE); + return false; +} + +// expr (AstRlAnnotate.zig:130-771). +static bool rlExpr( + AstGenCtx* ag, uint32_t node, RlBlock* block, RlResultInfo ri) { + const Ast* tree = ag->tree; + AstNodeTag tag = tree->nodes.tags[node]; + AstData nd = tree->nodes.datas[node]; + + switch (tag) { + // Unreachable nodes (AstRlAnnotate.zig:133-142). + case AST_NODE_ROOT: + case AST_NODE_SWITCH_CASE_ONE: + case AST_NODE_SWITCH_CASE_INLINE_ONE: + case AST_NODE_SWITCH_CASE: + case AST_NODE_SWITCH_CASE_INLINE: + case AST_NODE_SWITCH_RANGE: + case AST_NODE_FOR_RANGE: + case AST_NODE_ASM_OUTPUT: + case AST_NODE_ASM_INPUT: + return false; // unreachable in upstream + + // errdefer (AstRlAnnotate.zig:144-147). 
+ case AST_NODE_ERRDEFER: + (void)rlExpr(ag, nd.rhs, block, RL_RI_NONE); + return false; + + // defer (AstRlAnnotate.zig:148-151). + case AST_NODE_DEFER: + (void)rlExpr(ag, nd.rhs, block, RL_RI_NONE); + return false; + + // container_field (AstRlAnnotate.zig:153-167). + case AST_NODE_CONTAINER_FIELD_INIT: { + // lhs = type_expr, rhs = value_expr + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + if (nd.rhs != 0) + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + } + case AST_NODE_CONTAINER_FIELD_ALIGN: { + // lhs = type_expr, rhs = align_expr + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + if (nd.rhs != 0) + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + } + case AST_NODE_CONTAINER_FIELD: { + // lhs = type_expr, rhs = extra index to {align_expr, value_expr} + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + if (nd.rhs != 0) { + uint32_t align_node = tree->extra_data.arr[nd.rhs]; + uint32_t value_node = tree->extra_data.arr[nd.rhs + 1]; + if (align_node != 0) + (void)rlExpr(ag, align_node, block, RL_RI_TYPE_ONLY); + if (value_node != 0) + (void)rlExpr(ag, value_node, block, RL_RI_TYPE_ONLY); + } + return false; + } + + // test_decl (AstRlAnnotate.zig:168-171). + case AST_NODE_TEST_DECL: + (void)rlExpr(ag, nd.rhs, block, RL_RI_NONE); + return false; + + // var_decl (AstRlAnnotate.zig:172-202). 
+ case AST_NODE_GLOBAL_VAR_DECL: + case AST_NODE_LOCAL_VAR_DECL: + case AST_NODE_SIMPLE_VAR_DECL: + case AST_NODE_ALIGNED_VAR_DECL: { + uint32_t type_node = 0; + uint32_t init_node = 0; + uint32_t mut_token = tree->nodes.main_tokens[node]; + if (tag == AST_NODE_SIMPLE_VAR_DECL) { + type_node = nd.lhs; + init_node = nd.rhs; + } else if (tag == AST_NODE_LOCAL_VAR_DECL + || tag == AST_NODE_GLOBAL_VAR_DECL) { + type_node = tree->extra_data.arr[nd.lhs]; + init_node = nd.rhs; + } else { // ALIGNED_VAR_DECL + init_node = nd.rhs; + } + RlResultInfo init_ri; + if (type_node != 0) { + (void)rlExpr(ag, type_node, block, RL_RI_TYPE_ONLY); + init_ri = RL_RI_TYPED_PTR; + } else { + init_ri = RL_RI_INFERRED_PTR; + } + if (init_node == 0) + return false; + bool is_const = (tree->source[tree->tokens.starts[mut_token]] == 'c'); + if (is_const) { + bool init_consumes_rl = rlExpr(ag, init_node, block, init_ri); + if (init_consumes_rl) + nodesNeedRlAdd(ag, node); + return false; + } else { + (void)rlExpr(ag, init_node, block, init_ri); + return false; + } + } + + // assign (AstRlAnnotate.zig:212-217). + case AST_NODE_ASSIGN: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPED_PTR); + return false; + + // compound assign (AstRlAnnotate.zig:218-240). + case AST_NODE_ASSIGN_SHL: + case AST_NODE_ASSIGN_SHL_SAT: + case AST_NODE_ASSIGN_SHR: + case AST_NODE_ASSIGN_BIT_AND: + case AST_NODE_ASSIGN_BIT_OR: + case AST_NODE_ASSIGN_BIT_XOR: + case AST_NODE_ASSIGN_DIV: + case AST_NODE_ASSIGN_SUB: + case AST_NODE_ASSIGN_SUB_WRAP: + case AST_NODE_ASSIGN_SUB_SAT: + case AST_NODE_ASSIGN_MOD: + case AST_NODE_ASSIGN_ADD: + case AST_NODE_ASSIGN_ADD_WRAP: + case AST_NODE_ASSIGN_ADD_SAT: + case AST_NODE_ASSIGN_MUL: + case AST_NODE_ASSIGN_MUL_WRAP: + case AST_NODE_ASSIGN_MUL_SAT: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_NONE); + return false; + + // shl/shr (AstRlAnnotate.zig:241-246). 
+ case AST_NODE_SHL: + case AST_NODE_SHR: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + + // binary arithmetic/comparison (AstRlAnnotate.zig:247-274). + case AST_NODE_ADD: + case AST_NODE_ADD_WRAP: + case AST_NODE_ADD_SAT: + case AST_NODE_SUB: + case AST_NODE_SUB_WRAP: + case AST_NODE_SUB_SAT: + case AST_NODE_MUL: + case AST_NODE_MUL_WRAP: + case AST_NODE_MUL_SAT: + case AST_NODE_DIV: + case AST_NODE_MOD: + case AST_NODE_SHL_SAT: + case AST_NODE_BIT_AND: + case AST_NODE_BIT_OR: + case AST_NODE_BIT_XOR: + case AST_NODE_BANG_EQUAL: + case AST_NODE_EQUAL_EQUAL: + case AST_NODE_GREATER_THAN: + case AST_NODE_GREATER_OR_EQUAL: + case AST_NODE_LESS_THAN: + case AST_NODE_LESS_OR_EQUAL: + case AST_NODE_ARRAY_CAT: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_NONE); + return false; + + // array_mult (AstRlAnnotate.zig:276-281). + case AST_NODE_ARRAY_MULT: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + + // error_union, merge_error_sets (AstRlAnnotate.zig:282-287). + case AST_NODE_ERROR_UNION: + case AST_NODE_MERGE_ERROR_SETS: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_NONE); + return false; + + // bool_and, bool_or (AstRlAnnotate.zig:288-295). + case AST_NODE_BOOL_AND: + case AST_NODE_BOOL_OR: + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + + // bool_not (AstRlAnnotate.zig:296-299). + case AST_NODE_BOOL_NOT: + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + return false; + + // bit_not, negation, negation_wrap (AstRlAnnotate.zig:300-303). + case AST_NODE_BIT_NOT: + case AST_NODE_NEGATION: + case AST_NODE_NEGATION_WRAP: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // Leaves (AstRlAnnotate.zig:305-320). 
+ case AST_NODE_IDENTIFIER: + case AST_NODE_STRING_LITERAL: + case AST_NODE_MULTILINE_STRING_LITERAL: + case AST_NODE_NUMBER_LITERAL: + case AST_NODE_UNREACHABLE_LITERAL: + case AST_NODE_ASM_SIMPLE: + case AST_NODE_ASM: + case AST_NODE_ASM_LEGACY: + case AST_NODE_ENUM_LITERAL: + case AST_NODE_ERROR_VALUE: + case AST_NODE_ANYFRAME_LITERAL: + case AST_NODE_CONTINUE: + case AST_NODE_CHAR_LITERAL: + case AST_NODE_ERROR_SET_DECL: + return false; + + // builtin_call (AstRlAnnotate.zig:322-330). + case AST_NODE_BUILTIN_CALL_TWO: + case AST_NODE_BUILTIN_CALL_TWO_COMMA: { + uint32_t args[2]; + uint32_t nargs = 0; + if (nd.lhs != 0) + args[nargs++] = nd.lhs; + if (nd.rhs != 0) + args[nargs++] = nd.rhs; + return rlBuiltinCall(ag, block, node, args, nargs); + } + case AST_NODE_BUILTIN_CALL: + case AST_NODE_BUILTIN_CALL_COMMA: { + uint32_t start = nd.lhs; + uint32_t end = nd.rhs; + return rlBuiltinCall( + ag, block, node, tree->extra_data.arr + start, end - start); + } + + // call (AstRlAnnotate.zig:332-351). + case AST_NODE_CALL_ONE: + case AST_NODE_CALL_ONE_COMMA: { + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + if (nd.rhs != 0) + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + } + case AST_NODE_CALL: + case AST_NODE_CALL_COMMA: { + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + uint32_t start = tree->extra_data.arr[nd.rhs]; + uint32_t end = tree->extra_data.arr[nd.rhs + 1]; + for (uint32_t i = start; i < end; i++) + (void)rlExpr(ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY); + return false; + } + + // return (AstRlAnnotate.zig:353-361). + case AST_NODE_RETURN: + if (nd.lhs != 0) { + bool ret_consumes_rl = rlExpr(ag, nd.lhs, block, RL_RI_TYPED_PTR); + if (ret_consumes_rl) + nodesNeedRlAdd(ag, node); + } + return false; + + // field_access (AstRlAnnotate.zig:363-367). + case AST_NODE_FIELD_ACCESS: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // if_simple, if (AstRlAnnotate.zig:369-387). 
+ case AST_NODE_IF_SIMPLE: + case AST_NODE_IF: { + uint32_t cond_node = nd.lhs; + uint32_t then_node, else_node = 0; + if (tag == AST_NODE_IF_SIMPLE) { + then_node = nd.rhs; + } else { + then_node = tree->extra_data.arr[nd.rhs]; + else_node = tree->extra_data.arr[nd.rhs + 1]; + } + // Detect payload/error token. + uint32_t last_cond_tok = lastToken(tree, cond_node); + uint32_t pipe_tok = last_cond_tok + 2; + bool has_payload = (pipe_tok < tree->tokens.len + && tree->tokens.tags[pipe_tok] == TOKEN_PIPE); + bool has_error = false; + if (else_node != 0) { + uint32_t else_tok = lastToken(tree, then_node) + 1; + has_error = (else_tok + 1 < tree->tokens.len + && tree->tokens.tags[else_tok + 1] == TOKEN_PIPE); + } + if (has_error || has_payload) + (void)rlExpr(ag, cond_node, block, RL_RI_NONE); + else + (void)rlExpr(ag, cond_node, block, RL_RI_TYPE_ONLY); + + if (else_node != 0) { + bool then_uses = rlExpr(ag, then_node, block, ri); + bool else_uses = rlExpr(ag, else_node, block, ri); + bool uses_rl = then_uses || else_uses; + if (uses_rl) + nodesNeedRlAdd(ag, node); + return uses_rl; + } else { + (void)rlExpr(ag, then_node, block, RL_RI_NONE); + return false; + } + } + + // while (AstRlAnnotate.zig:389-419). + case AST_NODE_WHILE_SIMPLE: + case AST_NODE_WHILE_CONT: + case AST_NODE_WHILE: { + uint32_t cond_node = nd.lhs; + uint32_t body_node, cont_node = 0, else_node = 0; + if (tag == AST_NODE_WHILE_SIMPLE) { + body_node = nd.rhs; + } else if (tag == AST_NODE_WHILE_CONT) { + cont_node = tree->extra_data.arr[nd.rhs]; + body_node = tree->extra_data.arr[nd.rhs + 1]; + } else { + cont_node = tree->extra_data.arr[nd.rhs]; + body_node = tree->extra_data.arr[nd.rhs + 1]; + else_node = tree->extra_data.arr[nd.rhs + 2]; + } + uint32_t main_tok = tree->nodes.main_tokens[node]; + bool is_labeled + = (main_tok >= 2 && tree->tokens.tags[main_tok - 1] == TOKEN_COLON + && tree->tokens.tags[main_tok - 2] == TOKEN_IDENTIFIER); + uint32_t label_token = is_labeled ? 
main_tok - 2 : UINT32_MAX; + + // Detect payload/error. + uint32_t last_cond_tok = lastToken(tree, cond_node); + uint32_t pipe_tok = last_cond_tok + 2; + bool has_payload = (pipe_tok < tree->tokens.len + && tree->tokens.tags[pipe_tok] == TOKEN_PIPE); + // Error token detection for while: check for else |err|. + bool has_error = false; + if (else_node != 0) { + uint32_t else_tok = lastToken(tree, body_node) + 1; + has_error = (else_tok + 1 < tree->tokens.len + && tree->tokens.tags[else_tok + 1] == TOKEN_PIPE); + } + if (has_error || has_payload) + (void)rlExpr(ag, cond_node, block, RL_RI_NONE); + else + (void)rlExpr(ag, cond_node, block, RL_RI_TYPE_ONLY); + + RlBlock new_block; + new_block.parent = block; + new_block.label_token = label_token; + new_block.is_loop = true; + new_block.ri = ri; + new_block.consumes_res_ptr = false; + + if (cont_node != 0) + (void)rlExpr(ag, cont_node, &new_block, RL_RI_NONE); + (void)rlExpr(ag, body_node, &new_block, RL_RI_NONE); + bool else_consumes = false; + if (else_node != 0) + else_consumes = rlExpr(ag, else_node, block, ri); + if (new_block.consumes_res_ptr || else_consumes) { + nodesNeedRlAdd(ag, node); + return true; + } + return false; + } + + // for (AstRlAnnotate.zig:421-454). 
+ case AST_NODE_FOR_SIMPLE: + case AST_NODE_FOR: { + uint32_t input_buf[16]; + const uint32_t* inputs = NULL; + uint32_t num_inputs = 0; + uint32_t body_node = 0; + uint32_t else_node = 0; + + if (tag == AST_NODE_FOR_SIMPLE) { + input_buf[0] = nd.lhs; + inputs = input_buf; + num_inputs = 1; + body_node = nd.rhs; + } else { + AstFor for_data; + memcpy(&for_data, &nd.rhs, sizeof(AstFor)); + num_inputs = for_data.inputs; + if (num_inputs > 16) + num_inputs = 16; + for (uint32_t i = 0; i < num_inputs; i++) + input_buf[i] = tree->extra_data.arr[nd.lhs + i]; + inputs = input_buf; + body_node = tree->extra_data.arr[nd.lhs + num_inputs]; + if (for_data.has_else) + else_node = tree->extra_data.arr[nd.lhs + num_inputs + 1]; + } + + uint32_t main_tok = tree->nodes.main_tokens[node]; + bool is_labeled + = (main_tok >= 2 && tree->tokens.tags[main_tok - 1] == TOKEN_COLON + && tree->tokens.tags[main_tok - 2] == TOKEN_IDENTIFIER); + uint32_t label_token = is_labeled ? main_tok - 2 : UINT32_MAX; + + for (uint32_t i = 0; i < num_inputs; i++) { + uint32_t input = inputs[i]; + if (tree->nodes.tags[input] == AST_NODE_FOR_RANGE) { + AstData range_nd = tree->nodes.datas[input]; + (void)rlExpr(ag, range_nd.lhs, block, RL_RI_TYPE_ONLY); + if (range_nd.rhs != 0) + (void)rlExpr(ag, range_nd.rhs, block, RL_RI_TYPE_ONLY); + } else { + (void)rlExpr(ag, input, block, RL_RI_NONE); + } + } + + RlBlock new_block; + new_block.parent = block; + new_block.label_token = label_token; + new_block.is_loop = true; + new_block.ri = ri; + new_block.consumes_res_ptr = false; + + (void)rlExpr(ag, body_node, &new_block, RL_RI_NONE); + bool else_consumes = false; + if (else_node != 0) + else_consumes = rlExpr(ag, else_node, block, ri); + if (new_block.consumes_res_ptr || else_consumes) { + nodesNeedRlAdd(ag, node); + return true; + } + return false; + } + + // slice (AstRlAnnotate.zig:456-480). 
+ case AST_NODE_SLICE_OPEN: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + case AST_NODE_SLICE: { + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + uint32_t start = tree->extra_data.arr[nd.rhs]; + uint32_t end = tree->extra_data.arr[nd.rhs + 1]; + (void)rlExpr(ag, start, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, end, block, RL_RI_TYPE_ONLY); + return false; + } + case AST_NODE_SLICE_SENTINEL: { + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + AstSliceSentinel ss; + ss.start = tree->extra_data.arr[nd.rhs]; + ss.end = tree->extra_data.arr[nd.rhs + 1]; + ss.sentinel = tree->extra_data.arr[nd.rhs + 2]; + (void)rlExpr(ag, ss.start, block, RL_RI_TYPE_ONLY); + if (ss.end != 0) + (void)rlExpr(ag, ss.end, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, ss.sentinel, block, RL_RI_NONE); + return false; + } + + // deref (AstRlAnnotate.zig:481-484). + case AST_NODE_DEREF: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // address_of (AstRlAnnotate.zig:485-488). + case AST_NODE_ADDRESS_OF: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // optional_type (AstRlAnnotate.zig:489-492). + case AST_NODE_OPTIONAL_TYPE: + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + return false; + + // try, nosuspend (AstRlAnnotate.zig:493-495). + case AST_NODE_TRY: + case AST_NODE_NOSUSPEND: + return rlExpr(ag, nd.lhs, block, ri); + + // grouped_expression, unwrap_optional (AstRlAnnotate.zig:496-498). + case AST_NODE_GROUPED_EXPRESSION: + case AST_NODE_UNWRAP_OPTIONAL: + return rlExpr(ag, nd.lhs, block, ri); + + // block (AstRlAnnotate.zig:500-508). 
+ case AST_NODE_BLOCK_TWO: + case AST_NODE_BLOCK_TWO_SEMICOLON: { + uint32_t stmts[2]; + uint32_t count = 0; + if (nd.lhs != 0) + stmts[count++] = nd.lhs; + if (nd.rhs != 0) + stmts[count++] = nd.rhs; + return rlBlockExpr(ag, block, ri, node, stmts, count); + } + case AST_NODE_BLOCK: + case AST_NODE_BLOCK_SEMICOLON: + return rlBlockExpr(ag, block, ri, node, tree->extra_data.arr + nd.lhs, + nd.rhs - nd.lhs); + + // anyframe_type (AstRlAnnotate.zig:509-513). + case AST_NODE_ANYFRAME_TYPE: + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + + // catch/orelse (AstRlAnnotate.zig:514-522). + case AST_NODE_CATCH: + case AST_NODE_ORELSE: { + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + bool rhs_consumes = rlExpr(ag, nd.rhs, block, ri); + if (rhs_consumes) + nodesNeedRlAdd(ag, node); + return rhs_consumes; + } + + // ptr_type (AstRlAnnotate.zig:524-546). + case AST_NODE_PTR_TYPE_ALIGNED: + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + case AST_NODE_PTR_TYPE_SENTINEL: + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + case AST_NODE_PTR_TYPE: { + AstPtrType pt; + pt.sentinel = tree->extra_data.arr[nd.lhs]; + pt.align_node = tree->extra_data.arr[nd.lhs + 1]; + pt.addrspace_node = tree->extra_data.arr[nd.lhs + 2]; + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + if (pt.sentinel != 0) + (void)rlExpr(ag, pt.sentinel, block, RL_RI_TYPE_ONLY); + if (pt.align_node != 0) + (void)rlExpr(ag, pt.align_node, block, RL_RI_TYPE_ONLY); + if (pt.addrspace_node != 0) + (void)rlExpr(ag, pt.addrspace_node, block, RL_RI_TYPE_ONLY); + return false; + } + case AST_NODE_PTR_TYPE_BIT_RANGE: { + AstPtrTypeBitRange pt; + pt.sentinel = tree->extra_data.arr[nd.lhs]; + pt.align_node = tree->extra_data.arr[nd.lhs + 1]; + pt.addrspace_node = tree->extra_data.arr[nd.lhs + 2]; + pt.bit_range_start = 
tree->extra_data.arr[nd.lhs + 3]; + pt.bit_range_end = tree->extra_data.arr[nd.lhs + 4]; + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + if (pt.sentinel != 0) + (void)rlExpr(ag, pt.sentinel, block, RL_RI_TYPE_ONLY); + if (pt.align_node != 0) + (void)rlExpr(ag, pt.align_node, block, RL_RI_TYPE_ONLY); + if (pt.addrspace_node != 0) + (void)rlExpr(ag, pt.addrspace_node, block, RL_RI_TYPE_ONLY); + if (pt.bit_range_start != 0) { + (void)rlExpr(ag, pt.bit_range_start, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, pt.bit_range_end, block, RL_RI_TYPE_ONLY); + } + return false; + } + + // container_decl (AstRlAnnotate.zig:548-564). + case AST_NODE_CONTAINER_DECL: + case AST_NODE_CONTAINER_DECL_TRAILING: + case AST_NODE_CONTAINER_DECL_ARG: + case AST_NODE_CONTAINER_DECL_ARG_TRAILING: + case AST_NODE_CONTAINER_DECL_TWO: + case AST_NODE_CONTAINER_DECL_TWO_TRAILING: + case AST_NODE_TAGGED_UNION: + case AST_NODE_TAGGED_UNION_TRAILING: + case AST_NODE_TAGGED_UNION_ENUM_TAG: + case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: + case AST_NODE_TAGGED_UNION_TWO: + case AST_NODE_TAGGED_UNION_TWO_TRAILING: + rlContainerDecl(ag, block, node); + return false; + + // break (AstRlAnnotate.zig:566-596). + case AST_NODE_BREAK: { + uint32_t opt_label_tok = nd.lhs; // 0 = no label + uint32_t rhs_node = nd.rhs; // 0 = void break + if (rhs_node == 0) + return false; + + RlBlock* opt_cur_block = block; + if (opt_label_tok != 0) { + // Labeled break: find matching block. + while (opt_cur_block != NULL) { + if (opt_cur_block->label_token != UINT32_MAX + && rlTokenIdentEqual( + tree, opt_cur_block->label_token, opt_label_tok)) + break; + opt_cur_block = opt_cur_block->parent; + } + } else { + // No label: breaking from innermost loop. 
+ while (opt_cur_block != NULL) { + if (opt_cur_block->is_loop) + break; + opt_cur_block = opt_cur_block->parent; + } + } + + if (opt_cur_block != NULL) { + bool consumes = rlExpr(ag, rhs_node, block, opt_cur_block->ri); + if (consumes) + opt_cur_block->consumes_res_ptr = true; + } else { + (void)rlExpr(ag, rhs_node, block, RL_RI_NONE); + } + return false; + } + + // array_type (AstRlAnnotate.zig:598-611). + case AST_NODE_ARRAY_TYPE: + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + case AST_NODE_ARRAY_TYPE_SENTINEL: { + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + uint32_t elem_type = tree->extra_data.arr[nd.rhs + 1]; + uint32_t sentinel = tree->extra_data.arr[nd.rhs]; + (void)rlExpr(ag, elem_type, block, RL_RI_TYPE_ONLY); + (void)rlExpr(ag, sentinel, block, RL_RI_TYPE_ONLY); + return false; + } + + // array_access (AstRlAnnotate.zig:612-617). + case AST_NODE_ARRAY_ACCESS: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY); + return false; + + // comptime (AstRlAnnotate.zig:618-623). + case AST_NODE_COMPTIME: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // switch (AstRlAnnotate.zig:624-650). + case AST_NODE_SWITCH: + case AST_NODE_SWITCH_COMMA: { + uint32_t cond_node = nd.lhs; + uint32_t extra_idx = nd.rhs; + uint32_t cases_start = tree->extra_data.arr[extra_idx]; + uint32_t cases_end = tree->extra_data.arr[extra_idx + 1]; + + (void)rlExpr(ag, cond_node, block, RL_RI_NONE); + + bool any_consumed = false; + for (uint32_t ci = cases_start; ci < cases_end; ci++) { + uint32_t case_node = tree->extra_data.arr[ci]; + AstNodeTag ct = tree->nodes.tags[case_node]; + AstData cd = tree->nodes.datas[case_node]; + + // Process case values. 
+ if (ct == AST_NODE_SWITCH_CASE_ONE + || ct == AST_NODE_SWITCH_CASE_INLINE_ONE) { + if (cd.lhs != 0) { + if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE) { + AstData rd = tree->nodes.datas[cd.lhs]; + (void)rlExpr(ag, rd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, rd.rhs, block, RL_RI_NONE); + } else { + (void)rlExpr(ag, cd.lhs, block, RL_RI_NONE); + } + } + } else { + // SWITCH_CASE / SWITCH_CASE_INLINE: SubRange[lhs] + uint32_t items_start = tree->extra_data.arr[cd.lhs]; + uint32_t items_end = tree->extra_data.arr[cd.lhs + 1]; + for (uint32_t ii = items_start; ii < items_end; ii++) { + uint32_t item = tree->extra_data.arr[ii]; + if (tree->nodes.tags[item] == AST_NODE_SWITCH_RANGE) { + AstData rd = tree->nodes.datas[item]; + (void)rlExpr(ag, rd.lhs, block, RL_RI_NONE); + (void)rlExpr(ag, rd.rhs, block, RL_RI_NONE); + } else { + (void)rlExpr(ag, item, block, RL_RI_NONE); + } + } + } + // Process case target expr. + if (rlExpr(ag, cd.rhs, block, ri)) + any_consumed = true; + } + if (any_consumed) + nodesNeedRlAdd(ag, node); + return any_consumed; + } + + // suspend (AstRlAnnotate.zig:651-654). + case AST_NODE_SUSPEND: + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // resume (AstRlAnnotate.zig:655-658). + case AST_NODE_RESUME: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + + // array_init (AstRlAnnotate.zig:660-695). + case AST_NODE_ARRAY_INIT_ONE: + case AST_NODE_ARRAY_INIT_ONE_COMMA: + case AST_NODE_ARRAY_INIT_DOT_TWO: + case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: + case AST_NODE_ARRAY_INIT_DOT: + case AST_NODE_ARRAY_INIT_DOT_COMMA: + case AST_NODE_ARRAY_INIT: + case AST_NODE_ARRAY_INIT_COMMA: { + // Extract type_expr and elements. 
+ uint32_t type_expr = 0; + uint32_t elem_buf[2]; + const uint32_t* elems = NULL; + uint32_t nelem = 0; + switch (tag) { + case AST_NODE_ARRAY_INIT_ONE: + case AST_NODE_ARRAY_INIT_ONE_COMMA: + type_expr = nd.lhs; + if (nd.rhs != 0) { + elem_buf[0] = nd.rhs; + elems = elem_buf; + nelem = 1; + } + break; + case AST_NODE_ARRAY_INIT_DOT_TWO: + case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: { + uint32_t idx = 0; + if (nd.lhs != 0) + elem_buf[idx++] = nd.lhs; + if (nd.rhs != 0) + elem_buf[idx++] = nd.rhs; + elems = elem_buf; + nelem = idx; + break; + } + case AST_NODE_ARRAY_INIT_DOT: + case AST_NODE_ARRAY_INIT_DOT_COMMA: + elems = tree->extra_data.arr + nd.lhs; + nelem = nd.rhs - nd.lhs; + break; + case AST_NODE_ARRAY_INIT: + case AST_NODE_ARRAY_INIT_COMMA: { + type_expr = nd.lhs; + uint32_t start = tree->extra_data.arr[nd.rhs]; + uint32_t end = tree->extra_data.arr[nd.rhs + 1]; + elems = tree->extra_data.arr + start; + nelem = end - start; + break; + } + default: + break; + } + if (type_expr != 0) { + (void)rlExpr(ag, type_expr, block, RL_RI_NONE); + for (uint32_t i = 0; i < nelem; i++) + (void)rlExpr(ag, elems[i], block, RL_RI_TYPE_ONLY); + return false; + } + if (ri.have_type) { + for (uint32_t i = 0; i < nelem; i++) + (void)rlExpr(ag, elems[i], block, ri); + return ri.have_ptr; + } else { + for (uint32_t i = 0; i < nelem; i++) + (void)rlExpr(ag, elems[i], block, RL_RI_NONE); + return false; + } + } + + // struct_init (AstRlAnnotate.zig:697-732). 
+ case AST_NODE_STRUCT_INIT_ONE: + case AST_NODE_STRUCT_INIT_ONE_COMMA: + case AST_NODE_STRUCT_INIT_DOT_TWO: + case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: + case AST_NODE_STRUCT_INIT_DOT: + case AST_NODE_STRUCT_INIT_DOT_COMMA: + case AST_NODE_STRUCT_INIT: + case AST_NODE_STRUCT_INIT_COMMA: { + uint32_t type_expr = 0; + uint32_t field_buf[2]; + const uint32_t* fields = NULL; + uint32_t nfields = 0; + switch (tag) { + case AST_NODE_STRUCT_INIT_ONE: + case AST_NODE_STRUCT_INIT_ONE_COMMA: + type_expr = nd.lhs; + if (nd.rhs != 0) { + field_buf[0] = nd.rhs; + fields = field_buf; + nfields = 1; + } + break; + case AST_NODE_STRUCT_INIT_DOT_TWO: + case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: { + uint32_t idx = 0; + if (nd.lhs != 0) + field_buf[idx++] = nd.lhs; + if (nd.rhs != 0) + field_buf[idx++] = nd.rhs; + fields = field_buf; + nfields = idx; + break; + } + case AST_NODE_STRUCT_INIT_DOT: + case AST_NODE_STRUCT_INIT_DOT_COMMA: + fields = tree->extra_data.arr + nd.lhs; + nfields = nd.rhs - nd.lhs; + break; + case AST_NODE_STRUCT_INIT: + case AST_NODE_STRUCT_INIT_COMMA: { + type_expr = nd.lhs; + uint32_t start = tree->extra_data.arr[nd.rhs]; + uint32_t end = tree->extra_data.arr[nd.rhs + 1]; + fields = tree->extra_data.arr + start; + nfields = end - start; + break; + } + default: + break; + } + if (type_expr != 0) { + (void)rlExpr(ag, type_expr, block, RL_RI_NONE); + for (uint32_t i = 0; i < nfields; i++) + (void)rlExpr(ag, fields[i], block, RL_RI_TYPE_ONLY); + return false; + } + if (ri.have_type) { + for (uint32_t i = 0; i < nfields; i++) + (void)rlExpr(ag, fields[i], block, ri); + return ri.have_ptr; + } else { + for (uint32_t i = 0; i < nfields; i++) + (void)rlExpr(ag, fields[i], block, RL_RI_NONE); + return false; + } + } + + // fn_proto, fn_decl (AstRlAnnotate.zig:734-770). + case AST_NODE_FN_PROTO_SIMPLE: + case AST_NODE_FN_PROTO_MULTI: + case AST_NODE_FN_PROTO_ONE: + case AST_NODE_FN_PROTO: + case AST_NODE_FN_DECL: { + // Extract return type and body. 
+ uint32_t return_type = 0; + uint32_t body_node = 0; + + if (tag == AST_NODE_FN_DECL) { + body_node = nd.rhs; + // fn_proto is nd.lhs + uint32_t proto = nd.lhs; + AstNodeTag ptag = tree->nodes.tags[proto]; + AstData pnd = tree->nodes.datas[proto]; + if (ptag == AST_NODE_FN_PROTO_SIMPLE) { + return_type = pnd.rhs; + if (pnd.lhs != 0) + (void)rlExpr(ag, pnd.lhs, block, RL_RI_TYPE_ONLY); + } else if (ptag == AST_NODE_FN_PROTO_MULTI) { + return_type = pnd.rhs; + uint32_t ps = tree->extra_data.arr[pnd.lhs]; + uint32_t pe = tree->extra_data.arr[pnd.lhs + 1]; + for (uint32_t i = ps; i < pe; i++) + (void)rlExpr( + ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY); + } else if (ptag == AST_NODE_FN_PROTO_ONE) { + return_type = pnd.rhs; + AstFnProtoOne fp; + fp.param = tree->extra_data.arr[pnd.lhs]; + fp.align_expr = tree->extra_data.arr[pnd.lhs + 1]; + fp.addrspace_expr = tree->extra_data.arr[pnd.lhs + 2]; + fp.section_expr = tree->extra_data.arr[pnd.lhs + 3]; + fp.callconv_expr = tree->extra_data.arr[pnd.lhs + 4]; + if (fp.param != 0) + (void)rlExpr(ag, fp.param, block, RL_RI_TYPE_ONLY); + if (fp.align_expr != 0) + (void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY); + if (fp.addrspace_expr != 0) + (void)rlExpr( + ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY); + if (fp.section_expr != 0) + (void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY); + if (fp.callconv_expr != 0) + (void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY); + } else if (ptag == AST_NODE_FN_PROTO) { + return_type = pnd.rhs; + AstFnProto fp; + fp.params_start = tree->extra_data.arr[pnd.lhs]; + fp.params_end = tree->extra_data.arr[pnd.lhs + 1]; + fp.align_expr = tree->extra_data.arr[pnd.lhs + 2]; + fp.addrspace_expr = tree->extra_data.arr[pnd.lhs + 3]; + fp.section_expr = tree->extra_data.arr[pnd.lhs + 4]; + fp.callconv_expr = tree->extra_data.arr[pnd.lhs + 5]; + for (uint32_t i = fp.params_start; i < fp.params_end; i++) + (void)rlExpr( + ag, tree->extra_data.arr[i], block, 
RL_RI_TYPE_ONLY); + if (fp.align_expr != 0) + (void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY); + if (fp.addrspace_expr != 0) + (void)rlExpr( + ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY); + if (fp.section_expr != 0) + (void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY); + if (fp.callconv_expr != 0) + (void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY); + } + } else { + // Standalone fn_proto (no body). + if (tag == AST_NODE_FN_PROTO_SIMPLE) { + return_type = nd.rhs; + if (nd.lhs != 0) + (void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY); + } else if (tag == AST_NODE_FN_PROTO_MULTI) { + return_type = nd.rhs; + uint32_t ps = tree->extra_data.arr[nd.lhs]; + uint32_t pe = tree->extra_data.arr[nd.lhs + 1]; + for (uint32_t i = ps; i < pe; i++) + (void)rlExpr( + ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY); + } else if (tag == AST_NODE_FN_PROTO_ONE) { + return_type = nd.rhs; + AstFnProtoOne fp; + fp.param = tree->extra_data.arr[nd.lhs]; + fp.align_expr = tree->extra_data.arr[nd.lhs + 1]; + fp.addrspace_expr = tree->extra_data.arr[nd.lhs + 2]; + fp.section_expr = tree->extra_data.arr[nd.lhs + 3]; + fp.callconv_expr = tree->extra_data.arr[nd.lhs + 4]; + if (fp.param != 0) + (void)rlExpr(ag, fp.param, block, RL_RI_TYPE_ONLY); + if (fp.align_expr != 0) + (void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY); + if (fp.addrspace_expr != 0) + (void)rlExpr( + ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY); + if (fp.section_expr != 0) + (void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY); + if (fp.callconv_expr != 0) + (void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY); + } else if (tag == AST_NODE_FN_PROTO) { + return_type = nd.rhs; + AstFnProto fp; + fp.params_start = tree->extra_data.arr[nd.lhs]; + fp.params_end = tree->extra_data.arr[nd.lhs + 1]; + fp.align_expr = tree->extra_data.arr[nd.lhs + 2]; + fp.addrspace_expr = tree->extra_data.arr[nd.lhs + 3]; + fp.section_expr = tree->extra_data.arr[nd.lhs + 4]; + fp.callconv_expr = 
tree->extra_data.arr[nd.lhs + 5]; + for (uint32_t i = fp.params_start; i < fp.params_end; i++) + (void)rlExpr( + ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY); + if (fp.align_expr != 0) + (void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY); + if (fp.addrspace_expr != 0) + (void)rlExpr( + ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY); + if (fp.section_expr != 0) + (void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY); + if (fp.callconv_expr != 0) + (void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY); + } + } + + if (return_type != 0) + (void)rlExpr(ag, return_type, block, RL_RI_TYPE_ONLY); + if (body_node != 0) + (void)rlExpr(ag, body_node, block, RL_RI_NONE); + return false; + } + + // Remaining: usingnamespace, await, assign_destructure, async calls. + case AST_NODE_USINGNAMESPACE: + return false; + case AST_NODE_AWAIT: + (void)rlExpr(ag, nd.lhs, block, RL_RI_NONE); + return false; + case AST_NODE_ASSIGN_DESTRUCTURE: + return false; // TODO if needed + case AST_NODE_ASYNC_CALL_ONE: + case AST_NODE_ASYNC_CALL_ONE_COMMA: + case AST_NODE_ASYNC_CALL: + case AST_NODE_ASYNC_CALL_COMMA: + return false; // async not relevant + + default: + return false; + } +} + +// astRlAnnotate (AstRlAnnotate.zig:64-83). +// Entry point: run the RL annotation pre-pass. +static void astRlAnnotate(AstGenCtx* ag) { + const Ast* tree = ag->tree; + if (tree->has_error) + return; + + // Get root container members (same as in astGen). 
+ AstData root_data = tree->nodes.datas[0]; + uint32_t members_start = root_data.lhs; + uint32_t members_end = root_data.rhs; + const uint32_t* members = tree->extra_data.arr + members_start; + uint32_t members_len = members_end - members_start; + + for (uint32_t i = 0; i < members_len; i++) + (void)rlExpr(ag, members[i], NULL, RL_RI_NONE); } // --- Public API: astGen (AstGen.zig:144) --- @@ -2487,13 +7761,19 @@ Zir astGen(const Ast* ast) { ag.extra[ZIR_EXTRA_IMPORTS] = 0; ag.extra_len = ZIR_EXTRA_RESERVED_COUNT; + // Run AstRlAnnotate pre-pass (AstGen.zig:150-151). + astRlAnnotate(&ag); + // Set up root GenZir scope (AstGen.zig:176-185). GenZir gen_scope; memset(&gen_scope, 0, sizeof(gen_scope)); + gen_scope.base.tag = SCOPE_GEN_ZIR; + gen_scope.parent = NULL; gen_scope.astgen = &ag; gen_scope.is_comptime = true; gen_scope.decl_node_index = 0; // root gen_scope.decl_line = 0; + gen_scope.break_block = UINT32_MAX; // Get root container members: containerDeclRoot (AstGen.zig:191-195). AstData root_data = ast->nodes.datas[0]; @@ -2525,6 +7805,9 @@ Zir astGen(const Ast* ast) { free(ag.decl_names); free(ag.decl_nodes); free(ag.scratch_instructions); + free(ag.ref_table_keys); + free(ag.ref_table_vals); + free(ag.nodes_need_rl); return zir; } diff --git a/astgen_test.zig b/astgen_test.zig index 5a84b719a4..cafbc3dec4 100644 --- a/astgen_test.zig +++ b/astgen_test.zig @@ -608,32 +608,69 @@ fn expectEqualData( /// Unlike expectEqualZir, does not print diagnostics or return errors. 
fn zirMatches(gpa: Allocator, ref: Zir, got: c.Zir) bool { const ref_len: u32 = @intCast(ref.instructions.len); - if (ref_len != got.inst_len) return false; + if (ref_len != got.inst_len) { + //std.debug.print(" inst_len: ref={d} got={d}\n", .{ ref_len, got.inst_len }); + } const ref_tags = ref.instructions.items(.tag); const ref_datas = ref.instructions.items(.data); - for (0..ref_len) |i| { + const min_len = @min(ref_len, got.inst_len); + var first_tag_mismatch: ?u32 = null; + for (0..min_len) |i| { const ref_tag: u8 = @intFromEnum(ref_tags[i]); const got_tag: u8 = @intCast(got.inst_tags[i]); - if (ref_tag != got_tag) return false; - if (!dataMatches(ref_tags[i], ref_datas[i], got.inst_datas[i])) return false; + if (ref_tag != got_tag) { + first_tag_mismatch = @intCast(i); + break; + } } + if (first_tag_mismatch) |_| { + //const start = if (ftm > 5) ftm - 5 else 0; + //const end = @min(ftm + 10, min_len); + //std.debug.print(" first tag mismatch at inst[{d}]:\n", .{ftm}); + //for (start..end) |i| { + // const ref_tag: u8 = @intFromEnum(ref_tags[i]); + // const got_tag: u8 = @intCast(got.inst_tags[i]); + // const marker: u8 = if (i == ftm) '>' else ' '; + // std.debug.print(" {c} [{d}] ref_tag={d} got_tag={d}\n", .{ marker, i, ref_tag, got_tag }); + //} + return false; + } + for (0..min_len) |i| { + if (!dataMatches(ref_tags[i], ref_datas[i], got.inst_datas[i])) { + //std.debug.print(" inst_datas[{d}] mismatch (tag={d})\n", .{ i, @as(u8, @intFromEnum(ref_tags[i])) }); + return false; + } + } + if (ref_len != got.inst_len) return false; const ref_extra_len: u32 = @intCast(ref.extra.len); - if (ref_extra_len != got.extra_len) return false; + if (ref_extra_len != got.extra_len) { + //std.debug.print(" extra_len: ref={d} got={d}\n", .{ ref_extra_len, got.extra_len }); + return false; + } const skip = buildHashSkipMask(gpa, ref) catch return false; defer gpa.free(skip); for (0..ref_extra_len) |i| { if (skip[i]) continue; - if (ref.extra[i] != got.extra[i]) return 
false; +        if (ref.extra[i] != got.extra[i]) { +            //std.debug.print("      extra[{d}]: ref=0x{x:0>8} got=0x{x:0>8}\n", .{ i, ref.extra[i], got.extra[i] }); +            return false; +        } } const ref_sb_len: u32 = @intCast(ref.string_bytes.len); - if (ref_sb_len != got.string_bytes_len) return false; + if (ref_sb_len != got.string_bytes_len) { + //std.debug.print(" string_bytes_len: ref={d} got={d}\n", .{ ref_sb_len, got.string_bytes_len }); + return false; + } for (0..ref_sb_len) |i| { - if (ref.string_bytes[i] != got.string_bytes[i]) return false; + if (ref.string_bytes[i] != got.string_bytes[i]) { + //std.debug.print(" string_bytes[{d}]: ref=0x{x:0>2} got=0x{x:0>2}\n", .{ i, ref.string_bytes[i], got.string_bytes[i] }); + return false; + } } return true; @@ -728,7 +765,7 @@ const corpus_files = .{ }; /// Returns .pass or .skip for a single corpus entry. -fn corpusCheck(gpa: Allocator, _: []const u8, source: [:0]const u8) enum { pass, skip } { +fn corpusCheck(gpa: Allocator, name: []const u8, source: [:0]const u8) enum { pass, skip } { var tree = Ast.parse(gpa, source, .zig) catch return .skip; defer tree.deinit(gpa); @@ -740,13 +777,18 @@ var c_zir = c.astGen(&c_ast); defer c.zirDeinit(&c_zir); - if (c_zir.has_compile_errors) return .skip; + if (c_zir.has_compile_errors) { + //std.debug.print(" -> has_compile_errors\n", .{}); + return .skip; + } + _ = name; if (zirMatches(gpa, ref_zir, c_zir)) { return .pass; } else { + //std.debug.print(" -> zir mismatch\n", .{}); return .skip; } } test "astgen: corpus" { diff --git a/build.zig b/build.zig index 08f8aefbe1..936d7b9746 100644 --- a/build.zig +++ b/build.zig @@ -100,6 +100,7 @@ pub fn build(b: *std.Build) !void { "--error-exitcode=1", "--check-level=exhaustive", "--enable=all", + "--inline-suppr", "--suppress=missingIncludeSystem", "--suppress=checkersReport", "--suppress=unusedFunction", // TODO remove after plumbing is done diff --git a/zir.h b/zir.h 
index 3e5f97d6ea..766d66938c 100644 --- a/zir.h +++ b/zir.h @@ -438,13 +438,64 @@ typedef union { #define ZIR_REF_NONE UINT32_MAX #define ZIR_MAIN_STRUCT_INST 0 -// Selected Zir.Inst.Ref enum values (matching Zig enum order). +// Zir.Inst.Ref enum values (matching Zig enum order in Zir.zig). +// Types (0-103). +#define ZIR_REF_U1_TYPE 2 #define ZIR_REF_U8_TYPE 3 +#define ZIR_REF_I8_TYPE 4 +#define ZIR_REF_U16_TYPE 5 +#define ZIR_REF_I16_TYPE 6 +#define ZIR_REF_U29_TYPE 7 +#define ZIR_REF_U32_TYPE 8 +#define ZIR_REF_I32_TYPE 9 +#define ZIR_REF_U64_TYPE 10 +#define ZIR_REF_I64_TYPE 11 +#define ZIR_REF_U128_TYPE 13 +#define ZIR_REF_I128_TYPE 14 #define ZIR_REF_USIZE_TYPE 16 +#define ZIR_REF_ISIZE_TYPE 17 +#define ZIR_REF_C_CHAR_TYPE 18 +#define ZIR_REF_C_SHORT_TYPE 19 +#define ZIR_REF_C_USHORT_TYPE 20 +#define ZIR_REF_C_INT_TYPE 21 #define ZIR_REF_C_UINT_TYPE 22 +#define ZIR_REF_C_LONG_TYPE 23 +#define ZIR_REF_C_ULONG_TYPE 24 +#define ZIR_REF_C_LONGLONG_TYPE 25 +#define ZIR_REF_C_ULONGLONG_TYPE 26 +#define ZIR_REF_C_LONGDOUBLE_TYPE 27 +#define ZIR_REF_F16_TYPE 28 +#define ZIR_REF_F32_TYPE 29 +#define ZIR_REF_F64_TYPE 30 +#define ZIR_REF_F80_TYPE 31 +#define ZIR_REF_F128_TYPE 32 +#define ZIR_REF_ANYOPAQUE_TYPE 33 #define ZIR_REF_BOOL_TYPE 34 #define ZIR_REF_VOID_TYPE 35 +#define ZIR_REF_TYPE_TYPE 36 +#define ZIR_REF_ANYERROR_TYPE 37 +#define ZIR_REF_COMPTIME_INT_TYPE 38 +#define ZIR_REF_COMPTIME_FLOAT_TYPE 39 +#define ZIR_REF_NORETURN_TYPE 40 +#define ZIR_REF_ANYFRAME_TYPE 41 +#define ZIR_REF_NULL_TYPE 42 +#define ZIR_REF_UNDEFINED_TYPE 43 +#define ZIR_REF_ENUM_LITERAL_TYPE 44 +#define ZIR_REF_PTR_USIZE_TYPE 45 +#define ZIR_REF_PTR_CONST_COMPTIME_INT_TYPE 46 +#define ZIR_REF_MANYPTR_U8_TYPE 47 +#define ZIR_REF_MANYPTR_CONST_U8_TYPE 48 +#define ZIR_REF_MANYPTR_CONST_U8_SENTINEL_0_TYPE 49 +#define ZIR_REF_SLICE_CONST_U8_TYPE 50 +#define ZIR_REF_SLICE_CONST_U8_SENTINEL_0_TYPE 51 #define ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE 100 +#define ZIR_REF_GENERIC_POISON_TYPE 102 
+#define ZIR_REF_EMPTY_TUPLE_TYPE 103 +// Values (104-123). +#define ZIR_REF_UNDEF 104 +#define ZIR_REF_UNDEF_BOOL 105 +#define ZIR_REF_UNDEF_USIZE 106 +#define ZIR_REF_UNDEF_U1 107 #define ZIR_REF_ZERO 108 #define ZIR_REF_ZERO_USIZE 109 #define ZIR_REF_ZERO_U1 110