// Source file: zig/stage0/astgen.c (approx. 10700 lines, 402 KiB).

// astgen.c — AST to ZIR conversion, ported from lib/std/zig/AstGen.zig.
//
// Structural translation of AstGen.zig into C.
// Each function corresponds to a Zig function with the same name,
// with line references to Zig 0.15.1 AstGen.zig.
#include "astgen.h"
#include "common.h"
#include <assert.h>
#include <stdlib.h>
#include <string.h>
// --- Declaration.Flags.Id enum (Zir.zig:2724) ---
// Declaration.Flags.Id (Zir.zig:2724).
// Encodes visibility (pub), linkage (extern/export), mutability (const/var),
// threadlocal-ness, and whether the decl body is "simple"/"typed" into a
// single enum so the ZIR encoding can pack declaration flags compactly.
// NOTE: the declaration order defines the numeric values serialized into
// ZIR — it must stay in sync with Zir.zig and must not be reordered.
typedef enum {
    // Tests and comptime blocks.
    DECL_ID_UNNAMED_TEST,
    DECL_ID_TEST,
    DECL_ID_DECLTEST,
    DECL_ID_COMPTIME,
    // `const` declarations.
    DECL_ID_CONST_SIMPLE,
    DECL_ID_CONST_TYPED,
    DECL_ID_CONST,
    DECL_ID_PUB_CONST_SIMPLE,
    DECL_ID_PUB_CONST_TYPED,
    DECL_ID_PUB_CONST,
    DECL_ID_EXTERN_CONST_SIMPLE,
    DECL_ID_EXTERN_CONST,
    DECL_ID_PUB_EXTERN_CONST_SIMPLE,
    DECL_ID_PUB_EXTERN_CONST,
    DECL_ID_EXPORT_CONST,
    DECL_ID_PUB_EXPORT_CONST,
    // `var` declarations.
    DECL_ID_VAR_SIMPLE,
    DECL_ID_VAR,
    DECL_ID_VAR_THREADLOCAL,
    DECL_ID_PUB_VAR_SIMPLE,
    DECL_ID_PUB_VAR,
    DECL_ID_PUB_VAR_THREADLOCAL,
    DECL_ID_EXTERN_VAR,
    DECL_ID_EXTERN_VAR_THREADLOCAL,
    DECL_ID_PUB_EXTERN_VAR,
    DECL_ID_PUB_EXTERN_VAR_THREADLOCAL,
    DECL_ID_EXPORT_VAR,
    DECL_ID_EXPORT_VAR_THREADLOCAL,
    DECL_ID_PUB_EXPORT_VAR,
    DECL_ID_PUB_EXPORT_VAR_THREADLOCAL,
} DeclFlagsId;
// --- Import tracking (AstGen.zig:265) ---
// One `@import` encountered while lowering (AstGen.zig:265).
// Collected so the caller can resolve imported files after astgen.
typedef struct {
    uint32_t name; // NullTerminatedString index into string_bytes
    uint32_t token; // Ast.TokenIndex of the import string literal
} ImportEntry;
// --- AstGen internal context (mirrors AstGen struct, AstGen.zig:153) ---
// AstGen internal context (mirrors the AstGen struct, AstGen.zig:153).
// Owns all dynamic arrays produced while lowering one Ast to ZIR.
// Dynamic arrays follow a common (ptr, len, cap) triple convention;
// all growth goes through the ensure*Capacity helpers below, which
// exit(1) on OOM.
typedef struct {
    const Ast* tree; // the parsed AST being lowered (not owned)
    // ZIR instruction list: parallel tag/data arrays (struct-of-arrays).
    ZirInstTag* inst_tags;
    ZirInstData* inst_datas;
    uint32_t inst_len;
    uint32_t inst_cap;
    // Trailing u32 payload area referenced by pl_node.payload_index.
    uint32_t* extra;
    uint32_t extra_len;
    uint32_t extra_cap;
    // Interned string storage; strings are null-terminated within.
    uint8_t* string_bytes;
    uint32_t string_bytes_len;
    uint32_t string_bytes_cap;
    // String dedup table: stores positions in string_bytes that are
    // registered for deduplication (mirrors AstGen.string_table).
    // Only strings added via identAsString/strLitAsString (non-embedded-null)
    // are registered. Multiline strings are NOT registered.
    uint32_t* string_table;
    uint32_t string_table_len;
    uint32_t string_table_cap;
    // Source cursor state, advanced monotonically while emitting debug
    // line/column info (see advanceSourceCursor).
    uint32_t source_offset;
    uint32_t source_line;
    uint32_t source_column;
    // All @import()s seen so far (AstGen.zig:265).
    ImportEntry* imports;
    uint32_t imports_len;
    uint32_t imports_cap;
    // Namespace decl table: maps string indices to node indices.
    // Populated by scanContainer, used by identifier resolution.
    uint32_t* decl_names; // string indices
    uint32_t* decl_nodes; // node indices
    uint32_t decl_table_len;
    uint32_t decl_table_cap;
    // Shared dynamic array for GenZir instructions (AstGen.zig:11796).
    // Sub-blocks share this array and track their slice via
    // instructions_top.
    uint32_t* scratch_instructions;
    uint32_t scratch_inst_len;
    uint32_t scratch_inst_cap;
    // Scratch extra array for call arguments (mirrors AstGen.scratch in Zig).
    // Used to collect body lengths + body instructions before copying to
    // extra.
    uint32_t* scratch_extra;
    uint32_t scratch_extra_len;
    uint32_t scratch_extra_cap;
    // Return type ref for the current function (set during fnDecl/testDecl).
    uint32_t fn_ret_ty; // ZirInstRef
    // Pointer to the fn_block GenZir for the current function (AstGen.zig:45).
    void* fn_block; // GenZir*
    // ref_table: deferred REF instructions (AstGen.zig:58-68).
    // Key = operand inst index, Value = ref inst index.
    uint32_t* ref_table_keys;
    uint32_t* ref_table_vals;
    uint32_t ref_table_len;
    uint32_t ref_table_cap;
    // nodes_need_rl: set of AST node indices that need result locations.
    // Populated by astRlAnnotate() pre-pass (AstRlAnnotate.zig).
    uint32_t* nodes_need_rl;
    uint32_t nodes_need_rl_len;
    uint32_t nodes_need_rl_cap;
    // Sticky flag: once set, the produced ZIR is not usable.
    bool has_compile_errors;
} AstGenCtx;
// Record that a compile error occurred. In this stage0 port the diagnostic
// details (function name, line) are discarded; only the sticky boolean flag
// is kept so callers can stop trusting the emitted ZIR.
static void setCompileError(AstGenCtx* ag, const char* where, int line) {
    (void)where; // diagnostic location — unused in stage0
    (void)line;
    ag->has_compile_errors = true;
}
// Convenience wrapper that captures the C call site for future diagnostics.
#define SET_ERROR(ag) setCompileError(ag, __func__, __LINE__)
// Record the current function's fn_block GenZir on the context
// (AstGen.zig:45). The caller must save the previous value and restore it
// before the pointed-to GenZir goes out of scope.
static void setFnBlock(AstGenCtx* ag, void* block) {
    ag->fn_block = block;
}
// --- ref_table operations (AstGen.zig:58-68) ---
// Simple linear-scan table for deferred REF instructions.
// Returns the address of the value paired with `key`, or NULL when absent.
static uint32_t* refTableGet(AstGenCtx* ag, uint32_t key) {
    uint32_t* keys = ag->ref_table_keys;
    for (uint32_t i = 0, n = ag->ref_table_len; i < n; i++) {
        if (keys[i] == key)
            return ag->ref_table_vals + i;
    }
    return NULL;
}
// getOrPut: returns a pointer to the value slot for `key`, inserting a new
// entry when absent; sets *found to true iff the key already existed.
// The returned pointer is valid until the next table mutation.
static uint32_t* refTableGetOrPut(AstGenCtx* ag, uint32_t key, bool* found) {
    for (uint32_t i = 0; i < ag->ref_table_len; i++) {
        if (ag->ref_table_keys[i] == key) {
            *found = true;
            return &ag->ref_table_vals[i];
        }
    }
    *found = false;
    if (ag->ref_table_len >= ag->ref_table_cap) {
        uint32_t new_cap = ag->ref_table_cap == 0 ? 16 : ag->ref_table_cap * 2;
        // Check realloc results before committing them: the previous code
        // assigned unchecked, so an OOM would store NULL and crash on the
        // write below. Match the ensure*Capacity helpers' exit(1) policy.
        uint32_t* keys
            = realloc(ag->ref_table_keys, new_cap * sizeof(uint32_t));
        uint32_t* vals
            = realloc(ag->ref_table_vals, new_cap * sizeof(uint32_t));
        if (!keys || !vals)
            exit(1);
        ag->ref_table_keys = keys;
        ag->ref_table_vals = vals;
        ag->ref_table_cap = new_cap;
    }
    uint32_t idx = ag->ref_table_len++;
    ag->ref_table_keys[idx] = key;
    return &ag->ref_table_vals[idx];
}
// fetchRemove: if `key` is present, copy its value into *val, delete the
// entry (swap-remove with the last slot; order is not preserved), and
// return true. Returns false when the key is absent.
static bool refTableFetchRemove(AstGenCtx* ag, uint32_t key, uint32_t* val) {
    for (uint32_t i = 0; i < ag->ref_table_len; i++) {
        if (ag->ref_table_keys[i] != key)
            continue;
        *val = ag->ref_table_vals[i];
        uint32_t last = --ag->ref_table_len;
        if (i != last) {
            // Fill the hole with the (former) last entry.
            ag->ref_table_keys[i] = ag->ref_table_keys[last];
            ag->ref_table_vals[i] = ag->ref_table_vals[last];
        }
        return true;
    }
    return false;
}
// --- Result location (AstGen.zig:11808) ---
// Simplified version of ResultInfo.Loc.
// Defined here (before GenZir) because GenZir.break_result_info uses it.
// ResultInfo.Context (AstGen.zig:371-386).
// ResultInfo.Context (AstGen.zig:371-386): why a value is being computed,
// used to tailor error messages and a few lowering decisions.
typedef enum {
    RI_CTX_NONE,
    RI_CTX_RETURN, // operand of a `return`
    RI_CTX_ERROR_HANDLING_EXPR, // operand of try/catch/if-error
    RI_CTX_SHIFT_OP, // rhs of a shift operator
    RI_CTX_FN_ARG, // function call argument
    RI_CTX_CONST_INIT, // initializer of a const
    RI_CTX_ASSIGNMENT, // rhs of an assignment
} ResultCtx;
// Simplified ResultInfo.Loc tag (AstGen.zig:11808): where/how the result of
// an expression should be materialized.
typedef enum {
    RL_NONE, // Just compute the value.
    RL_REF, // Compute a pointer to the value.
    RL_DISCARD, // Compute but discard (emit ensure_result_non_error).
    RL_TY, // Coerce to specific type.
    RL_COERCED_TY, // Coerce to specific type, result is the coercion.
    RL_PTR, // Store result to typed pointer. data=alloc inst, src_node=node.
    RL_INFERRED_PTR, // Store result to inferred pointer. data=alloc inst.
    RL_REF_COERCED_TY, // Ref with pointer type. data=ptr_ty_inst.
} ResultLocTag;
// Tagged result-location value; `data`/`src_node` meaning depends on `tag`.
typedef struct {
    ResultLocTag tag;
    uint32_t data; // ZirInstRef: ty_inst for TY/COERCED_TY, alloc inst for
                   // PTR/INFERRED_PTR.
    uint32_t src_node; // Only used for RL_PTR.
    ResultCtx ctx; // ResultInfo.Context (AstGen.zig:371).
} ResultLoc;
// Common ready-made ResultLoc values.
#define RL_NONE_VAL \
    ((ResultLoc) { \
        .tag = RL_NONE, .data = 0, .src_node = 0, .ctx = RI_CTX_NONE })
#define RL_REF_VAL \
    ((ResultLoc) { \
        .tag = RL_REF, .data = 0, .src_node = 0, .ctx = RI_CTX_NONE })
#define RL_DISCARD_VAL \
    ((ResultLoc) { \
        .tag = RL_DISCARD, .data = 0, .src_node = 0, .ctx = RI_CTX_NONE })
// True when the result location asks for a pointer rather than a value.
#define RL_IS_REF(rl) ((rl).tag == RL_REF || (rl).tag == RL_REF_COERCED_TY)
// --- Scope types (AstGen.zig:11621-11768) ---
// Scopes form an intrusive singly-linked chain via each concrete scope's
// `parent` pointer; `tag` discriminates which concrete struct a `Scope*`
// actually points to (the Scope is always the first member).
typedef enum {
    SCOPE_GEN_ZIR, // GenZir: an instruction-emitting block
    SCOPE_LOCAL_VAL, // ScopeLocalVal: const local bound to a value
    SCOPE_LOCAL_PTR, // ScopeLocalPtr: local bound to a pointer
    SCOPE_DEFER_NORMAL, // ScopeDefer: `defer`
    SCOPE_DEFER_ERROR, // ScopeDefer: `errdefer`
    SCOPE_NAMESPACE, // container namespace
    SCOPE_TOP, // sentinel root of the scope chain
    SCOPE_LABEL, // ScopeLabel: labeled block/loop
} ScopeTag;
// Base header embedded as the first member of every concrete scope struct.
typedef struct Scope {
    ScopeTag tag;
} Scope;
// --- GenZir scope (mirrors GenZir struct, AstGen.zig:11772) ---
//
// Sub-blocks share the parent AstGenCtx's scratch_instructions array and
// record their starting offset (instructions_top). This mirrors the upstream
// GenZir.instructions / instructions_top design (AstGen.zig:11796-11850).
typedef struct {
    Scope base; // tag = SCOPE_GEN_ZIR
    Scope* parent; // enclosing scope in the chain
    AstGenCtx* astgen; // owning context (shared arrays live here)
    uint32_t decl_node_index; // AST node of the enclosing declaration
    uint32_t decl_line; // line of the enclosing declaration
    bool is_comptime; // lowering inside a comptime context
    bool is_inline; // true for inline for/while, labeled blocks in comptime
    bool c_import; // true inside @cImport block
    uint32_t instructions_top; // start index in shared array
    uint32_t break_block; // UINT32_MAX = none (AstGen.zig:11780)
    uint32_t continue_block; // UINT32_MAX = none (AstGen.zig:11784)
    // Label for labeled blocks (AstGen.zig:11800, 11869-11874).
    uint32_t label_token; // UINT32_MAX = no label
    uint32_t label_block_inst; // the BLOCK instruction index
    ResultLoc break_result_info; // RL for break values
} GenZir;
// Scope.LocalVal (AstGen.zig:11682).
// This is always a `const` local and the `inst` is a value type, not a
// pointer.
typedef struct {
    Scope base; // tag = SCOPE_LOCAL_VAL
    Scope* parent; // enclosing scope
    GenZir* gen_zir; // GenZir the local belongs to
    uint32_t inst; // ZirInstRef of the bound value
    uint32_t token_src; // Ast.TokenIndex of the declaring identifier
    uint32_t name; // NullTerminatedString (string table index)
} ScopeLocalVal;
// Scope.LocalPtr (AstGen.zig:11704).
// This could be a `const` or `var` local. It has a pointer instead of a value.
typedef struct {
    Scope base; // tag = SCOPE_LOCAL_PTR
    Scope* parent; // enclosing scope
    GenZir* gen_zir; // GenZir the local belongs to
    uint32_t ptr; // ZirInstRef of the alloc/pointer
    uint32_t token_src; // Ast.TokenIndex of the declaring identifier
    uint32_t name; // NullTerminatedString (string table index)
    bool maybe_comptime; // may resolve to a comptime-known value
} ScopeLocalPtr;
// Scope.Defer (AstGen.zig:11741): one `defer`/`errdefer` body, recorded as
// a slice of instructions to replay at scope exit.
typedef struct {
    Scope base; // tag = SCOPE_DEFER_NORMAL or SCOPE_DEFER_ERROR
    Scope* parent; // enclosing scope
    uint32_t index; // start of the deferred body
    uint32_t len; // number of instructions in the body
} ScopeDefer;
// Scope.Label — for labeled blocks and loops.
typedef struct {
    Scope base; // tag = SCOPE_LABEL
    Scope* parent; // enclosing scope
    uint32_t label_name; // NullTerminatedString
    uint32_t block_inst; // instruction index (not ref)
} ScopeLabel;
// --- GenZir instruction helpers (AstGen.zig:11830-11850) ---
// Returns the number of instructions in this scope.
static uint32_t gzInstructionsLen(const GenZir* gz) {
return gz->astgen->scratch_inst_len - gz->instructions_top;
}
// Returns pointer to start of this scope's instructions in the shared array.
static const uint32_t* gzInstructionsSlice(const GenZir* gz) {
return gz->astgen->scratch_instructions + gz->instructions_top;
}
// Mirrors GenZir.instructionsSliceUpto (AstGen.zig:11835): the portion of
// gz's instructions that lies below where `stacked_gz` begins.
static uint32_t gzInstructionsLenUpto(
    const GenZir* gz, const GenZir* stacked_gz) {
    return stacked_gz->instructions_top - gz->instructions_top;
}
static const uint32_t* gzInstructionsSliceUpto(
    const GenZir* gz, const GenZir* stacked_gz) {
    (void)stacked_gz; // the length half is gzInstructionsLenUpto
    return &gz->astgen->scratch_instructions[gz->instructions_top];
}
// Mirrors GenZir.unstack (AstGen.zig:11822): discard this scope's
// instructions by rewinding the shared scratch array to the scope's start.
static void gzUnstack(GenZir* gz) {
    AstGenCtx* ag = gz->astgen;
    ag->scratch_inst_len = gz->instructions_top;
}
// Append one instruction index to the shared scratch array. Because this
// scope owns the array's tail, this effectively appends to gz's body.
static void gzAppendInstruction(GenZir* gz, uint32_t inst_idx) {
    AstGenCtx* ag = gz->astgen;
    if (ag->scratch_inst_len == ag->scratch_inst_cap) {
        uint32_t new_cap = ag->scratch_inst_cap ? ag->scratch_inst_cap * 2 : 64;
        uint32_t* grown
            = realloc(ag->scratch_instructions, new_cap * sizeof(uint32_t));
        if (grown == NULL)
            exit(1); // OOM policy: abort, matching the other growth helpers
        ag->scratch_instructions = grown;
        ag->scratch_inst_cap = new_cap;
    }
    ag->scratch_instructions[ag->scratch_inst_len++] = inst_idx;
}
// Mirrors GenZir.makeSubBlock (AstGen.zig:11852): create a child GenZir
// that inherits the parent's source/comptime context and stacks on top of
// the parent's portion of the shared instruction array.
static GenZir makeSubBlock(GenZir* parent, Scope* scope) {
    AstGenCtx* ag = parent->astgen;
    GenZir sub;
    memset(&sub, 0, sizeof sub); // zeroes is_inline, label fields, RL, ...
    sub.base.tag = SCOPE_GEN_ZIR;
    sub.parent = scope;
    sub.astgen = ag;
    sub.decl_node_index = parent->decl_node_index;
    sub.decl_line = parent->decl_line;
    sub.is_comptime = parent->is_comptime;
    sub.c_import = parent->c_import;
    // The sub-block owns the (currently empty) tail of the shared array.
    sub.instructions_top = ag->scratch_inst_len;
    // UINT32_MAX encodes "none" for break/continue targets and labels.
    sub.break_block = UINT32_MAX;
    sub.continue_block = UINT32_MAX;
    sub.label_token = UINT32_MAX;
    return sub;
}
// --- Capacity helpers ---
// Grow `extra` so that at least `additional` more u32s fit.
// Doubling growth; exits on OOM.
static void ensureExtraCapacity(AstGenCtx* ag, uint32_t additional) {
    uint32_t needed = ag->extra_len + additional;
    if (needed <= ag->extra_cap)
        return;
    uint32_t new_cap = ag->extra_cap * 2;
    if (new_cap < needed)
        new_cap = needed;
    uint32_t* grown = realloc(ag->extra, new_cap * sizeof(uint32_t));
    if (grown == NULL)
        exit(1);
    ag->extra = grown;
    ag->extra_cap = new_cap;
}
// Grow the parallel tag/data instruction arrays in lockstep so that at
// least `additional` more instructions fit. Exits on OOM.
static void ensureInstCapacity(AstGenCtx* ag, uint32_t additional) {
    uint32_t needed = ag->inst_len + additional;
    if (needed <= ag->inst_cap)
        return;
    uint32_t new_cap = ag->inst_cap * 2;
    if (new_cap < needed)
        new_cap = needed;
    ZirInstTag* tags = realloc(ag->inst_tags, new_cap * sizeof(ZirInstTag));
    ZirInstData* datas
        = realloc(ag->inst_datas, new_cap * sizeof(ZirInstData));
    if (tags == NULL || datas == NULL)
        exit(1);
    ag->inst_tags = tags;
    ag->inst_datas = datas;
    ag->inst_cap = new_cap;
}
// Grow `string_bytes` so that at least `additional` more bytes fit.
// Doubling growth; exits on OOM.
static void ensureStringBytesCapacity(AstGenCtx* ag, uint32_t additional) {
    uint32_t needed = ag->string_bytes_len + additional;
    if (needed <= ag->string_bytes_cap)
        return;
    uint32_t new_cap = ag->string_bytes_cap * 2;
    if (new_cap < needed)
        new_cap = needed;
    uint8_t* grown = realloc(ag->string_bytes, new_cap);
    if (grown == NULL)
        exit(1);
    ag->string_bytes = grown;
    ag->string_bytes_cap = new_cap;
}
// --- Extra data helpers ---
// Append one u32 to `extra`; returns the index it was written at.
static uint32_t addExtraU32(AstGenCtx* ag, uint32_t value) {
    ensureExtraCapacity(ag, 1);
    uint32_t idx = ag->extra_len++;
    ag->extra[idx] = value;
    return idx;
}
// --- Instruction helpers ---
// Mirrors AstGen.reserveInstructionIndex (AstGen.zig:12902): reserve one
// instruction slot whose tag/data the caller fills in later. Both are
// zeroed so reserved-but-unset slots are deterministic.
static uint32_t reserveInstructionIndex(AstGenCtx* ag) {
    ensureInstCapacity(ag, 1);
    uint32_t idx = ag->inst_len++;
    ag->inst_tags[idx] = (ZirInstTag)0;
    memset(&ag->inst_datas[idx], 0, sizeof ag->inst_datas[idx]);
    return idx;
}
// Forward declarations.
static int32_t tokenIndexToRelative(const GenZir* gz, uint32_t token);
static uint32_t firstToken(const Ast* tree, uint32_t node);
static bool nodesNeedRlContains(const AstGenCtx* ag, uint32_t node);
// Mirrors GenZir.makeUnTok (AstGen.zig:12520).
// Allocates an un_tok instruction WITHOUT appending it to the GenZir body;
// the caller appends it later. Returns the raw instruction INDEX, not a Ref.
static uint32_t makeUnTok(
    GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) {
    AstGenCtx* ag = gz->astgen;
    ensureInstCapacity(ag, 1);
    uint32_t new_index = ag->inst_len++;
    ag->inst_tags[new_index] = tag;
    ag->inst_datas[new_index].un_tok.src_tok
        = tokenIndexToRelative(gz, abs_tok_index);
    ag->inst_datas[new_index].un_tok.operand = operand;
    return new_index; // raw index, NOT a Ref
}
// Mirrors GenZir.add (AstGen.zig:13162).
// Appends an instruction, records it in the GenZir body, and returns it as
// a Ref (instruction index + ZIR_REF_START_INDEX).
static uint32_t addInstruction(GenZir* gz, ZirInstTag tag, ZirInstData data) {
    AstGenCtx* ag = gz->astgen;
    ensureInstCapacity(ag, 1);
    uint32_t new_index = ag->inst_len++;
    ag->inst_tags[new_index] = tag;
    ag->inst_datas[new_index] = data;
    gzAppendInstruction(gz, new_index);
    return new_index + ZIR_REF_START_INDEX; // toRef()
}
// Mirrors GenZir.addInt (AstGen.zig:12238): emit an `int` instruction
// carrying a 64-bit literal value.
static uint32_t addInt(GenZir* gz, uint64_t integer) {
    ZirInstData d;
    d.int_val = integer;
    return addInstruction(gz, ZIR_INST_INT, d);
}
// Mirrors GenZir.add for bin data (Zir.zig:1877): emit an instruction whose
// operands (lhs, rhs) are stored inline in inst_datas.
static uint32_t addBin(
    GenZir* gz, ZirInstTag tag, uint32_t lhs, uint32_t rhs) {
    ZirInstData d;
    d.bin.lhs = lhs;
    d.bin.rhs = rhs;
    return addInstruction(gz, tag, d);
}
// Mirrors GenZir.addPlNode (AstGen.zig:12308): emit a pl_node instruction
// with a two-word payload (lhs, rhs) appended to `extra`.
static uint32_t addPlNodeBin(
    GenZir* gz, ZirInstTag tag, uint32_t node, uint32_t lhs, uint32_t rhs) {
    AstGenCtx* ag = gz->astgen;
    ensureExtraCapacity(ag, 2);
    uint32_t payload_index = ag->extra_len;
    ag->extra[payload_index] = lhs;
    ag->extra[payload_index + 1] = rhs;
    ag->extra_len = payload_index + 2;
    ZirInstData d;
    d.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    d.pl_node.payload_index = payload_index;
    return addInstruction(gz, tag, d);
}
// Like addPlNodeBin but with a three-word payload
// (e.g. ArrayTypeSentinel's len/sentinel/elem_type).
static uint32_t addPlNodeTriple(GenZir* gz, ZirInstTag tag, uint32_t node,
    uint32_t a, uint32_t b, uint32_t c) {
    AstGenCtx* ag = gz->astgen;
    ensureExtraCapacity(ag, 3);
    uint32_t payload_index = ag->extra_len;
    ag->extra[payload_index] = a;
    ag->extra[payload_index + 1] = b;
    ag->extra[payload_index + 2] = c;
    ag->extra_len = payload_index + 3;
    ZirInstData d;
    d.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    d.pl_node.payload_index = payload_index;
    return addInstruction(gz, tag, d);
}
// True when the identifier node's token is exactly `_`, used to detect the
// inferred-array-length `[_]T` pattern. @"_" is deliberately not matched
// (same as upstream).
static bool isUnderscoreIdent(const Ast* tree, uint32_t ident_node) {
    uint32_t tok = tree->nodes.main_tokens[ident_node];
    uint32_t pos = tree->tokens.starts[tok];
    if (tree->source[pos] != '_')
        return false;
    if (pos + 1 >= tree->source_len)
        return true; // lone `_` at the very end of the source
    // `_` only counts if the next byte does not continue the identifier.
    char c = tree->source[pos + 1];
    bool continues = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
        || (c >= '0' && c <= '9') || c == '_';
    return !continues;
}
// Mirrors GenZir.addUnNode (AstGen.zig:12406): emit a single-operand
// instruction whose source location is a node offset.
static uint32_t addUnNode(
    GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t node) {
    ZirInstData d;
    d.un_node.operand = operand;
    d.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    return addInstruction(gz, tag, d);
}
// Mirrors GenZir.addUnTok (AstGen.zig:12497): emit a single-operand
// instruction whose source location is a token offset.
static uint32_t addUnTok(
    GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) {
    ZirInstData d;
    d.un_tok.operand = operand;
    d.un_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index);
    return addInstruction(gz, tag, d);
}
// Mirrors GenZir.addStrTok (AstGen.zig:12349): emit an instruction carrying
// an interned string index plus a token source location.
static uint32_t addStrTok(
    GenZir* gz, ZirInstTag tag, uint32_t str_index, uint32_t token) {
    ZirInstData d;
    d.str_tok.src_tok = tokenIndexToRelative(gz, token);
    d.str_tok.start = str_index;
    return addInstruction(gz, tag, d);
}
// Mirrors GenZir.addPlNodePayloadIndex (AstGen.zig:12332): emit a pl_node
// instruction whose payload was already written to `extra` by the caller.
static uint32_t addPlNodePayloadIndex(
    GenZir* gz, ZirInstTag tag, uint32_t node, uint32_t payload_index) {
    ZirInstData d;
    d.pl_node.payload_index = payload_index;
    d.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    return addInstruction(gz, tag, d);
}
// --- Source cursor (AstGen.zig:13335-13359) ---
// Mirrors AstGen.advanceSourceCursor (AstGen.zig:13342): walk the source
// forward from the current offset to `end`, keeping line/column in sync.
// The cursor only ever moves forward (asserted).
static void advanceSourceCursor(AstGenCtx* ag, uint32_t end) {
    const char* src = ag->tree->source;
    assert(ag->source_offset <= end);
    for (uint32_t i = ag->source_offset; i < end; i++) {
        if (src[i] == '\n') {
            ag->source_line++;
            ag->source_column = 0;
        } else {
            ag->source_column++;
        }
    }
    ag->source_offset = end;
}
// Mirrors tree.firstToken (Ast.zig:596).
// Returns the index of the first token belonging to `node`, iteratively
// walking left through the tree: binary/postfix nodes recurse into their
// LHS, prefix nodes answer with their main token, and declaration nodes
// scan backwards over modifier tokens (pub/extern/export/...).
static uint32_t firstToken(const Ast* tree, uint32_t node) {
    uint32_t n = node;
    while (1) {
        AstNodeTag tag = tree->nodes.tags[n];
        switch (tag) {
        case AST_NODE_ROOT:
            return 0;
        // Return main_token directly (Ast.zig:602-643): these nodes begin
        // with their keyword/operator/literal token.
        case AST_NODE_TEST_DECL:
        case AST_NODE_ERRDEFER:
        case AST_NODE_DEFER:
        case AST_NODE_BOOL_NOT:
        case AST_NODE_NEGATION:
        case AST_NODE_BIT_NOT:
        case AST_NODE_NEGATION_WRAP:
        case AST_NODE_ADDRESS_OF:
        case AST_NODE_TRY:
        case AST_NODE_AWAIT:
        case AST_NODE_OPTIONAL_TYPE:
        case AST_NODE_SWITCH:
        case AST_NODE_SWITCH_COMMA:
        case AST_NODE_IF_SIMPLE:
        case AST_NODE_IF:
        case AST_NODE_SUSPEND:
        case AST_NODE_RESUME:
        case AST_NODE_CONTINUE:
        case AST_NODE_BREAK:
        case AST_NODE_RETURN:
        case AST_NODE_ANYFRAME_TYPE:
        case AST_NODE_IDENTIFIER:
        case AST_NODE_ANYFRAME_LITERAL:
        case AST_NODE_CHAR_LITERAL:
        case AST_NODE_NUMBER_LITERAL:
        case AST_NODE_UNREACHABLE_LITERAL:
        case AST_NODE_STRING_LITERAL:
        case AST_NODE_MULTILINE_STRING_LITERAL:
        case AST_NODE_GROUPED_EXPRESSION:
        case AST_NODE_BUILTIN_CALL_TWO:
        case AST_NODE_BUILTIN_CALL_TWO_COMMA:
        case AST_NODE_BUILTIN_CALL:
        case AST_NODE_BUILTIN_CALL_COMMA:
        case AST_NODE_ERROR_SET_DECL:
        case AST_NODE_COMPTIME:
        case AST_NODE_NOSUSPEND:
        case AST_NODE_ASM_SIMPLE:
        case AST_NODE_ASM:
        case AST_NODE_ARRAY_TYPE:
        case AST_NODE_ARRAY_TYPE_SENTINEL:
        case AST_NODE_ERROR_VALUE:
        case AST_NODE_PTR_TYPE_ALIGNED:
        case AST_NODE_PTR_TYPE_SENTINEL:
        case AST_NODE_PTR_TYPE:
        case AST_NODE_PTR_TYPE_BIT_RANGE:
            return tree->nodes.main_tokens[n];
        // Return main_token - 1: dot-prefixed inits and enum_literal
        // (Ast.zig:645-654) — the leading `.` sits just before main_token.
        case AST_NODE_ARRAY_INIT_DOT:
        case AST_NODE_ARRAY_INIT_DOT_COMMA:
        case AST_NODE_ARRAY_INIT_DOT_TWO:
        case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
        case AST_NODE_STRUCT_INIT_DOT:
        case AST_NODE_STRUCT_INIT_DOT_COMMA:
        case AST_NODE_STRUCT_INIT_DOT_TWO:
        case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
        case AST_NODE_ENUM_LITERAL:
            return tree->nodes.main_tokens[n] - 1;
        // Recurse into LHS: all binary ops and compound expressions
        // (Ast.zig:656-733). The loop continues with the LHS child.
        case AST_NODE_CATCH:
        case AST_NODE_EQUAL_EQUAL:
        case AST_NODE_BANG_EQUAL:
        case AST_NODE_LESS_THAN:
        case AST_NODE_GREATER_THAN:
        case AST_NODE_LESS_OR_EQUAL:
        case AST_NODE_GREATER_OR_EQUAL:
        case AST_NODE_ASSIGN_MUL:
        case AST_NODE_ASSIGN_DIV:
        case AST_NODE_ASSIGN_MOD:
        case AST_NODE_ASSIGN_ADD:
        case AST_NODE_ASSIGN_SUB:
        case AST_NODE_ASSIGN_SHL:
        case AST_NODE_ASSIGN_SHL_SAT:
        case AST_NODE_ASSIGN_SHR:
        case AST_NODE_ASSIGN_BIT_AND:
        case AST_NODE_ASSIGN_BIT_XOR:
        case AST_NODE_ASSIGN_BIT_OR:
        case AST_NODE_ASSIGN_MUL_WRAP:
        case AST_NODE_ASSIGN_ADD_WRAP:
        case AST_NODE_ASSIGN_SUB_WRAP:
        case AST_NODE_ASSIGN_MUL_SAT:
        case AST_NODE_ASSIGN_ADD_SAT:
        case AST_NODE_ASSIGN_SUB_SAT:
        case AST_NODE_ASSIGN:
        case AST_NODE_MERGE_ERROR_SETS:
        case AST_NODE_MUL:
        case AST_NODE_DIV:
        case AST_NODE_MOD:
        case AST_NODE_ARRAY_MULT:
        case AST_NODE_MUL_WRAP:
        case AST_NODE_MUL_SAT:
        case AST_NODE_ADD:
        case AST_NODE_SUB:
        case AST_NODE_ARRAY_CAT:
        case AST_NODE_ADD_WRAP:
        case AST_NODE_SUB_WRAP:
        case AST_NODE_ADD_SAT:
        case AST_NODE_SUB_SAT:
        case AST_NODE_SHL:
        case AST_NODE_SHL_SAT:
        case AST_NODE_SHR:
        case AST_NODE_BIT_AND:
        case AST_NODE_BIT_XOR:
        case AST_NODE_BIT_OR:
        case AST_NODE_ORELSE:
        case AST_NODE_BOOL_AND:
        case AST_NODE_BOOL_OR:
        case AST_NODE_SLICE_OPEN:
        case AST_NODE_ARRAY_ACCESS:
        case AST_NODE_ARRAY_INIT_ONE:
        case AST_NODE_ARRAY_INIT_ONE_COMMA:
        case AST_NODE_SWITCH_RANGE:
        case AST_NODE_ERROR_UNION:
        case AST_NODE_FOR_RANGE:
        case AST_NODE_CALL_ONE:
        case AST_NODE_CALL_ONE_COMMA:
        case AST_NODE_STRUCT_INIT_ONE:
        case AST_NODE_STRUCT_INIT_ONE_COMMA:
        case AST_NODE_CALL:
        case AST_NODE_CALL_COMMA:
        case AST_NODE_STRUCT_INIT:
        case AST_NODE_STRUCT_INIT_COMMA:
        case AST_NODE_SLICE:
        case AST_NODE_SLICE_SENTINEL:
        case AST_NODE_ARRAY_INIT:
        case AST_NODE_ARRAY_INIT_COMMA:
        case AST_NODE_FIELD_ACCESS:
        case AST_NODE_UNWRAP_OPTIONAL:
        case AST_NODE_DEREF:
        case AST_NODE_ASYNC_CALL_ONE:
        case AST_NODE_ASYNC_CALL_ONE_COMMA:
        case AST_NODE_ASYNC_CALL:
        case AST_NODE_ASYNC_CALL_COMMA:
            n = tree->nodes.datas[n].lhs;
            continue;
        // Var decls: scan backwards for modifiers (Ast.zig:771-792).
        // TOKEN_STRING_LITERAL covers `extern "libname"` link sections.
        case AST_NODE_GLOBAL_VAR_DECL:
        case AST_NODE_LOCAL_VAR_DECL:
        case AST_NODE_SIMPLE_VAR_DECL:
        case AST_NODE_ALIGNED_VAR_DECL: {
            uint32_t mt = tree->nodes.main_tokens[n];
            uint32_t i = mt;
            while (i > 0) {
                TokenizerTag tt = tree->tokens.tags[i - 1];
                if (tt == TOKEN_KEYWORD_EXTERN || tt == TOKEN_KEYWORD_EXPORT
                    || tt == TOKEN_KEYWORD_PUB
                    || tt == TOKEN_KEYWORD_THREADLOCAL
                    || tt == TOKEN_KEYWORD_COMPTIME
                    || tt == TOKEN_STRING_LITERAL) {
                    i--;
                } else {
                    break;
                }
            }
            return i;
        }
        // Fn decls: scan backwards for modifiers (Ast.zig:737-759).
        case AST_NODE_FN_DECL:
        case AST_NODE_FN_PROTO_SIMPLE:
        case AST_NODE_FN_PROTO_MULTI:
        case AST_NODE_FN_PROTO_ONE:
        case AST_NODE_FN_PROTO: {
            uint32_t mt = tree->nodes.main_tokens[n];
            uint32_t i = mt;
            while (i > 0) {
                TokenizerTag tt = tree->tokens.tags[i - 1];
                if (tt == TOKEN_KEYWORD_EXTERN || tt == TOKEN_KEYWORD_EXPORT
                    || tt == TOKEN_KEYWORD_PUB || tt == TOKEN_KEYWORD_INLINE
                    || tt == TOKEN_KEYWORD_NOINLINE
                    || tt == TOKEN_STRING_LITERAL) {
                    i--;
                } else {
                    break;
                }
            }
            return i;
        }
        // Container fields: check for preceding comptime (Ast.zig:761-769).
        case AST_NODE_CONTAINER_FIELD_INIT:
        case AST_NODE_CONTAINER_FIELD_ALIGN:
        case AST_NODE_CONTAINER_FIELD: {
            uint32_t mt = tree->nodes.main_tokens[n];
            if (mt > 0 && tree->tokens.tags[mt - 1] == TOKEN_KEYWORD_COMPTIME)
                return mt - 1;
            return mt;
        }
        // Blocks: check for label (Ast.zig:794-805) — `name: {` puts an
        // identifier and colon before the lbrace main token.
        case AST_NODE_BLOCK:
        case AST_NODE_BLOCK_SEMICOLON:
        case AST_NODE_BLOCK_TWO:
        case AST_NODE_BLOCK_TWO_SEMICOLON: {
            uint32_t lbrace = tree->nodes.main_tokens[n];
            if (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
                && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER)
                return lbrace - 2;
            return lbrace;
        }
        // Fallback for any remaining node types.
        default:
            return tree->nodes.main_tokens[n];
        }
    }
}
// Mirrors AstGen.advanceSourceCursorToNode (AstGen.zig:13335).
// Advances the source cursor to the first token of `node`.
static void advanceSourceCursorToNode(AstGenCtx* ag, uint32_t node) {
    uint32_t ft = firstToken(ag->tree, node);
    uint32_t token_start = ag->tree->tokens.starts[ft];
    // NOTE(review): an explicit backward-cursor check was disabled here;
    // advanceSourceCursor's own assert(i <= end) still catches a backward
    // move, so this placeholder is redundant but harmless.
    (void)0; // cursor backward check disabled temporarily
    advanceSourceCursor(ag, token_start);
}
// Mirrors maybeAdvanceSourceCursorToMainToken (AstGen.zig:13324).
// No-op inside comptime scopes, matching upstream behavior.
static void advanceSourceCursorToMainToken(
    AstGenCtx* ag, const GenZir* gz, uint32_t node) {
    if (gz->is_comptime)
        return;
    const Ast* tree = ag->tree;
    uint32_t main_tok = tree->nodes.main_tokens[node];
    advanceSourceCursor(ag, tree->tokens.starts[main_tok]);
}
// --- Token helpers ---
// Mirrors GenZir.tokenIndexToRelative (AstGen.zig:11897): a token index
// expressed relative to the first token of this GenZir's declaration.
static int32_t tokenIndexToRelative(const GenZir* gz, uint32_t token) {
    uint32_t decl_first = firstToken(gz->astgen->tree, gz->decl_node_index);
    return (int32_t)token - (int32_t)decl_first;
}
// --- String bytes helpers ---
// Look up a string among the REGISTERED entries of string_bytes (mirrors
// the AstGen.string_table hash lookup, AstGen.zig:11564, as a linear scan).
// Only strings added via identAsString/strLitAsString are candidates.
// Returns the position in string_bytes, or UINT32_MAX when not present.
static uint32_t findExistingString(
    const AstGenCtx* ag, const char* str, uint32_t len) {
    for (uint32_t k = 0; k < ag->string_table_len; k++) {
        uint32_t pos = ag->string_table[k];
        // Registered strings are null-terminated inside string_bytes.
        const char* candidate = (const char*)ag->string_bytes + pos;
        if (strlen(candidate) != len)
            continue;
        if (memcmp(candidate, str, len) == 0)
            return pos;
    }
    return UINT32_MAX;
}
// Register a string_bytes position in the dedup table so future
// findExistingString calls can find it.
static void registerString(AstGenCtx* ag, uint32_t pos) {
    if (ag->string_table_len == ag->string_table_cap) {
        uint32_t new_cap = ag->string_table_cap * 2;
        if (new_cap < 64)
            new_cap = 64;
        uint32_t* grown
            = realloc(ag->string_table, new_cap * sizeof(uint32_t));
        if (grown == NULL)
            exit(1);
        ag->string_table = grown;
        ag->string_table_cap = new_cap;
    }
    ag->string_table[ag->string_table_len++] = pos;
}
// Length of the identifier run ([A-Za-z0-9_]) starting at `start`,
// bounded by `end`.
static uint32_t identRunLen(const Ast* tree, uint32_t start, uint32_t end) {
    uint32_t len = 0;
    while (start + len < end) {
        char c = tree->source[start + len];
        bool is_ident_char = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
            || (c >= '0' && c <= '9') || c == '_';
        if (!is_ident_char)
            break;
        len++;
    }
    return len;
}
// Mirrors AstGen.tokenIdentEql (AstGen.zig:6148-6152).
// Compares two identifier tokens by their source text without touching
// string_bytes. The delta between consecutive token starts over-approximates
// the token length (it includes trailing whitespace), so each token is
// trimmed to its actual identifier run first.
static bool tokenIdentEql(const Ast* tree, uint32_t tok1, uint32_t tok2) {
    uint32_t s1 = tree->tokens.starts[tok1];
    uint32_t s2 = tree->tokens.starts[tok2];
    uint32_t len1 = identRunLen(tree, s1, tree->tokens.starts[tok1 + 1]);
    uint32_t len2 = identRunLen(tree, s2, tree->tokens.starts[tok2 + 1]);
    if (len1 != len2)
        return false;
    return memcmp(tree->source + s1, tree->source + s2, len1) == 0;
}
// Forward declaration for strLitAsString (used by identAsString for @"..."
// quoted identifiers with escapes).
static void strLitAsString(AstGenCtx* ag, uint32_t str_lit_token,
uint32_t* out_index, uint32_t* out_len);
// Mirrors AstGen.identAsString (AstGen.zig:11530).
// Interns an identifier token into string_bytes (deduplicated via the
// string table) and returns its index. Handles bare identifiers and
// @"..." quoted identifiers, including escape sequences in the latter.
static uint32_t identAsString(AstGenCtx* ag, uint32_t ident_token) {
    uint32_t start = ag->tree->tokens.starts[ident_token];
    const char* source = ag->tree->source;
    if (source[start] == '@' && start + 1 < ag->tree->source_len
        && source[start + 1] == '"') {
        // Quoted identifier: @"name" (AstGen.zig:11297-11308).
        uint32_t content_start = start + 2; // skip @"
        // Find the closing quote, stepping over escape pairs so an escaped
        // quote (\") does not terminate the scan early.
        uint32_t content_end = content_start;
        while (content_end < ag->tree->source_len
            && source[content_end] != '"') {
            if (source[content_end] == '\\'
                && content_end + 1 < ag->tree->source_len)
                content_end += 2;
            else
                content_end++;
        }
        // Check for escapes.
        bool has_escapes = false;
        for (uint32_t j = content_start; j < content_end; j++) {
            if (source[j] == '\\') {
                has_escapes = true;
                break;
            }
        }
        if (!has_escapes) {
            // Fast path: the raw bytes ARE the identifier.
            uint32_t content_len = content_end - content_start;
            uint32_t existing
                = findExistingString(ag, source + content_start, content_len);
            if (existing != UINT32_MAX)
                return existing;
            uint32_t str_index = ag->string_bytes_len;
            ensureStringBytesCapacity(ag, content_len + 1);
            memcpy(ag->string_bytes + ag->string_bytes_len,
                source + content_start, content_len);
            ag->string_bytes_len += content_len;
            ag->string_bytes[ag->string_bytes_len++] = 0;
            registerString(ag, str_index);
            return str_index;
        }
        // Slow path: decode escape sequences into a temporary buffer and
        // intern the decoded bytes. (Previously this forwarded the token to
        // strLitAsString, which skips exactly one leading byte — correct
        // for a "..." literal but for an @"..." token it lands ON the
        // opening quote and interns an empty string.)
        uint32_t max_len = content_end - content_start;
        // Every escape's source text is at least as long as what it decodes
        // to (\xNN → 1 byte, \u{N...} → up to 4 bytes), so max_len bounds
        // the decoded length.
        uint8_t* buf = malloc(max_len ? max_len : 1);
        if (!buf)
            exit(1);
        uint32_t len = 0;
        uint32_t j = content_start;
        while (j < content_end) {
            char c = source[j];
            if (c != '\\') {
                buf[len++] = (uint8_t)c;
                j++;
                continue;
            }
            j++; // consume backslash
            if (j >= content_end)
                break;
            char esc = source[j++];
            switch (esc) {
            case 'n':
                buf[len++] = '\n';
                break;
            case 'r':
                buf[len++] = '\r';
                break;
            case 't':
                buf[len++] = '\t';
                break;
            case '\\':
                buf[len++] = '\\';
                break;
            case '\'':
                buf[len++] = '\'';
                break;
            case '"':
                buf[len++] = '"';
                break;
            case 'x': {
                // \xNN hex escape (two hex digits).
                uint8_t val = 0;
                for (int k = 0; k < 2 && j < content_end; k++, j++) {
                    char h = source[j];
                    if (h >= '0' && h <= '9')
                        val = (uint8_t)(val * 16 + (uint8_t)(h - '0'));
                    else if (h >= 'a' && h <= 'f')
                        val = (uint8_t)(val * 16 + 10 + (uint8_t)(h - 'a'));
                    else if (h >= 'A' && h <= 'F')
                        val = (uint8_t)(val * 16 + 10 + (uint8_t)(h - 'A'));
                    else
                        break;
                }
                buf[len++] = val;
                break;
            }
            case 'u': {
                // \u{NNNNNN} unicode escape, encoded as UTF-8
                // (string_literal.zig:194-231, unicode.zig:53-82).
                if (j < content_end && source[j] == '{')
                    j++;
                uint32_t cp = 0;
                while (j < content_end && source[j] != '}') {
                    char h = source[j];
                    if (h >= '0' && h <= '9')
                        cp = cp * 16 + (uint32_t)(h - '0');
                    else if (h >= 'a' && h <= 'f')
                        cp = cp * 16 + 10 + (uint32_t)(h - 'a');
                    else if (h >= 'A' && h <= 'F')
                        cp = cp * 16 + 10 + (uint32_t)(h - 'A');
                    else
                        break;
                    j++;
                }
                if (j < content_end && source[j] == '}')
                    j++;
                if (cp <= 0x7F) {
                    buf[len++] = (uint8_t)cp;
                } else if (cp <= 0x7FF) {
                    buf[len++] = (uint8_t)(0xC0 | (cp >> 6));
                    buf[len++] = (uint8_t)(0x80 | (cp & 0x3F));
                } else if (cp <= 0xFFFF) {
                    buf[len++] = (uint8_t)(0xE0 | (cp >> 12));
                    buf[len++] = (uint8_t)(0x80 | ((cp >> 6) & 0x3F));
                    buf[len++] = (uint8_t)(0x80 | (cp & 0x3F));
                } else {
                    buf[len++] = (uint8_t)(0xF0 | (cp >> 18));
                    buf[len++] = (uint8_t)(0x80 | ((cp >> 12) & 0x3F));
                    buf[len++] = (uint8_t)(0x80 | ((cp >> 6) & 0x3F));
                    buf[len++] = (uint8_t)(0x80 | (cp & 0x3F));
                }
                break;
            }
            default:
                // Unknown escape: keep the character verbatim. Invalid
                // escapes are rejected upstream during parsing, so this is
                // best-effort only.
                buf[len++] = (uint8_t)esc;
                break;
            }
        }
        uint32_t existing = findExistingString(ag, (const char*)buf, len);
        if (existing != UINT32_MAX) {
            free(buf);
            return existing;
        }
        uint32_t str_index = ag->string_bytes_len;
        ensureStringBytesCapacity(ag, len + 1);
        memcpy(ag->string_bytes + ag->string_bytes_len, buf, len);
        ag->string_bytes_len += len;
        ag->string_bytes[ag->string_bytes_len++] = 0;
        registerString(ag, str_index);
        free(buf);
        return str_index;
    }
    // Bare identifier: scan alphanumeric + underscore.
    uint32_t end = start;
    while (end < ag->tree->source_len) {
        char ch = source[end];
        if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')
            || (ch >= '0' && ch <= '9') || ch == '_') {
            end++;
        } else {
            break;
        }
    }
    uint32_t ident_len = end - start;
    // Check for existing string (dedup).
    uint32_t existing = findExistingString(ag, source + start, ident_len);
    if (existing != UINT32_MAX)
        return existing;
    uint32_t str_index = ag->string_bytes_len;
    ensureStringBytesCapacity(ag, ident_len + 1);
    memcpy(ag->string_bytes + ag->string_bytes_len, source + start, ident_len);
    ag->string_bytes_len += ident_len;
    ag->string_bytes[ag->string_bytes_len++] = 0;
    registerString(ag, str_index);
    return str_index;
}
// Mirrors AstGen.strLitAsString (AstGen.zig:11553).
// Decodes string literal, checks for embedded nulls.
// If embedded null found: store raw bytes without trailing null, no dedup.
// Otherwise: dedup via string_table, add trailing null.
//
// Outputs: *out_index = offset of the decoded bytes in string_bytes,
// *out_len = decoded byte count (excluding any trailing null terminator).
// NOTE(review): assumes the tokenizer already validated the literal, so
// escape sequences are well-formed and the closing quote exists — a
// truncated escape at end-of-source is handled defensively, not reported.
static void strLitAsString(AstGenCtx* ag, uint32_t str_lit_token,
    uint32_t* out_index, uint32_t* out_len) {
    uint32_t tok_start = ag->tree->tokens.starts[str_lit_token];
    const char* source = ag->tree->source;
    // Skip opening quote.
    uint32_t i = tok_start + 1;
    // Find closing quote, skipping escaped characters.
    uint32_t raw_end = i;
    while (raw_end < ag->tree->source_len) {
        if (source[raw_end] == '\\') {
            raw_end += 2; // skip escape + escaped char
        } else if (source[raw_end] == '"') {
            break;
        } else {
            raw_end++;
        }
    }
    // Check if there are any escape sequences.
    bool has_escapes = false;
    for (uint32_t j = i; j < raw_end; j++) {
        if (source[j] == '\\') {
            has_escapes = true;
            break;
        }
    }
    if (!has_escapes) {
        // Fast path: no escapes, no embedded nulls possible.
        uint32_t content_len = raw_end - i;
        uint32_t existing = findExistingString(ag, source + i, content_len);
        if (existing != UINT32_MAX) {
            *out_index = existing;
            *out_len = content_len;
            return;
        }
        uint32_t str_index = ag->string_bytes_len;
        ensureStringBytesCapacity(ag, content_len + 1);
        memcpy(
            ag->string_bytes + ag->string_bytes_len, source + i, content_len);
        ag->string_bytes_len += content_len;
        ag->string_bytes[ag->string_bytes_len++] = 0;
        registerString(ag, str_index);
        *out_index = str_index;
        *out_len = content_len;
        return;
    }
    // Slow path: process escape sequences (AstGen.zig:11558).
    // Decode directly into string_bytes (like upstream).
    // max_len + 1 is sufficient: every escape sequence is at least as many
    // source bytes as the bytes it decodes to.
    uint32_t str_index = ag->string_bytes_len;
    uint32_t max_len = raw_end - i;
    ensureStringBytesCapacity(ag, max_len + 1);
    while (i < raw_end) {
        if (source[i] == '\\') {
            i++;
            if (i >= raw_end)
                break;
            switch (source[i]) {
            case 'n':
                ag->string_bytes[ag->string_bytes_len++] = '\n';
                break;
            case 'r':
                ag->string_bytes[ag->string_bytes_len++] = '\r';
                break;
            case 't':
                ag->string_bytes[ag->string_bytes_len++] = '\t';
                break;
            case '\\':
                ag->string_bytes[ag->string_bytes_len++] = '\\';
                break;
            case '\'':
                ag->string_bytes[ag->string_bytes_len++] = '\'';
                break;
            case '"':
                ag->string_bytes[ag->string_bytes_len++] = '"';
                break;
            case 'x': {
                // \xNN hex escape.
                // Consumes up to two hex digits; the i + 1 < raw_end guard
                // stops at the end of the literal's content.
                uint8_t val = 0;
                for (int k = 0; k < 2 && i + 1 < raw_end; k++) {
                    i++;
                    char c = source[i];
                    if (c >= '0' && c <= '9')
                        val = (uint8_t)(val * 16 + (uint8_t)(c - '0'));
                    else if (c >= 'a' && c <= 'f')
                        val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'a'));
                    else if (c >= 'A' && c <= 'F')
                        val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'A'));
                }
                ag->string_bytes[ag->string_bytes_len++] = val;
                break;
            }
            case 'u': {
                // \u{NNNNNN} unicode escape (string_literal.zig:194-231).
                // Skip past '{'.
                i++;
                // Parse hex digits until '}'.
                // On loop exit i points at '}'; the i++ at the bottom of
                // the outer loop steps past it.
                uint32_t codepoint = 0;
                while (i + 1 < raw_end) {
                    i++;
                    char c = source[i];
                    if (c >= '0' && c <= '9') {
                        codepoint = codepoint * 16 + (uint32_t)(c - '0');
                    } else if (c >= 'a' && c <= 'f') {
                        codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'a');
                    } else if (c >= 'A' && c <= 'F') {
                        codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'A');
                    } else {
                        // Must be '}', done.
                        break;
                    }
                }
                // Encode codepoint as UTF-8 (unicode.zig:53-82).
                if (codepoint <= 0x7F) {
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)codepoint;
                } else if (codepoint <= 0x7FF) {
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0xC0 | (codepoint >> 6));
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0x80 | (codepoint & 0x3F));
                } else if (codepoint <= 0xFFFF) {
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0xE0 | (codepoint >> 12));
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0x80 | ((codepoint >> 6) & 0x3F));
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0x80 | (codepoint & 0x3F));
                } else {
                    // 4-byte form; NOTE(review): codepoints > 0x10FFFF are
                    // not rejected here — presumably caught earlier.
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0xF0 | (codepoint >> 18));
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0x80 | ((codepoint >> 12) & 0x3F));
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0x80 | ((codepoint >> 6) & 0x3F));
                    ag->string_bytes[ag->string_bytes_len++]
                        = (uint8_t)(0x80 | (codepoint & 0x3F));
                }
                break;
            }
            default:
                // Unknown escape: pass the character through verbatim.
                ag->string_bytes[ag->string_bytes_len++] = (uint8_t)source[i];
                break;
            }
        } else {
            ag->string_bytes[ag->string_bytes_len++] = (uint8_t)source[i];
        }
        i++;
    }
    uint32_t decoded_len = ag->string_bytes_len - str_index;
    uint8_t* key = ag->string_bytes + str_index;
    // Check for embedded null bytes (AstGen.zig:11560).
    // If found, skip dedup and don't add trailing null.
    bool has_embedded_null = false;
    for (uint32_t j = 0; j < decoded_len; j++) {
        if (key[j] == 0) {
            has_embedded_null = true;
            break;
        }
    }
    if (has_embedded_null) {
        *out_index = str_index;
        *out_len = decoded_len;
        return;
    }
    // Dedup against string_table (AstGen.zig:11564-11585).
    uint32_t existing = findExistingString(ag, (const char*)key, decoded_len);
    if (existing != UINT32_MAX) {
        // Shrink back (AstGen.zig:11570): discard the bytes we just
        // decoded and reuse the existing interned copy.
        ag->string_bytes_len = str_index;
        *out_index = existing;
        *out_len = decoded_len;
        return;
    }
    // New entry: add trailing null and register.
    ensureStringBytesCapacity(ag, 1);
    ag->string_bytes[ag->string_bytes_len++] = 0;
    registerString(ag, str_index);
    *out_index = str_index;
    *out_len = decoded_len;
}
// --- Declaration helpers ---
// Mirrors GenZir.makeDeclaration (AstGen.zig:12906).
// Reserves a ZIR_INST_DECLARATION instruction for a container-level decl.
// Only src_node is filled in here; payload_index is written later by
// setDeclaration. Returns the new instruction index.
static uint32_t makeDeclaration(AstGenCtx* ag, uint32_t node) {
    ensureInstCapacity(ag, 1);
    ZirInstData data;
    memset(&data, 0, sizeof(data));
    data.declaration.src_node = node;
    uint32_t inst = ag->inst_len++;
    ag->inst_tags[inst] = ZIR_INST_DECLARATION;
    ag->inst_datas[inst] = data;
    return inst;
}
// Mirrors GenZir.makeBreakCommon (AstGen.zig:12667).
// Emits a break_inline instruction targeting block_inst and carrying
// operand. The Zir.Inst.Break payload {operand_src_node, block_inst} is
// written to extra (Zir.zig:2489), and the new instruction is appended to
// gz's body.
static uint32_t makeBreakInline(GenZir* gz, uint32_t block_inst,
    uint32_t operand, int32_t operand_src_node) {
    AstGenCtx* ag = gz->astgen;
    ensureInstCapacity(ag, 1);
    ensureExtraCapacity(ag, 2);
    // Break payload: source node first, then the target block.
    uint32_t payload_index = ag->extra_len;
    ag->extra[payload_index] = (uint32_t)operand_src_node;
    ag->extra[payload_index + 1] = block_inst;
    ag->extra_len = payload_index + 2;
    ZirInstData data;
    data.break_data.operand = operand;
    data.break_data.payload_index = payload_index;
    uint32_t inst = ag->inst_len++;
    ag->inst_tags[inst] = ZIR_INST_BREAK_INLINE;
    ag->inst_datas[inst] = data;
    // Record in sub-block body.
    gzAppendInstruction(gz, inst);
    return inst;
}
// Mirrors GenZir.makeBlockInst (AstGen.zig:12890).
// Reserves a pl_node instruction of the given tag WITHOUT appending it to
// gz's instruction list. payload_index stays 0 until the body is written
// later (e.g. by setBlockBody). Returns the instruction index, not a ref.
static uint32_t makeBlockInst(
    AstGenCtx* ag, ZirInstTag tag, const GenZir* gz, uint32_t node) {
    ensureInstCapacity(ag, 1);
    ZirInstData data;
    memset(&data, 0, sizeof(data));
    // pl_node src_node is stored relative to the declaration's node.
    data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    uint32_t inst = ag->inst_len++;
    ag->inst_tags[inst] = tag;
    ag->inst_datas[inst] = data;
    return inst;
}
// Mirrors appendPossiblyRefdBodyInst (AstGen.zig:13675-13683).
// Appends body_inst to extra, then walks the ref_table chain hanging off
// it, appending (and removing) each chained ref instruction in turn.
// Caller must have reserved extra capacity for the full fixed-up length
// (see countBodyLenAfterFixups).
static void appendPossiblyRefdBodyInst(AstGenCtx* ag, uint32_t body_inst) {
    uint32_t inst = body_inst;
    for (;;) {
        ag->extra[ag->extra_len++] = inst;
        uint32_t ref_inst;
        if (!refTableFetchRemove(ag, inst, &ref_inst))
            break;
        inst = ref_inst;
    }
}
// Mirrors appendBodyWithFixupsExtraRefsArrayList (AstGen.zig:13659-13673).
// First flushes any ref_table entries attached to extra_refs (e.g.
// param_insts), prepending their chains, then writes the body
// instructions with ref_table fixups applied.
static void appendBodyWithFixupsExtraRefs(AstGenCtx* ag, const uint32_t* body,
    uint32_t body_len, const uint32_t* extra_refs, uint32_t extra_refs_len) {
    uint32_t ref_inst;
    for (uint32_t i = 0; i < extra_refs_len; i++) {
        if (refTableFetchRemove(ag, extra_refs[i], &ref_inst))
            appendPossiblyRefdBodyInst(ag, ref_inst);
    }
    for (uint32_t i = 0; i < body_len; i++)
        appendPossiblyRefdBodyInst(ag, body[i]);
}
// Scratch extra capacity helper (for call arg bodies).
// Grows scratch_extra so at least `additional` more words fit: doubles
// the capacity with a 64-word floor, aborting on OOM like the rest of
// this file.
static void ensureScratchExtraCapacity(AstGenCtx* ag, uint32_t additional) {
    uint32_t needed = ag->scratch_extra_len + additional;
    if (needed <= ag->scratch_extra_cap)
        return;
    uint32_t new_cap = ag->scratch_extra_cap * 2;
    if (new_cap < needed)
        new_cap = needed;
    if (new_cap < 64)
        new_cap = 64;
    uint32_t* grown = realloc(ag->scratch_extra, new_cap * sizeof(uint32_t));
    if (!grown)
        exit(1);
    ag->scratch_extra = grown;
    ag->scratch_extra_cap = new_cap;
}
// Like appendPossiblyRefdBodyInst but appends to scratch_extra instead of
// extra.
// Fix: the original only ensured capacity before the recursive call, so
// the very first append silently relied on the caller having reserved
// space. The ensure now runs before every write, making the function
// self-contained; the recursion is flattened into a loop as well.
static void appendPossiblyRefdBodyInstScratch(
    AstGenCtx* ag, uint32_t body_inst) {
    uint32_t inst = body_inst;
    for (;;) {
        ensureScratchExtraCapacity(ag, 1);
        ag->scratch_extra[ag->scratch_extra_len++] = inst;
        uint32_t ref_inst;
        if (!refTableFetchRemove(ag, inst, &ref_inst))
            break;
        inst = ref_inst;
    }
}
// Counts the ref_table chain entries hanging off check_inst (not
// including check_inst itself). Does not modify the table.
static uint32_t countRefChain(AstGenCtx* ag, uint32_t check_inst) {
    uint32_t count = 0;
    const uint32_t* ref;
    while ((ref = refTableGet(ag, check_inst)) != NULL) {
        count++;
        check_inst = *ref;
    }
    return count;
}
// Mirrors countBodyLenAfterFixupsExtraRefs (AstGen.zig:13694-13711).
// Returns the number of extra words the body will occupy once ref_table
// fixups are expanded: one word per body instruction plus the chains of
// both the body and the extra refs. (The two previously duplicated chain
// walks are factored into countRefChain.)
static uint32_t countBodyLenAfterFixupsExtraRefs(AstGenCtx* ag,
    const uint32_t* body, uint32_t body_len, const uint32_t* extra_refs,
    uint32_t extra_refs_len) {
    uint32_t count = body_len;
    for (uint32_t i = 0; i < body_len; i++)
        count += countRefChain(ag, body[i]);
    for (uint32_t i = 0; i < extra_refs_len; i++)
        count += countRefChain(ag, extra_refs[i]);
    return count;
}
// Mirrors countBodyLenAfterFixups (AstGen.zig:13686-13688).
// Convenience wrapper: fixed-up body length with no extra refs to count.
static uint32_t countBodyLenAfterFixups(
    AstGenCtx* ag, const uint32_t* body, uint32_t body_len) {
    return countBodyLenAfterFixupsExtraRefs(ag, body, body_len, NULL, 0);
}
// Mirrors GenZir.setBlockBody (AstGen.zig:11949).
// Finalizes a block instruction: writes its Block payload (body_len
// followed by the fixed-up instruction list) to extra, points the
// instruction's payload_index at it, and unstacks gz.
static void setBlockBody(AstGenCtx* ag, GenZir* gz, uint32_t inst) {
    const uint32_t* body = gzInstructionsSlice(gz);
    uint32_t raw_len = gzInstructionsLen(gz);
    uint32_t fixed_len = countBodyLenAfterFixups(ag, body, raw_len);
    ensureExtraCapacity(ag, 1 + fixed_len);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = fixed_len; // Block.body_len
    for (uint32_t i = 0; i < raw_len; i++)
        appendPossiblyRefdBodyInst(ag, body[i]);
    ag->inst_datas[inst].pl_node.payload_index = payload_index;
    gzUnstack(gz);
}
// Mirrors GenZir.setTryBody (AstGen.zig:11997).
// Finalizes a try instruction: writes its Try payload (operand, body_len,
// then the fixed-up instruction list) to extra, points the instruction's
// payload_index at it, and unstacks gz.
static void setTryBody(
    AstGenCtx* ag, GenZir* gz, uint32_t inst, uint32_t operand) {
    const uint32_t* body = gzInstructionsSlice(gz);
    uint32_t raw_len = gzInstructionsLen(gz);
    uint32_t fixed_len = countBodyLenAfterFixups(ag, body, raw_len);
    ensureExtraCapacity(ag, 2 + fixed_len);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = operand; // Try.operand
    ag->extra[ag->extra_len++] = fixed_len; // Try.body_len
    for (uint32_t i = 0; i < raw_len; i++)
        appendPossiblyRefdBodyInst(ag, body[i]);
    ag->inst_datas[inst].pl_node.payload_index = payload_index;
    gzUnstack(gz);
}
// Mirrors GenZir.setBlockComptimeBody (AstGen.zig:11972).
// Like setBlockBody but the payload starts with comptime_reason before
// body_len. Upstream asserts inst is a BLOCK_COMPTIME.
static void setBlockComptimeBody(
    AstGenCtx* ag, GenZir* gz, uint32_t inst, uint32_t comptime_reason) {
    const uint32_t* body = gzInstructionsSlice(gz);
    uint32_t raw_len = gzInstructionsLen(gz);
    uint32_t fixed_len = countBodyLenAfterFixups(ag, body, raw_len);
    ensureExtraCapacity(ag, 2 + fixed_len);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = comptime_reason;
    ag->extra[ag->extra_len++] = fixed_len;
    for (uint32_t i = 0; i < raw_len; i++)
        appendPossiblyRefdBodyInst(ag, body[i]);
    ag->inst_datas[inst].pl_node.payload_index = payload_index;
    gzUnstack(gz);
}
// Mirrors GenZir.addBreak (AstGen.zig:12623).
// Emits a break-family instruction (tag chosen by caller) with a Break
// payload {operand_src_node, block_inst} in extra, and appends it to gz's
// body. Returns the instruction index.
static uint32_t addBreak(GenZir* gz, ZirInstTag tag, uint32_t block_inst,
    uint32_t operand, int32_t operand_src_node) {
    AstGenCtx* ag = gz->astgen;
    ensureInstCapacity(ag, 1);
    ensureExtraCapacity(ag, 2);
    uint32_t payload_index = ag->extra_len;
    ag->extra[payload_index] = (uint32_t)operand_src_node;
    ag->extra[payload_index + 1] = block_inst;
    ag->extra_len = payload_index + 2;
    ZirInstData data;
    data.break_data.operand = operand;
    data.break_data.payload_index = payload_index;
    uint32_t inst = ag->inst_len++;
    ag->inst_tags[inst] = tag;
    ag->inst_datas[inst] = data;
    gzAppendInstruction(gz, inst);
    return inst;
}
// Mirrors GenZir.addCondBr (AstGen.zig:12834).
// Emits a condbr placeholder with only src_node set; the payload is
// written later by setCondBrPayload. Appends to gz's body and returns the
// instruction index.
static uint32_t addCondBr(GenZir* gz, ZirInstTag tag, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    ensureInstCapacity(ag, 1);
    ZirInstData data;
    memset(&data, 0, sizeof(data));
    // src_node is relative to the declaration node; payload_index stays 0
    // until setCondBrPayload fills it in.
    data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    uint32_t inst = ag->inst_len++;
    ag->inst_tags[inst] = tag;
    ag->inst_datas[inst] = data;
    gzAppendInstruction(gz, inst);
    return inst;
}
// Mirrors setCondBrPayload (AstGen.zig:6501).
// Writes the CondBr payload {condition, then_body_len, else_body_len}
// followed by the fixed-up then-body and else-body, points the condbr's
// payload_index at it, then unstacks both scopes.
// IMPORTANT: else_gz is stacked on top of then_gz, so then's slice must
// stop where else begins (instructionsSliceUpto), and the scopes must be
// unstacked in LIFO order (else first).
static void setCondBrPayload(AstGenCtx* ag, uint32_t condbr_inst,
    uint32_t condition, GenZir* then_gz, GenZir* else_gz) {
    const uint32_t* then_body = gzInstructionsSliceUpto(then_gz, else_gz);
    const uint32_t* else_body = gzInstructionsSlice(else_gz);
    uint32_t then_raw = gzInstructionsLenUpto(then_gz, else_gz);
    uint32_t else_raw = gzInstructionsLen(else_gz);
    uint32_t then_len = countBodyLenAfterFixups(ag, then_body, then_raw);
    uint32_t else_len = countBodyLenAfterFixups(ag, else_body, else_raw);
    ensureExtraCapacity(ag, 3 + then_len + else_len);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = condition; // CondBr.condition
    ag->extra[ag->extra_len++] = then_len; // CondBr.then_body_len
    ag->extra[ag->extra_len++] = else_len; // CondBr.else_body_len
    for (uint32_t i = 0; i < then_raw; i++)
        appendPossiblyRefdBodyInst(ag, then_body[i]);
    for (uint32_t i = 0; i < else_raw; i++)
        appendPossiblyRefdBodyInst(ag, else_body[i]);
    ag->inst_datas[condbr_inst].pl_node.payload_index = payload_index;
    gzUnstack(else_gz);
    gzUnstack(then_gz);
}
// Does this Declaration.Flags.Id have a name? (Zir.zig:2762)
// Everything except unnamed tests and comptime blocks carries a name.
static bool declIdHasName(DeclFlagsId id) {
    switch (id) {
    case DECL_ID_UNNAMED_TEST:
    case DECL_ID_COMPTIME:
        return false;
    default:
        return true;
    }
}
// Does this Declaration.Flags.Id have a lib name? (Zir.zig:2771)
// Only the non-"simple" extern const/var forms carry a lib name.
static bool declIdHasLibName(DeclFlagsId id) {
    return id == DECL_ID_EXTERN_CONST || id == DECL_ID_PUB_EXTERN_CONST
        || id == DECL_ID_EXTERN_VAR || id == DECL_ID_EXTERN_VAR_THREADLOCAL
        || id == DECL_ID_PUB_EXTERN_VAR
        || id == DECL_ID_PUB_EXTERN_VAR_THREADLOCAL;
}
// Does this Declaration.Flags.Id have a type body? (Zir.zig:2783)
// False only for tests, comptime blocks, and the "simple" const/var
// forms, which never carry an explicit type annotation.
static bool declIdHasTypeBody(DeclFlagsId id) {
    bool untyped = id == DECL_ID_UNNAMED_TEST || id == DECL_ID_TEST
        || id == DECL_ID_DECLTEST || id == DECL_ID_COMPTIME
        || id == DECL_ID_CONST_SIMPLE || id == DECL_ID_PUB_CONST_SIMPLE
        || id == DECL_ID_VAR_SIMPLE || id == DECL_ID_PUB_VAR_SIMPLE;
    return !untyped;
}
// Does this Declaration.Flags.Id have a value body? (Zir.zig:2800)
// Extern declarations have no initializer, hence no value body.
static bool declIdHasValueBody(DeclFlagsId id) {
    bool is_extern = id == DECL_ID_EXTERN_CONST_SIMPLE
        || id == DECL_ID_EXTERN_CONST
        || id == DECL_ID_PUB_EXTERN_CONST_SIMPLE
        || id == DECL_ID_PUB_EXTERN_CONST || id == DECL_ID_EXTERN_VAR
        || id == DECL_ID_EXTERN_VAR_THREADLOCAL
        || id == DECL_ID_PUB_EXTERN_VAR
        || id == DECL_ID_PUB_EXTERN_VAR_THREADLOCAL;
    return !is_extern;
}
// Does this Declaration.Flags.Id have special bodies
// (align/linksection/addrspace)? (Zir.zig:2815)
static bool declIdHasSpecialBodies(DeclFlagsId id) {
    bool simple = id == DECL_ID_UNNAMED_TEST || id == DECL_ID_TEST
        || id == DECL_ID_DECLTEST || id == DECL_ID_COMPTIME
        || id == DECL_ID_CONST_SIMPLE || id == DECL_ID_CONST_TYPED
        || id == DECL_ID_PUB_CONST_SIMPLE || id == DECL_ID_PUB_CONST_TYPED
        || id == DECL_ID_EXTERN_CONST_SIMPLE
        || id == DECL_ID_PUB_EXTERN_CONST_SIMPLE
        || id == DECL_ID_VAR_SIMPLE || id == DECL_ID_PUB_VAR_SIMPLE;
    return !simple;
}
// Mirrors setDeclaration (AstGen.zig:13883).
// Full version with type/align/linksection/addrspace/value bodies.
// Argument bundle for setDeclaration below. Body pointers may be NULL
// when the corresponding *_len is 0; lengths are pre-fixup counts
// (ref_table fixups are applied as the bodies are written).
typedef struct {
    uint32_t src_line;
    uint32_t src_column;
    DeclFlagsId id;
    uint32_t name; // NullTerminatedString index
    uint32_t lib_name; // NullTerminatedString index (UINT32_MAX=none)
    const uint32_t* type_body;
    uint32_t type_body_len;
    const uint32_t* align_body;
    uint32_t align_body_len;
    const uint32_t* linksection_body;
    uint32_t linksection_body_len;
    const uint32_t* addrspace_body;
    uint32_t addrspace_body_len;
    const uint32_t* value_body;
    uint32_t value_body_len;
} SetDeclArgs;
// Writes the Declaration payload for decl_inst: src_hash, packed flags,
// then the optional name/lib_name/length fields enabled by the id, then
// the fixed-up bodies in type/align/linksection/addrspace/value order.
static void setDeclaration(
    AstGenCtx* ag, uint32_t decl_inst, SetDeclArgs args) {
    DeclFlagsId id = args.id;
    // Which optional trailing fields this declaration form carries
    // (Zir.zig:2762-2815).
    bool has_name = declIdHasName(id);
    bool has_lib_name = declIdHasLibName(id);
    bool has_type_body_field = declIdHasTypeBody(id);
    bool has_special_bodies = declIdHasSpecialBodies(id);
    bool has_value_body_field = declIdHasValueBody(id);
    // Body lengths after ref_table fixups are expanded.
    uint32_t type_len
        = countBodyLenAfterFixups(ag, args.type_body, args.type_body_len);
    uint32_t align_len
        = countBodyLenAfterFixups(ag, args.align_body, args.align_body_len);
    uint32_t linksection_len = countBodyLenAfterFixups(
        ag, args.linksection_body, args.linksection_body_len);
    uint32_t addrspace_len = countBodyLenAfterFixups(
        ag, args.addrspace_body, args.addrspace_body_len);
    uint32_t value_len
        = countBodyLenAfterFixups(ag, args.value_body, args.value_body_len);
    uint32_t need = 6; // src_hash[4] + flags[2]
    if (has_name)
        need++;
    if (has_lib_name)
        need++;
    if (has_type_body_field)
        need++;
    if (has_special_bodies)
        need += 3;
    if (has_value_body_field)
        need++;
    need += type_len + align_len + linksection_len + addrspace_len + value_len;
    ensureExtraCapacity(ag, need);
    uint32_t payload_start = ag->extra_len;
    // src_hash (4 words): zero-filled; hash comparison skipped in tests.
    ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = 0;
    // Declaration.Flags: packed struct(u64) { src_line: u30, src_column: u29,
    // id: u5 } (Zir.zig:2719)
    uint64_t flags = 0;
    flags |= (uint64_t)(args.src_line & 0x3FFFFFFFu);
    flags |= (uint64_t)(args.src_column & 0x1FFFFFFFu) << 30;
    flags |= (uint64_t)((uint32_t)id & 0x1Fu) << 59;
    // Stored little-endian across two extra words: low half first.
    ag->extra[ag->extra_len++] = (uint32_t)(flags & 0xFFFFFFFFu);
    ag->extra[ag->extra_len++] = (uint32_t)(flags >> 32);
    if (has_name)
        ag->extra[ag->extra_len++] = args.name;
    if (has_lib_name) {
        // NOTE(review): UINT32_MAX ("no lib name") is encoded as 0 —
        // presumably index 0 is the empty/sentinel string; confirm.
        ag->extra[ag->extra_len++]
            = (args.lib_name != UINT32_MAX) ? args.lib_name : 0;
    }
    if (has_type_body_field)
        ag->extra[ag->extra_len++] = type_len;
    if (has_special_bodies) {
        ag->extra[ag->extra_len++] = align_len;
        ag->extra[ag->extra_len++] = linksection_len;
        ag->extra[ag->extra_len++] = addrspace_len;
    }
    if (has_value_body_field)
        ag->extra[ag->extra_len++] = value_len;
    // Bodies follow the header in a fixed order, each with ref_table
    // fixups expanded inline.
    for (uint32_t i = 0; i < args.type_body_len; i++)
        appendPossiblyRefdBodyInst(ag, args.type_body[i]);
    for (uint32_t i = 0; i < args.align_body_len; i++)
        appendPossiblyRefdBodyInst(ag, args.align_body[i]);
    for (uint32_t i = 0; i < args.linksection_body_len; i++)
        appendPossiblyRefdBodyInst(ag, args.linksection_body[i]);
    for (uint32_t i = 0; i < args.addrspace_body_len; i++)
        appendPossiblyRefdBodyInst(ag, args.addrspace_body[i]);
    for (uint32_t i = 0; i < args.value_body_len; i++)
        appendPossiblyRefdBodyInst(ag, args.value_body[i]);
    ag->inst_datas[decl_inst].declaration.payload_index = payload_start;
}
// --- StructDecl.Small packing (Zir.zig StructDecl.Small) ---
// Unpacked form of the 16-bit `small` operand of an extended struct_decl;
// bit positions are assigned by packStructDeclSmall.
typedef struct {
    bool has_captures_len; // bit 0
    bool has_fields_len; // bit 1
    bool has_decls_len; // bit 2
    bool has_backing_int; // bit 3
    bool known_non_opv; // bit 4
    bool known_comptime_only; // bit 5
    uint8_t name_strategy; // 2 bits (bits 6-7)
    uint8_t layout; // 2 bits (bits 8-9)
    bool any_default_inits; // bit 10
    bool any_comptime_fields; // bit 11
    bool any_aligned_fields; // bit 12
} StructDeclSmall;
// Packs a StructDeclSmall into the 16-bit `small` operand of an extended
// struct_decl instruction (layout per Zir.zig StructDecl.Small). Bits 13+
// remain zero.
static uint16_t packStructDeclSmall(StructDeclSmall s) {
    uint16_t bits = 0;
    bits |= (uint16_t)(s.has_captures_len ? 1u << 0 : 0u);
    bits |= (uint16_t)(s.has_fields_len ? 1u << 1 : 0u);
    bits |= (uint16_t)(s.has_decls_len ? 1u << 2 : 0u);
    bits |= (uint16_t)(s.has_backing_int ? 1u << 3 : 0u);
    bits |= (uint16_t)(s.known_non_opv ? 1u << 4 : 0u);
    bits |= (uint16_t)(s.known_comptime_only ? 1u << 5 : 0u);
    bits |= (uint16_t)((s.name_strategy & 0x3u) << 6);
    bits |= (uint16_t)((s.layout & 0x3u) << 8);
    bits |= (uint16_t)(s.any_default_inits ? 1u << 10 : 0u);
    bits |= (uint16_t)(s.any_comptime_fields ? 1u << 11 : 0u);
    bits |= (uint16_t)(s.any_aligned_fields ? 1u << 12 : 0u);
    return bits;
}
// Mirrors GenZir.setStruct (AstGen.zig:12935).
// Converts `inst` into an extended struct_decl instruction and writes the
// fixed StructDecl payload header plus whichever optional length fields
// `small` enables. Captures, backing int, decls, fields and bodies are
// appended by the caller afterwards.
static void setStruct(AstGenCtx* ag, uint32_t inst, uint32_t src_node,
    StructDeclSmall small, uint32_t captures_len, uint32_t fields_len,
    uint32_t decls_len) {
    // Worst case: 6 fixed words + 3 optional length words.
    ensureExtraCapacity(ag, 6 + 3);
    uint32_t payload_index = ag->extra_len;
    // fields_hash (4 words) left zeroed; hash comparison skipped in tests.
    for (int i = 0; i < 4; i++)
        ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = ag->source_line;
    ag->extra[ag->extra_len++] = src_node;
    if (small.has_captures_len)
        ag->extra[ag->extra_len++] = captures_len;
    if (small.has_fields_len)
        ag->extra[ag->extra_len++] = fields_len;
    if (small.has_decls_len)
        ag->extra[ag->extra_len++] = decls_len;
    ZirInstData data;
    memset(&data, 0, sizeof(data));
    data.extended.opcode = (uint16_t)ZIR_EXT_STRUCT_DECL;
    data.extended.small = packStructDeclSmall(small);
    data.extended.operand = payload_index;
    ag->inst_tags[inst] = ZIR_INST_EXTENDED;
    ag->inst_datas[inst] = data;
}
// --- scanContainer (AstGen.zig:13384) ---
// Appends a name→node pair to the container decl table, growing the two
// parallel arrays (decl_names/decl_nodes) together. Aborts on OOM.
static void addDeclToTable(
    AstGenCtx* ag, uint32_t name_str_index, uint32_t node) {
    if (ag->decl_table_len >= ag->decl_table_cap) {
        uint32_t new_cap = ag->decl_table_cap ? ag->decl_table_cap * 2 : 8;
        uint32_t* names = realloc(ag->decl_names, new_cap * sizeof(uint32_t));
        uint32_t* nodes = realloc(ag->decl_nodes, new_cap * sizeof(uint32_t));
        if (!names || !nodes)
            exit(1);
        ag->decl_names = names;
        ag->decl_nodes = nodes;
        ag->decl_table_cap = new_cap;
    }
    uint32_t slot = ag->decl_table_len++;
    ag->decl_names[slot] = name_str_index;
    ag->decl_nodes[slot] = node;
}
// Mirrors scanContainer (AstGen.zig:13384).
// Counts declarations in a container and registers named decls in the
// decl table (namespace.decls) for identifier resolution. Field and test
// names are interned too so string table ordering matches upstream.
static uint32_t scanContainer(
    AstGenCtx* ag, const uint32_t* members, uint32_t member_count) {
    const Ast* tree = ag->tree;
    uint32_t decl_count = 0;
    for (uint32_t i = 0; i < member_count; i++) {
        uint32_t member = members[i];
        switch (tree->nodes.tags[member]) {
        // Named decls: var/const decls and fn protos/decls. In both
        // groups the name token directly follows the main token.
        case AST_NODE_GLOBAL_VAR_DECL:
        case AST_NODE_LOCAL_VAR_DECL:
        case AST_NODE_SIMPLE_VAR_DECL:
        case AST_NODE_ALIGNED_VAR_DECL:
        case AST_NODE_FN_PROTO_SIMPLE:
        case AST_NODE_FN_PROTO_MULTI:
        case AST_NODE_FN_PROTO_ONE:
        case AST_NODE_FN_PROTO:
        case AST_NODE_FN_DECL: {
            decl_count++;
            uint32_t name_token = tree->nodes.main_tokens[member] + 1;
            addDeclToTable(ag, identAsString(ag, name_token), member);
            break;
        }
        // Container fields: intern field name for string table ordering
        // (AstGen.zig:13509).
        case AST_NODE_CONTAINER_FIELD_INIT:
        case AST_NODE_CONTAINER_FIELD_ALIGN:
        case AST_NODE_CONTAINER_FIELD:
            identAsString(ag, tree->nodes.main_tokens[member]);
            break;
        case AST_NODE_COMPTIME:
            decl_count++;
            break;
        case AST_NODE_TEST_DECL: {
            decl_count++;
            // Intern the test name to match upstream string table
            // ordering (AstGen.zig:13465-13500).
            uint32_t name_token = tree->nodes.main_tokens[member] + 1;
            TokenizerTag tt = tree->tokens.tags[name_token];
            if (tt == TOKEN_STRING_LITERAL) {
                uint32_t si, sl;
                strLitAsString(ag, name_token, &si, &sl);
            } else if (tt == TOKEN_IDENTIFIER) {
                identAsString(ag, name_token);
            }
            break;
        }
        default:
            break;
        }
    }
    return decl_count;
}
// --- Import tracking ---
// Records an @import for the ZIR imports list, skipping names that are
// already present. Grows the imports array as needed; aborts on OOM.
static void addImport(AstGenCtx* ag, uint32_t name_index, uint32_t token) {
    for (uint32_t i = 0; i < ag->imports_len; i++) {
        if (ag->imports[i].name == name_index)
            return; // duplicate import name
    }
    if (ag->imports_len >= ag->imports_cap) {
        uint32_t new_cap = ag->imports_cap ? ag->imports_cap * 2 : 4;
        ImportEntry* grown
            = realloc(ag->imports, new_cap * sizeof(ImportEntry));
        if (!grown)
            exit(1);
        ag->imports = grown;
        ag->imports_cap = new_cap;
    }
    ImportEntry entry = { .name = name_index, .token = token };
    ag->imports[ag->imports_len++] = entry;
}
// Write imports list to extra (AstGen.zig:227-244).
// Stores {count, (name, token)...} and points ZIR_EXTRA_IMPORTS at it;
// index 0 means "no imports".
static void writeImports(AstGenCtx* ag) {
    if (ag->imports_len == 0) {
        ag->extra[ZIR_EXTRA_IMPORTS] = 0;
        return;
    }
    ensureExtraCapacity(ag, 1 + ag->imports_len * 2);
    uint32_t imports_index = ag->extra_len;
    ag->extra[ag->extra_len++] = ag->imports_len;
    for (uint32_t i = 0; i < ag->imports_len; i++) {
        const ImportEntry* entry = &ag->imports[i];
        ag->extra[ag->extra_len++] = entry->name;
        ag->extra[ag->extra_len++] = entry->token;
    }
    ag->extra[ZIR_EXTRA_IMPORTS] = imports_index;
}
// ri.br() (AstGen.zig:274-282): when branching, a coerced_ty result
// location degrades to a plain ty; every other RL passes through as-is.
static inline ResultLoc rlBr(ResultLoc rl) {
    if (rl.tag != RL_COERCED_TY)
        return rl;
    return (ResultLoc) {
        .tag = RL_TY, .data = rl.data, .src_node = 0, .ctx = rl.ctx
    };
}
// setBreakResultInfo (AstGen.zig:11905-11926): compute the result
// location breaks out of a block should use, from the parent RL.
// Step 1 computes block_ri (AstGen.zig:7639-7646): with need_rl the
// parent RL is forwarded untouched; otherwise ptr collapses to its
// element type and inferred_ptr to none. Step 2 applies
// setBreakResultInfo proper: coerced_ty → ty, discard → discard value
// (without forwarding ctx), everything else passes through.
static ResultLoc breakResultInfo(
    GenZir* gz, ResultLoc parent_rl, uint32_t node, bool need_rl) {
    ResultLoc block_ri = parent_rl;
    if (!need_rl) {
        if (parent_rl.tag == RL_PTR) {
            // Result pointer: break with the pointee type instead.
            uint32_t ptr_ty
                = addUnNode(gz, ZIR_INST_TYPEOF, parent_rl.data, node);
            uint32_t ty = addUnNode(gz, ZIR_INST_ELEM_TYPE, ptr_ty, node);
            block_ri = (ResultLoc) {
                .tag = RL_TY, .data = ty, .src_node = 0, .ctx = parent_rl.ctx
            };
        } else if (parent_rl.tag == RL_INFERRED_PTR) {
            block_ri = (ResultLoc) {
                .tag = RL_NONE, .data = 0, .src_node = 0, .ctx = parent_rl.ctx
            };
        }
    }
    if (block_ri.tag == RL_COERCED_TY) {
        return (ResultLoc) { .tag = RL_TY,
            .data = block_ri.data,
            .src_node = 0,
            .ctx = block_ri.ctx };
    }
    if (block_ri.tag == RL_DISCARD) {
        // Don't forward ctx (AstGen.zig:11916-11920).
        return RL_DISCARD_VAL;
    }
    return block_ri;
}
// resultType (AstGen.zig:341-351): extract the result type implied by a
// result location. Returns 0 when no result type is available.
static uint32_t rlResultType(GenZir* gz, ResultLoc rl, uint32_t node) {
    if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY)
        return rl.data;
    if (rl.tag == RL_REF_COERCED_TY) {
        // AstGen.zig:345: .ref_coerced_ty => |ptr_ty|
        // gz.addUnNode(.elem_type, ptr_ty, node)
        return addUnNode(gz, ZIR_INST_ELEM_TYPE, rl.data, node);
    }
    if (rl.tag == RL_PTR) {
        // typeof(ptr) -> elem_type (AstGen.zig:346-349).
        uint32_t ptr_ty = addUnNode(gz, ZIR_INST_TYPEOF, rl.data, node);
        return addUnNode(gz, ZIR_INST_ELEM_TYPE, ptr_ty, node);
    }
    return 0;
}
// rvalue (AstGen.zig:11051-11224): apply result location wrapping.
static uint32_t rvalue(
GenZir* gz, ResultLoc rl, uint32_t result, uint32_t node) {
switch (rl.tag) {
case RL_NONE:
case RL_COERCED_TY:
return result;
case RL_DISCARD:
// ensure_result_non_error (AstGen.zig:11071-11074).
addUnNode(gz, ZIR_INST_ENSURE_RESULT_NON_ERROR, result, node);
return ZIR_REF_VOID_VALUE;
case RL_REF:
case RL_REF_COERCED_TY: {
// coerce_ptr_elem_ty for ref_coerced_ty (AstGen.zig:11077-11083).
uint32_t coerced_result = result;
if (rl.tag == RL_REF_COERCED_TY) {
coerced_result = addPlNodeBin(
gz, ZIR_INST_COERCE_PTR_ELEM_TY, node, rl.data, result);
}
AstGenCtx* ag = gz->astgen;
uint32_t src_token = firstToken(ag->tree, node);
// If result is not an instruction index (e.g. a well-known ref),
// emit ref directly (AstGen.zig:11091-11092).
if (coerced_result < ZIR_REF_START_INDEX) {
return addUnTok(gz, ZIR_INST_REF, coerced_result, src_token);
}
// Deduplication via ref_table (AstGen.zig:11093-11097).
uint32_t result_index = coerced_result - ZIR_REF_START_INDEX;
bool found;
uint32_t* val_ptr = refTableGetOrPut(ag, result_index, &found);
if (!found) {
*val_ptr = makeUnTok(gz, ZIR_INST_REF, coerced_result, src_token);
}
return *val_ptr + ZIR_REF_START_INDEX;
}
case RL_TY: {
// Quick elimination of common, unnecessary type coercions
// (AstGen.zig:11099-11209).
#define RC(t, v) (((uint64_t)(t) << 32) | (uint64_t)(v))
uint64_t combined = RC(rl.data, result);
switch (combined) {
// Identity: type of result is already correct
// (AstGen.zig:11109-11176).
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U1_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U8_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I8_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U16_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U29_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I16_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U32_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I32_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U64_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I64_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U128_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I128_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_USIZE_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ISIZE_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_CHAR_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_SHORT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_USHORT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_INT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_UINT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONG_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_ULONG_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONGLONG_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_ULONGLONG_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONGDOUBLE_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F16_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F32_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F64_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F80_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F128_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYOPAQUE_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_BOOL_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_VOID_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_TYPE_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYERROR_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_COMPTIME_INT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_COMPTIME_FLOAT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_NORETURN_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYFRAME_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_NULL_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_UNDEFINED_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ENUM_LITERAL_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_PTR_USIZE_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_PTR_CONST_COMPTIME_INT_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_U8_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_CONST_U8_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_CONST_U8_SENTINEL_0_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_SLICE_CONST_U8_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_SLICE_CONST_U8_SENTINEL_0_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_GENERIC_POISON_TYPE):
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_EMPTY_TUPLE_TYPE):
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO):
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE):
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_NEGATIVE_ONE):
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF_USIZE):
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_USIZE):
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_USIZE):
case RC(ZIR_REF_U1_TYPE, ZIR_REF_UNDEF_U1):
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO_U1):
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE_U1):
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO_U8):
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE_U8):
case RC(ZIR_REF_U8_TYPE, ZIR_REF_FOUR_U8):
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_UNDEF_BOOL):
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_BOOL_TRUE):
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_BOOL_FALSE):
case RC(ZIR_REF_VOID_TYPE, ZIR_REF_VOID_VALUE):
return result;
// Conversions (AstGen.zig:11178-11202).
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_UNDEF):
return ZIR_REF_UNDEF_BOOL;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF):
return ZIR_REF_UNDEF_USIZE;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF_U1):
return ZIR_REF_UNDEF_USIZE;
case RC(ZIR_REF_U1_TYPE, ZIR_REF_UNDEF):
return ZIR_REF_UNDEF_U1;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO):
return ZIR_REF_ZERO_USIZE;
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO):
return ZIR_REF_ZERO_U1;
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO):
return ZIR_REF_ZERO_U8;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE):
return ZIR_REF_ONE_USIZE;
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE):
return ZIR_REF_ONE_U1;
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE):
return ZIR_REF_ONE_U8;
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_USIZE):
return ZIR_REF_ZERO;
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO_USIZE):
return ZIR_REF_ZERO_U1;
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO_USIZE):
return ZIR_REF_ZERO_U8;
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_USIZE):
return ZIR_REF_ONE;
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE_USIZE):
return ZIR_REF_ONE_U1;
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE_USIZE):
return ZIR_REF_ONE_U8;
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_U1):
return ZIR_REF_ZERO;
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_U8):
return ZIR_REF_ZERO;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_U1):
return ZIR_REF_ZERO_USIZE;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_U8):
return ZIR_REF_ZERO_USIZE;
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_U1):
return ZIR_REF_ONE;
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_U8):
return ZIR_REF_ONE;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_U1):
return ZIR_REF_ONE_USIZE;
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_U8):
return ZIR_REF_ONE_USIZE;
default: {
ZirInstTag as_tag = (rl.ctx == RI_CTX_SHIFT_OP)
? ZIR_INST_AS_SHIFT_OPERAND
: ZIR_INST_AS_NODE;
return addPlNodeBin(gz, as_tag, node, rl.data, result);
}
}
#undef RC
}
case RL_PTR:
// store_node (AstGen.zig:11211-11216).
addPlNodeBin(gz, ZIR_INST_STORE_NODE,
rl.src_node != 0 ? rl.src_node : node, rl.data, result);
return ZIR_REF_VOID_VALUE;
case RL_INFERRED_PTR:
// store_to_inferred_ptr (AstGen.zig:11218-11223).
addPlNodeBin(
gz, ZIR_INST_STORE_TO_INFERRED_PTR, node, rl.data, result);
return ZIR_REF_VOID_VALUE;
}
return result;
}
// rvalueNoCoercePreRef (AstGen.zig:11042-11049): identical to rvalue except
// that a ref-coerced-type result location is demoted to a plain ref, so no
// coerce_ptr_elem_ty is emitted. Used when taking a reference to a local.
static uint32_t rvalueNoCoercePreRef(
    GenZir* gz, ResultLoc rl, uint32_t result, uint32_t node) {
    ResultLoc effective_rl = rl;
    if (effective_rl.tag == RL_REF_COERCED_TY)
        effective_rl.tag = RL_REF;
    return rvalue(gz, effective_rl, result, node);
}
// --- Expression evaluation (AstGen.zig:634) ---
// Forward declarations.
static uint32_t expr(GenZir* gz, Scope* scope, uint32_t node);
// --- DefersToEmit (AstGen.zig:3008) ---
#define DEFER_NORMAL_ONLY 0
#define DEFER_BOTH_SANS_ERR 1
// --- DeferCounts (AstGen.zig:2966) ---
typedef struct {
bool have_any;
bool have_normal;
bool have_err;
bool need_err_code;
} DeferCounts;
static DeferCounts countDefers(const Scope* outer_scope, Scope* inner_scope);
static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node);
static void assignOp(
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag);
static uint32_t shiftOp(
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag);
static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column);
static void genDefers(
GenZir* gz, const Scope* outer_scope, Scope* inner_scope, int which);
static void emitDbgStmtForceCurrentIndex(
GenZir* gz, uint32_t line, uint32_t column);
static void emitDbgNode(GenZir* gz, uint32_t node);
static void addDbgVar(
GenZir* gz, ZirInstTag tag, uint32_t name, uint32_t inst);
static bool addEnsureResult(
GenZir* gz, uint32_t maybe_unused_result, uint32_t statement);
static void blockExprStmts(
GenZir* gz, Scope* scope, const uint32_t* statements, uint32_t stmt_count);
static uint32_t fullBodyExpr(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static uint32_t containerDecl(GenZir* gz, Scope* scope, uint32_t node);
static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
const uint32_t* members, uint32_t members_len);
static uint32_t enumDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
const uint32_t* members, uint32_t members_len);
static uint32_t blockExprExpr(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static uint32_t ifExpr(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static uint32_t forExpr(
GenZir* gz, Scope* scope, uint32_t node, bool is_statement);
static uint32_t orelseCatchExpr(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_catch);
static uint32_t arrayInitDotExpr(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static uint32_t switchExpr(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
static uint32_t whileExpr(
GenZir* gz, Scope* scope, uint32_t node, bool is_statement);
#define EVAL_TO_ERROR_NEVER 0
#define EVAL_TO_ERROR_ALWAYS 1
#define EVAL_TO_ERROR_MAYBE 2
static int nodeMayEvalToError(const Ast* tree, uint32_t node);
static bool nodeMayAppendToErrorTrace(const Ast* tree, uint32_t node);
static void addSaveErrRetIndex(GenZir* gz, uint32_t operand);
static void addRestoreErrRetIndexBlock(
GenZir* gz, uint32_t block_inst, uint32_t operand, uint32_t node);
static void restoreErrRetIndex(GenZir* gz, uint32_t block_inst, ResultLoc rl,
uint32_t node, uint32_t result);
static uint32_t identAsString(AstGenCtx* ag, uint32_t token);
static uint32_t lastToken(const Ast* tree, uint32_t node);
static uint32_t simpleBinOp(
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag);
// Mirrors GenZir.endsWithNoReturn (AstGen.zig:11770): reports whether the
// most recently appended instruction of this block is a noreturn terminator.
// An empty block trivially does not end with noreturn.
static bool endsWithNoReturn(GenZir* gz) {
    uint32_t count = gzInstructionsLen(gz);
    if (count == 0)
        return false;
    uint32_t last_inst = gzInstructionsSlice(gz)[count - 1];
    switch (gz->astgen->inst_tags[last_inst]) {
    case ZIR_INST_BREAK:
    case ZIR_INST_BREAK_INLINE:
    case ZIR_INST_CONDBR:
    case ZIR_INST_CONDBR_INLINE:
    case ZIR_INST_COMPILE_ERROR:
    case ZIR_INST_RET_NODE:
    case ZIR_INST_RET_LOAD:
    case ZIR_INST_RET_IMPLICIT:
    case ZIR_INST_RET_ERR_VALUE:
    case ZIR_INST_UNREACHABLE:
    case ZIR_INST_REPEAT:
    case ZIR_INST_REPEAT_INLINE:
    case ZIR_INST_PANIC:
    case ZIR_INST_TRAP:
    case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
    case ZIR_INST_SWITCH_CONTINUE:
        return true;
    default:
        return false;
    }
}
// Mirrors GenZir.refIsNoReturn (AstGen.zig:11885): true when `inst_ref` is
// the unreachable value, or an instruction index whose tag is a noreturn
// terminator. Well-known (non-index) refs other than unreachable are never
// noreturn.
static bool refIsNoReturn(GenZir* gz, uint32_t inst_ref) {
    if (inst_ref == ZIR_REF_UNREACHABLE_VALUE)
        return true;
    if (inst_ref < ZIR_REF_START_INDEX)
        return false;
    // Refs at or above START_INDEX encode instruction indices.
    switch (gz->astgen->inst_tags[inst_ref - ZIR_REF_START_INDEX]) {
    case ZIR_INST_BREAK:
    case ZIR_INST_BREAK_INLINE:
    case ZIR_INST_CONDBR:
    case ZIR_INST_CONDBR_INLINE:
    case ZIR_INST_COMPILE_ERROR:
    case ZIR_INST_RET_NODE:
    case ZIR_INST_RET_LOAD:
    case ZIR_INST_RET_IMPLICIT:
    case ZIR_INST_RET_ERR_VALUE:
    case ZIR_INST_UNREACHABLE:
    case ZIR_INST_REPEAT:
    case ZIR_INST_REPEAT_INLINE:
    case ZIR_INST_PANIC:
    case ZIR_INST_TRAP:
    case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
    case ZIR_INST_SWITCH_CONTINUE:
        return true;
    default:
        return false;
    }
}
static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node);
// SimpleComptimeReason (std.zig:727) — values used in block_comptime payload.
#define COMPTIME_REASON_TYPE 29
#define COMPTIME_REASON_ARRAY_SENTINEL 30
#define COMPTIME_REASON_POINTER_SENTINEL 31
#define COMPTIME_REASON_SLICE_SENTINEL 32
#define COMPTIME_REASON_ARRAY_LENGTH 33
#define COMPTIME_REASON_ALIGN 50
#define COMPTIME_REASON_ADDRSPACE 51
#define COMPTIME_REASON_COMPTIME_KEYWORD 53
#define COMPTIME_REASON_SWITCH_ITEM 56
// Mirrors comptimeExpr2 (AstGen.zig:1982).
// Evaluates a node in a comptime block_comptime scope.
// `reason` is a SimpleComptimeReason code (COMPTIME_REASON_* above) stored in
// the block_comptime payload so Sema can explain why comptime was required.
static uint32_t comptimeExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, uint32_t reason) {
    // Skip wrapping when already in comptime context (AstGen.zig:1990).
    if (gz->is_comptime)
        return exprRl(gz, scope, rl, node);
    // Optimization: certain node types are trivially comptime and don't need
    // a block_comptime wrapper (AstGen.zig:1997-2046).
    AstGenCtx* ag = gz->astgen;
    AstNodeTag tag = ag->tree->nodes.tags[node];
    switch (tag) {
    // Identifier handling (AstGen.zig:2000-2003):
    // Upstream calls identifier() with force_comptime which resolves
    // primitives/int types directly and only wraps others in block_comptime.
    // We mirror this by resolving primitives here and falling through for
    // non-primitives.
    case AST_NODE_IDENTIFIER: {
        uint32_t prim = tryResolvePrimitiveIdent(gz, node);
        if (prim != ZIR_REF_NONE)
            return prim;
        break; // non-primitive: fall through to block_comptime wrapping
    }
    // Literals are values; evaluating them emits no runtime instructions.
    case AST_NODE_NUMBER_LITERAL:
    case AST_NODE_CHAR_LITERAL:
    case AST_NODE_STRING_LITERAL:
    case AST_NODE_MULTILINE_STRING_LITERAL:
    case AST_NODE_ENUM_LITERAL:
    case AST_NODE_ERROR_VALUE:
    // Type expressions that force comptime eval of sub-expressions
    // (AstGen.zig:2017-2042).
    case AST_NODE_ERROR_UNION:
    case AST_NODE_MERGE_ERROR_SETS:
    case AST_NODE_OPTIONAL_TYPE:
    case AST_NODE_PTR_TYPE_ALIGNED:
    case AST_NODE_PTR_TYPE_SENTINEL:
    case AST_NODE_PTR_TYPE:
    case AST_NODE_PTR_TYPE_BIT_RANGE:
    case AST_NODE_ARRAY_TYPE:
    case AST_NODE_ARRAY_TYPE_SENTINEL:
    case AST_NODE_FN_PROTO_SIMPLE:
    case AST_NODE_FN_PROTO_MULTI:
    case AST_NODE_FN_PROTO_ONE:
    case AST_NODE_FN_PROTO:
    case AST_NODE_CONTAINER_DECL:
    case AST_NODE_CONTAINER_DECL_TRAILING:
    case AST_NODE_CONTAINER_DECL_ARG:
    case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
    case AST_NODE_CONTAINER_DECL_TWO:
    case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
    case AST_NODE_TAGGED_UNION:
    case AST_NODE_TAGGED_UNION_TRAILING:
    case AST_NODE_TAGGED_UNION_ENUM_TAG:
    case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
    case AST_NODE_TAGGED_UNION_TWO:
    case AST_NODE_TAGGED_UNION_TWO_TRAILING:
        return exprRl(gz, scope, rl, node);
    default:
        break;
    }
    // General case: wrap in block_comptime (AstGen.zig:2078-2096).
    // NOTE: the block instruction is created first but appended to gz only
    // after the sub-block's body is finalized (sub-block "unstacks" in
    // setBlockComptimeBody).
    uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
    GenZir block_scope = makeSubBlock(gz, scope);
    block_scope.is_comptime = true;
    // Transform RL to type-only (AstGen.zig:2084-2090): only the result
    // *type* is forwarded into the comptime block, never a result pointer.
    ResultLoc ty_only_rl;
    uint32_t res_ty = rlResultType(gz, rl, node);
    if (res_ty != 0)
        ty_only_rl = (ResultLoc) {
            .tag = RL_COERCED_TY, .data = res_ty, .src_node = 0, .ctx = rl.ctx
        };
    else
        ty_only_rl = (ResultLoc) {
            .tag = RL_NONE, .data = 0, .src_node = 0, .ctx = rl.ctx
        };
    uint32_t result = exprRl(&block_scope, scope, ty_only_rl, node);
    // Terminate the comptime body, record the reason, then append the block
    // and coerce its value to the caller's original result location.
    addBreak(&block_scope, ZIR_INST_BREAK_INLINE, block_inst, result,
        AST_NODE_OFFSET_NONE);
    setBlockComptimeBody(ag, &block_scope, block_inst, reason);
    gzAppendInstruction(gz, block_inst);
    return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
}
// Mirrors typeExpr (AstGen.zig:394): evaluate `node` as a type — forced
// comptime evaluation with the result coerced to the `type` type.
static uint32_t typeExpr(GenZir* gz, Scope* scope, uint32_t node) {
    const ResultLoc type_rl = {
        .tag = RL_COERCED_TY,
        .data = ZIR_REF_TYPE_TYPE,
        .src_node = 0,
        .ctx = RI_CTX_NONE,
    };
    return comptimeExpr(gz, scope, type_rl, node, COMPTIME_REASON_TYPE);
}
// Mirrors numberLiteral (AstGen.zig:8544).
// Parses integer literals (decimal, hex 0x, octal 0o, binary 0b) and returns
// the appropriate ZIR ref. Float literals are still unhandled by this port:
// parsing stops at '.' and yields the integer part accumulated so far.
//
// Fix vs previous version: 0b/0o literals were parsed with the decimal loop,
// silently skipping the base letter (e.g. "0b101" -> 101, "0o17" -> 17);
// now the base is detected from the prefix and digits are validated per base.
static uint32_t numberLiteral(GenZir* gz, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    uint32_t num_token = ag->tree->nodes.main_tokens[node];
    uint32_t tok_start = ag->tree->tokens.starts[num_token];
    const char* source = ag->tree->source;
    // Determine token length by scanning to next non-number character.
    uint32_t tok_end = tok_start;
    while (tok_end < ag->tree->source_len
        && ((source[tok_end] >= '0' && source[tok_end] <= '9')
            || source[tok_end] == '_' || source[tok_end] == '.'
            || source[tok_end] == 'x' || source[tok_end] == 'o'
            || source[tok_end] == 'b'
            || (source[tok_end] >= 'a' && source[tok_end] <= 'f')
            || (source[tok_end] >= 'A' && source[tok_end] <= 'F'))) {
        tok_end++;
    }
    // Detect the base from the literal prefix (Zig only allows lowercase
    // 0x / 0o / 0b prefixes).
    uint64_t base = 10;
    uint32_t pos = tok_start;
    if (tok_end - tok_start >= 2 && source[tok_start] == '0') {
        char prefix = source[tok_start + 1];
        if (prefix == 'x') {
            base = 16;
            pos = tok_start + 2;
        } else if (prefix == 'o') {
            base = 8;
            pos = tok_start + 2;
        } else if (prefix == 'b') {
            base = 2;
            pos = tok_start + 2;
        }
    }
    // Accumulate digits; '_' separators are skipped, '.' ends the integer
    // part (floats not handled yet), and any digit >= base terminates.
    uint64_t value = 0;
    for (; pos < tok_end; pos++) {
        char c = source[pos];
        if (c == '_')
            continue;
        if (c == '.')
            break; // float — not handled yet
        uint64_t digit;
        if (c >= '0' && c <= '9')
            digit = (uint64_t)(c - '0');
        else if (c >= 'a' && c <= 'f')
            digit = 10 + (uint64_t)(c - 'a');
        else if (c >= 'A' && c <= 'F')
            digit = 10 + (uint64_t)(c - 'A');
        else
            break;
        if (digit >= base)
            break; // not a valid digit in this base
        value = value * base + digit;
    }
    // Special cases for 0 and 1 (AstGen.zig:8687-8703).
    if (value == 0)
        return ZIR_REF_ZERO;
    if (value == 1)
        return ZIR_REF_ONE;
    return addInt(gz, value);
}
// Mirrors builtinCall (AstGen.zig:9191), @import case (AstGen.zig:9242):
// lowers @import("path") to an .import instruction and records the path
// in the import table.
static uint32_t builtinCallImport(GenZir* gz, Scope* scope, uint32_t node) {
    (void)scope;
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    // builtin_call_two keeps its single argument in data.lhs.
    uint32_t path_node = tree->nodes.datas[node].lhs;
    assert(tree->nodes.tags[path_node] == AST_NODE_STRING_LITERAL);
    uint32_t path_token = tree->nodes.main_tokens[path_node];
    uint32_t path_str, path_len;
    strLitAsString(ag, path_token, &path_str, &path_len);
    // Zir.Inst.Import payload in extra: { res_ty, path }.
    ensureExtraCapacity(ag, 2);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = ZIR_REF_NONE; // res_ty = .none
    ag->extra[ag->extra_len++] = path_str;     // path
    // The .import instruction uses pl_tok data anchored at the string token.
    ZirInstData data;
    data.pl_tok.src_tok = tokenIndexToRelative(gz, path_token);
    data.pl_tok.payload_index = payload_index;
    uint32_t result_ref = addInstruction(gz, ZIR_INST_IMPORT, data);
    // Record the import for later file resolution (AstGen.zig:9269).
    addImport(ag, path_str, path_token);
    return result_ref;
}
// Mirrors cImport (AstGen.zig:10011): lowers @cImport(body) to a c_import
// block whose body is evaluated at comptime.
static uint32_t cImportExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    uint32_t body_node = ag->tree->nodes.datas[node].lhs; // single argument
    uint32_t c_import_inst = makeBlockInst(ag, ZIR_INST_C_IMPORT, gz, node);
    GenZir body_gz = makeSubBlock(gz, scope);
    body_gz.is_comptime = true;
    body_gz.c_import = true;
    // fullBodyExpr inlines an unlabeled block body (AstGen.zig:10028).
    fullBodyExpr(&body_gz, &body_gz.base, RL_NONE_VAL, body_node);
    // ensure_result_used is emitted into the parent gz, not the body
    // (AstGen.zig:10029).
    addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, ZIR_REF_VOID_VALUE, node);
    // Terminate the body with break_inline (AstGen.zig:10030-10032).
    makeBreakInline(
        &body_gz, c_import_inst, ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
    setBlockBody(ag, &body_gz, c_import_inst);
    // The body scope is now unstacked; the block may join the parent.
    gzAppendInstruction(gz, c_import_inst);
    return c_import_inst + ZIR_REF_START_INDEX; // toRef()
}
// Mirrors simpleCBuiltin (AstGen.zig:9938): lowers builtins such as
// @cInclude that take a single operand and produce an extended instruction
// with an UnNode payload; the result is always void.
static uint32_t simpleCBuiltin(GenZir* gz, Scope* scope, uint32_t node,
    uint32_t operand_node, uint16_t ext_tag) {
    AstGenCtx* ag = gz->astgen;
    uint32_t operand = expr(gz, scope, operand_node);
    // UnNode payload in extra: { relative node, operand } (AstGen.zig:9954).
    ensureExtraCapacity(ag, 2);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++]
        = (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
    ag->extra[ag->extra_len++] = operand;
    ZirInstData data;
    data.extended.opcode = ext_tag;
    // Upstream addExtendedPayload leaves `small` undefined; use a sentinel.
    data.extended.small = 0xAAAAu;
    data.extended.operand = payload_index;
    addInstruction(gz, ZIR_INST_EXTENDED, data);
    return ZIR_REF_VOID_VALUE;
}
// Mirrors builtinCall (AstGen.zig:9191) dispatch.
static uint32_t builtinCall(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
AstGenCtx* ag = gz->astgen;
const Ast* tree = ag->tree;
uint32_t builtin_token = tree->nodes.main_tokens[node];
uint32_t tok_start = tree->tokens.starts[builtin_token];
const char* source = tree->source;
// Identify builtin name from source.
// Skip '@' prefix and scan identifier.
uint32_t name_start = tok_start + 1; // skip '@'
uint32_t name_end = name_start;
while (name_end < tree->source_len
&& ((source[name_end] >= 'a' && source[name_end] <= 'z')
|| (source[name_end] >= 'A' && source[name_end] <= 'Z')
|| source[name_end] == '_')) {
name_end++;
}
uint32_t name_len = name_end - name_start;
// clang-format off
if (name_len == 6 && memcmp(source + name_start, "import", 6) == 0)
return builtinCallImport(gz, scope, node);
if (name_len == 7 && memcmp(source + name_start, "cImport", 7) == 0)
return cImportExpr(gz, scope, node);
if (name_len == 8 && memcmp(source + name_start, "cInclude", 8) == 0) {
AstData nd = tree->nodes.datas[node];
return simpleCBuiltin(gz, scope, node, nd.lhs, (uint16_t)ZIR_EXT_C_INCLUDE);
}
// @intCast — typeCast pattern (AstGen.zig:9416, 9807-9826).
if (name_len == 7 && memcmp(source + name_start, "intCast", 7) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultType(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
return addPlNodeBin(gz, ZIR_INST_INT_CAST, node,
result_type, operand);
}
// @embedFile (AstGen.zig:9626).
if (name_len == 9 && memcmp(source + name_start, "embedFile", 9) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
return addUnNode(gz, ZIR_INST_EMBED_FILE, operand, node);
}
// @intFromEnum (AstGen.zig:9478).
if (name_len == 11 && memcmp(source + name_start, "intFromEnum", 11) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
return addUnNode(gz, ZIR_INST_INT_FROM_ENUM, operand, node);
}
// @tagName (AstGen.zig:9407) — simpleUnOp with dbg_stmt.
if (name_len == 7 && memcmp(source + name_start, "tagName", 7) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
return addUnNode(gz, ZIR_INST_TAG_NAME, operand, node);
}
// @as (AstGen.zig:8909-8920).
if (name_len == 2 && memcmp(source + name_start, "as", 2) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t dest_type = typeExpr(gz, scope, nd.lhs);
ResultLoc as_rl = { .tag = RL_TY, .data = dest_type, .src_node = 0,
.ctx = rl.ctx };
uint32_t operand = exprRl(gz, scope, as_rl, nd.rhs);
return rvalue(gz, rl, operand, node);
}
// @truncate — typeCast pattern (AstGen.zig:9417, 9807-9826).
if (name_len == 8 && memcmp(source + name_start, "truncate", 8) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultType(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
return addPlNodeBin(gz, ZIR_INST_TRUNCATE, node,
result_type, operand);
}
// @ptrCast — typeCast pattern (AstGen.zig:9056, 9807-9826).
if (name_len == 7 && memcmp(source + name_start, "ptrCast", 7) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultType(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
return addPlNodeBin(gz, ZIR_INST_PTR_CAST, node,
result_type, operand);
}
// @enumFromInt — typeCast pattern (AstGen.zig:9414, 9807-9826).
if (name_len == 11 && memcmp(source + name_start, "enumFromInt", 11) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultType(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
return addPlNodeBin(gz, ZIR_INST_ENUM_FROM_INT, node,
result_type, operand);
}
// @bitCast (AstGen.zig:8944-8958, dispatched at 9313).
if (name_len == 7 && memcmp(source + name_start, "bitCast", 7) == 0) {
uint32_t result_type = rlResultType(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
return addPlNodeBin(gz, ZIR_INST_BITCAST, node,
result_type, operand);
}
// @memcpy (AstGen.zig:9631-9637).
if (name_len == 6 && memcmp(source + name_start, "memcpy", 6) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t dst = expr(gz, scope, nd.lhs);
uint32_t src = expr(gz, scope, nd.rhs);
addPlNodeBin(gz, ZIR_INST_MEMCPY, node, dst, src);
return ZIR_REF_VOID_VALUE;
}
// @memset (AstGen.zig:9638-9647).
if (name_len == 6 && memcmp(source + name_start, "memset", 6) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t lhs = expr(gz, scope, nd.lhs);
uint32_t lhs_ty = addUnNode(gz, ZIR_INST_TYPEOF, lhs, nd.lhs);
uint32_t elem_ty =
addUnNode(gz, ZIR_INST_INDEXABLE_PTR_ELEM_TYPE, lhs_ty, nd.lhs);
ResultLoc val_rl = {
.tag = RL_COERCED_TY, .data = elem_ty, .src_node = 0};
uint32_t val = exprRl(gz, scope, val_rl, nd.rhs);
addPlNodeBin(gz, ZIR_INST_MEMSET, node, lhs, val);
return ZIR_REF_VOID_VALUE;
}
// @min (AstGen.zig:9155).
if (name_len == 3 && memcmp(source + name_start, "min", 3) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t a = expr(gz, scope, nd.lhs);
uint32_t b = expr(gz, scope, nd.rhs);
return addPlNodeBin(gz, ZIR_INST_MIN, node, a, b);
}
// @max (AstGen.zig:9155).
if (name_len == 3 && memcmp(source + name_start, "max", 3) == 0) {
AstData nd = tree->nodes.datas[node];
uint32_t a = expr(gz, scope, nd.lhs);
uint32_t b = expr(gz, scope, nd.rhs);
return addPlNodeBin(gz, ZIR_INST_MAX, node, a, b);
}
// clang-format on
// TODO: handle other builtins.
SET_ERROR(ag);
return ZIR_REF_VOID_VALUE;
}
// --- identifier (AstGen.zig:8282) ---
// Simplified: handles decl_val resolution for container-level declarations.
// Tries to resolve an identifier as a primitive type or integer type.
// Returns the ZIR ref if it's a primitive/int type, or ZIR_REF_NONE.
// Mirrors primitive_instrs + integer type checks in identifier()
// (AstGen.zig:8298-8337).
//
// Fixes vs previous version:
// - `u0`/`i0` are valid zero-bit integer types in Zig and are now accepted
//   (previously rejected by a `bit_count > 0` check).
// - The bit count accumulates in 32 bits and is capped at Zig's maximum of
//   65535; oversized widths (e.g. "u70000") no longer wrap a uint16_t into a
//   bogus int_type, they fall through to ZIR_REF_NONE (and ultimately the
//   caller's error path).
// - Leading zeros ("u01") are rejected, matching upstream.
static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    uint32_t ident_token = ag->tree->nodes.main_tokens[node];
    uint32_t tok_start = ag->tree->tokens.starts[ident_token];
    const char* source = ag->tree->source;
    uint32_t tok_end = tok_start;
    while (tok_end < ag->tree->source_len
        && ((source[tok_end] >= 'a' && source[tok_end] <= 'z')
            || (source[tok_end] >= 'A' && source[tok_end] <= 'Z')
            || (source[tok_end] >= '0' && source[tok_end] <= '9')
            || source[tok_end] == '_'))
        tok_end++;
    uint32_t tok_len = tok_end - tok_start;
    // Check well-known primitive refs (primitive_instrs map,
    // AstGen.zig:10236-10281).
    // clang-format off
    if (tok_len == 2 && memcmp(source+tok_start, "u1", 2) == 0) return ZIR_REF_U1_TYPE;
    if (tok_len == 2 && memcmp(source+tok_start, "u8", 2) == 0) return ZIR_REF_U8_TYPE;
    if (tok_len == 2 && memcmp(source+tok_start, "i8", 2) == 0) return ZIR_REF_I8_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "u16", 3) == 0) return ZIR_REF_U16_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "i16", 3) == 0) return ZIR_REF_I16_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "u29", 3) == 0) return ZIR_REF_U29_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "u32", 3) == 0) return ZIR_REF_U32_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "i32", 3) == 0) return ZIR_REF_I32_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "u64", 3) == 0) return ZIR_REF_U64_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "i64", 3) == 0) return ZIR_REF_I64_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "u128", 4) == 0) return ZIR_REF_U128_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "i128", 4) == 0) return ZIR_REF_I128_TYPE;
    if (tok_len == 5 && memcmp(source+tok_start, "usize", 5) == 0) return ZIR_REF_USIZE_TYPE;
    if (tok_len == 5 && memcmp(source+tok_start, "isize", 5) == 0) return ZIR_REF_ISIZE_TYPE;
    if (tok_len == 6 && memcmp(source+tok_start, "c_char", 6) == 0) return ZIR_REF_C_CHAR_TYPE;
    if (tok_len == 7 && memcmp(source+tok_start, "c_short", 7) == 0) return ZIR_REF_C_SHORT_TYPE;
    if (tok_len == 8 && memcmp(source+tok_start, "c_ushort", 8) == 0) return ZIR_REF_C_USHORT_TYPE;
    if (tok_len == 5 && memcmp(source+tok_start, "c_int", 5) == 0) return ZIR_REF_C_INT_TYPE;
    if (tok_len == 6 && memcmp(source+tok_start, "c_uint", 6) == 0) return ZIR_REF_C_UINT_TYPE;
    if (tok_len == 6 && memcmp(source+tok_start, "c_long", 6) == 0) return ZIR_REF_C_LONG_TYPE;
    if (tok_len == 7 && memcmp(source+tok_start, "c_ulong", 7) == 0) return ZIR_REF_C_ULONG_TYPE;
    if (tok_len == 10 && memcmp(source+tok_start, "c_longlong", 10) == 0) return ZIR_REF_C_LONGLONG_TYPE;
    if (tok_len == 11 && memcmp(source+tok_start, "c_ulonglong", 11) == 0) return ZIR_REF_C_ULONGLONG_TYPE;
    if (tok_len == 14 && memcmp(source+tok_start, "comptime_float", 14) == 0) return ZIR_REF_COMPTIME_FLOAT_TYPE;
    if (tok_len == 12 && memcmp(source+tok_start, "comptime_int", 12) == 0) return ZIR_REF_COMPTIME_INT_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "f16", 3) == 0) return ZIR_REF_F16_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "f32", 3) == 0) return ZIR_REF_F32_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "f64", 3) == 0) return ZIR_REF_F64_TYPE;
    if (tok_len == 3 && memcmp(source+tok_start, "f80", 3) == 0) return ZIR_REF_F80_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "f128", 4) == 0) return ZIR_REF_F128_TYPE;
    if (tok_len == 9 && memcmp(source+tok_start, "anyopaque", 9) == 0) return ZIR_REF_ANYOPAQUE_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "bool", 4) == 0) return ZIR_REF_BOOL_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "void", 4) == 0) return ZIR_REF_VOID_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "type", 4) == 0) return ZIR_REF_TYPE_TYPE;
    if (tok_len == 8 && memcmp(source+tok_start, "anyerror", 8) == 0) return ZIR_REF_ANYERROR_TYPE;
    if (tok_len == 8 && memcmp(source+tok_start, "noreturn", 8) == 0) return ZIR_REF_NORETURN_TYPE;
    if (tok_len == 4 && memcmp(source+tok_start, "true", 4) == 0) return ZIR_REF_BOOL_TRUE;
    if (tok_len == 5 && memcmp(source+tok_start, "false", 5) == 0) return ZIR_REF_BOOL_FALSE;
    if (tok_len == 4 && memcmp(source+tok_start, "null", 4) == 0) return ZIR_REF_NULL_VALUE;
    if (tok_len == 9 && memcmp(source+tok_start, "undefined", 9) == 0) return ZIR_REF_UNDEF;
    // clang-format on
    // Integer type detection: uN / iN, 0 <= N <= 65535
    // (AstGen.zig:8304-8336).
    if (tok_len >= 2
        && (source[tok_start] == 'u' || source[tok_start] == 'i')) {
        // Zig Signedness enum: unsigned=1, signed=0
        uint8_t signedness = (source[tok_start] == 'u') ? 1 : 0;
        uint32_t bit_count = 0;
        bool valid = true;
        // Reject leading zeros ("u01"), matching upstream; a bare "u0"/"i0"
        // remains valid (zero-bit integer type).
        if (tok_len > 2 && source[tok_start + 1] == '0')
            valid = false;
        for (uint32_t k = tok_start + 1; valid && k < tok_end; k++) {
            if (source[k] >= '0' && source[k] <= '9') {
                bit_count = bit_count * 10 + (uint32_t)(source[k] - '0');
                if (bit_count > 65535)
                    valid = false; // exceeds Zig's maximum bit width
            } else {
                valid = false; // non-digit: not an integer type name
            }
        }
        if (valid) {
            ZirInstData data;
            data.int_type.src_node
                = (int32_t)node - (int32_t)gz->decl_node_index;
            data.int_type.signedness = signedness;
            data.int_type._pad = 0;
            data.int_type.bit_count = (uint16_t)bit_count;
            return addInstruction(gz, ZIR_INST_INT_TYPE, data);
        }
    }
    return ZIR_REF_NONE;
}
// Mirrors identifier (AstGen.zig:8282), simplified: primitive refs first,
// then the lexical scope chain, finally container-level declarations.
static uint32_t identifierExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    uint32_t ident_token = ag->tree->nodes.main_tokens[node];
    // Primitive types/values take priority (AstGen.zig:8298-8338).
    uint32_t prim = tryResolvePrimitiveIdent(gz, node);
    if (prim != ZIR_REF_NONE)
        return rvalue(gz, rl, prim, node);
    uint32_t name_str = identAsString(ag, ident_token);
    // Walk the scope chain looking for a local (AstGen.zig:8340-8461).
    Scope* s = scope;
    bool searching = true;
    while (searching && s != NULL) {
        switch (s->tag) {
        case SCOPE_LOCAL_VAL: {
            ScopeLocalVal* lv = (ScopeLocalVal*)s;
            if (lv->name == name_str)
                return rvalueNoCoercePreRef(gz, rl, lv->inst, node);
            s = lv->parent;
            break;
        }
        case SCOPE_LOCAL_PTR: {
            ScopeLocalPtr* lp = (ScopeLocalPtr*)s;
            if (lp->name == name_str) {
                // Under a ref result location, hand back the pointer
                // itself; otherwise load through it.
                return RL_IS_REF(rl)
                    ? lp->ptr
                    : addUnNode(gz, ZIR_INST_LOAD, lp->ptr, node);
            }
            s = lp->parent;
            break;
        }
        case SCOPE_GEN_ZIR:
            s = ((GenZir*)s)->parent;
            break;
        case SCOPE_DEFER_NORMAL:
        case SCOPE_DEFER_ERROR:
            s = ((ScopeDefer*)s)->parent;
            break;
        case SCOPE_LABEL:
            s = ((ScopeLabel*)s)->parent;
            break;
        case SCOPE_NAMESPACE:
        case SCOPE_TOP:
            searching = false;
            break;
        }
    }
    // Container-level declaration lookup (AstGen.zig:8462-8520).
    for (uint32_t i = 0; i < ag->decl_table_len; i++) {
        if (ag->decl_names[i] != name_str)
            continue;
        ZirInstData data;
        data.str_tok.start = name_str;
        data.str_tok.src_tok = tokenIndexToRelative(gz, ident_token);
        return addInstruction(gz,
            RL_IS_REF(rl) ? ZIR_INST_DECL_REF : ZIR_INST_DECL_VAL, data);
    }
    SET_ERROR(ag);
    return ZIR_REF_VOID_VALUE;
}
// --- fieldAccess (AstGen.zig:6154) ---
// Simplified: lowers `lhs.field` to field_val — or field_ptr under a ref
// result location — with a Zir.Inst.Field payload.
static uint32_t fieldAccessExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    bool want_ref = RL_IS_REF(rl);
    // data.lhs = object node, data.rhs = field identifier token.
    AstData nd = tree->nodes.datas[node];
    // Intern the field name first (AstGen.zig:6180).
    uint32_t field_name = identAsString(ag, nd.rhs);
    // Evaluate the object; under .ref the LHS is also evaluated by
    // reference (AstGen.zig:6161, 6181).
    uint32_t lhs
        = exprRl(gz, scope, want_ref ? RL_REF_VAL : RL_NONE_VAL, nd.lhs);
    // dbg_stmt at the access site (AstGen.zig:6183-6184).
    advanceSourceCursorToMainToken(ag, gz, node);
    emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column);
    // Zir.Inst.Field payload in extra: { lhs, field_name_start }
    // (AstGen.zig:6186-6189).
    ensureExtraCapacity(ag, 2);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = field_name;
    ag->extra[payload_index] = lhs;
    ag->extra[payload_index + 1] = field_name;
    ag->extra_len = payload_index + 2;
    ZirInstData data;
    data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
    data.pl_node.payload_index = payload_index;
    uint32_t access = addInstruction(gz,
        want_ref ? ZIR_INST_FIELD_PTR : ZIR_INST_FIELD_VAL, data);
    // Ref results are returned directly; otherwise apply rvalue
    // (AstGen.zig:6161-6164).
    return want_ref ? access : rvalue(gz, rl, access, node);
}
// --- ptrType (AstGen.zig:3833) ---
// Lowers a pointer type expression: `*T`, `**T`, `[*]T`, `[*c]T`, `[]T`
// plus optional sentinel, align, addrspace, bit-range, and the
// const/volatile/allowzero qualifiers. Emits a single ptr_type
// instruction. The trailing extra words are appended in the fixed order
// sentinel, align, addrspace, bit_range_start, bit_range_end — the same
// order implied by the has_* flag bits packed at the end.
static uint32_t ptrTypeExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    uint32_t main_tok = tree->nodes.main_tokens[node];
    // child_type is always in rhs for all ptr_type variants.
    uint32_t child_type_node = nd.rhs;
    // Determine size from main_token (Ast.zig:2122-2131).
    // Pointer.Size: one=0, many=1, slice=2, c=3.
    uint8_t size;
    TokenizerTag main_tok_tag = tree->tokens.tags[main_tok];
    if (main_tok_tag == TOKEN_ASTERISK
        || main_tok_tag == TOKEN_ASTERISK_ASTERISK) {
        size = 0; // one
    } else {
        assert(main_tok_tag == TOKEN_L_BRACKET);
        TokenizerTag next_tag = tree->tokens.tags[main_tok + 1];
        if (next_tag == TOKEN_ASTERISK) {
            // [*c]T vs [*]T: c-pointer if next-next is identifier.
            // NOTE(review): assumes the parser only produces a ptr_type
            // node with `[*ident]` when the identifier is `c` (mirrors
            // Ast.zig:2122-2131) — confirm against the parser.
            if (tree->tokens.tags[main_tok + 2] == TOKEN_IDENTIFIER)
                size = 3; // c
            else
                size = 1; // many
        } else {
            size = 2; // slice
        }
    }
    // Determine sentinel, align, addrspace, bit_range nodes from AST variant
    // (Ast.zig:1656-1696). UINT32_MAX means "absent".
    uint32_t sentinel_node = UINT32_MAX;
    uint32_t align_node = UINT32_MAX;
    uint32_t addrspace_node = UINT32_MAX;
    uint32_t bit_range_start = UINT32_MAX;
    uint32_t bit_range_end = UINT32_MAX;
    if (tag == AST_NODE_PTR_TYPE_ALIGNED) {
        // opt_node_and_node: lhs = optional align_node (0=none), rhs = child.
        if (nd.lhs != 0)
            align_node = nd.lhs;
    } else if (tag == AST_NODE_PTR_TYPE_SENTINEL) {
        // opt_node_and_node: lhs = optional sentinel (0=none), rhs = child.
        if (nd.lhs != 0)
            sentinel_node = nd.lhs;
    } else if (tag == AST_NODE_PTR_TYPE) {
        // extra_and_node: lhs = extra index to AstPtrType, rhs = child_type.
        const AstPtrType* pt
            = (const AstPtrType*)(tree->extra_data.arr + nd.lhs);
        if (pt->sentinel != UINT32_MAX)
            sentinel_node = pt->sentinel;
        if (pt->align_node != UINT32_MAX)
            align_node = pt->align_node;
        if (pt->addrspace_node != UINT32_MAX)
            addrspace_node = pt->addrspace_node;
    } else if (tag == AST_NODE_PTR_TYPE_BIT_RANGE) {
        // extra_and_node: lhs = extra index to AstPtrTypeBitRange.
        // A bit range implies align(a:start:end), so align_node is taken
        // unconditionally here.
        const AstPtrTypeBitRange* pt
            = (const AstPtrTypeBitRange*)(tree->extra_data.arr + nd.lhs);
        if (pt->sentinel != UINT32_MAX)
            sentinel_node = pt->sentinel;
        align_node = pt->align_node;
        if (pt->addrspace_node != UINT32_MAX)
            addrspace_node = pt->addrspace_node;
        bit_range_start = pt->bit_range_start;
        bit_range_end = pt->bit_range_end;
    }
    // Scan tokens between main_token and child_type to find const/volatile/
    // allowzero (Ast.zig:2139-2164).
    bool has_const = false;
    bool has_volatile = false;
    bool has_allowzero = false;
    {
        uint32_t i;
        if (sentinel_node != UINT32_MAX) {
            i = lastToken(tree, sentinel_node) + 1;
        } else if (size == 1 || size == 3) {
            // many or c: start after main_token.
            i = main_tok + 1;
        } else {
            i = main_tok;
        }
        uint32_t end = firstToken(tree, child_type_node);
        while (i < end) {
            TokenizerTag tt = tree->tokens.tags[i];
            if (tt == TOKEN_KEYWORD_ALLOWZERO) {
                has_allowzero = true;
            } else if (tt == TOKEN_KEYWORD_CONST) {
                has_const = true;
            } else if (tt == TOKEN_KEYWORD_VOLATILE) {
                has_volatile = true;
            } else if (tt == TOKEN_KEYWORD_ALIGN) {
                // Skip over align expression. With a bit range the align
                // clause extends to the end of bit_range_end. The i++
                // below then steps past the closing token, matching the
                // upstream while-continue increment.
                if (bit_range_end != UINT32_MAX)
                    i = lastToken(tree, bit_range_end) + 1;
                else if (align_node != UINT32_MAX)
                    i = lastToken(tree, align_node) + 1;
            }
            i++;
        }
    }
    // Evaluate element type (AstGen.zig:3847).
    uint32_t elem_type = typeExpr(gz, scope, child_type_node);
    // Evaluate trailing expressions (AstGen.zig:3856-3897).
    // NOTE(review): evaluation order is sentinel, addrspace, align while
    // the extra words below are stored sentinel, align, addrspace; this
    // mirrors the referenced upstream lines — confirm if exact ZIR
    // parity is required.
    uint32_t sentinel_ref = ZIR_REF_NONE;
    uint32_t align_ref = ZIR_REF_NONE;
    uint32_t addrspace_ref = ZIR_REF_NONE;
    uint32_t bit_start_ref = ZIR_REF_NONE;
    uint32_t bit_end_ref = ZIR_REF_NONE;
    uint32_t trailing_count = 0;
    if (sentinel_node != UINT32_MAX) {
        uint32_t reason = (size == 2) ? COMPTIME_REASON_SLICE_SENTINEL
                                      : COMPTIME_REASON_POINTER_SENTINEL;
        ResultLoc srl = {
            .tag = RL_TY, .data = elem_type, .src_node = 0, .ctx = RI_CTX_NONE
        };
        sentinel_ref = comptimeExpr(gz, scope, srl, sentinel_node, reason);
        trailing_count++;
    }
    if (addrspace_node != UINT32_MAX) {
        // Upstream creates addrspace_ty via addBuiltinValue, we don't have
        // that yet, so pass RL_NONE (matching previous behavior).
        addrspace_ref = comptimeExpr(
            gz, scope, RL_NONE_VAL, addrspace_node, COMPTIME_REASON_ADDRSPACE);
        trailing_count++;
    }
    if (align_node != UINT32_MAX) {
        ResultLoc arl = { .tag = RL_COERCED_TY,
            .data = ZIR_REF_U29_TYPE,
            .src_node = 0,
            .ctx = RI_CTX_NONE };
        align_ref
            = comptimeExpr(gz, scope, arl, align_node, COMPTIME_REASON_ALIGN);
        trailing_count++;
    }
    if (bit_range_start != UINT32_MAX) {
        ResultLoc brl = { .tag = RL_COERCED_TY,
            .data = ZIR_REF_U16_TYPE,
            .src_node = 0,
            .ctx = RI_CTX_NONE };
        bit_start_ref = comptimeExpr(
            gz, scope, brl, bit_range_start, COMPTIME_REASON_TYPE);
        bit_end_ref = comptimeExpr(
            gz, scope, brl, bit_range_end, COMPTIME_REASON_TYPE);
        trailing_count += 2;
    }
    // Build PtrType payload: { elem_type, src_node } + trailing
    // (AstGen.zig:3905-3921).
    ensureExtraCapacity(ag, 2 + trailing_count);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = elem_type;
    ag->extra[ag->extra_len++]
        = (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
    if (sentinel_ref != ZIR_REF_NONE)
        ag->extra[ag->extra_len++] = sentinel_ref;
    if (align_ref != ZIR_REF_NONE)
        ag->extra[ag->extra_len++] = align_ref;
    if (addrspace_ref != ZIR_REF_NONE)
        ag->extra[ag->extra_len++] = addrspace_ref;
    if (bit_start_ref != ZIR_REF_NONE) {
        ag->extra[ag->extra_len++] = bit_start_ref;
        ag->extra[ag->extra_len++] = bit_end_ref;
    }
    // Build flags packed byte (AstGen.zig:3927-3934). Bit order matches
    // the trailing-word order appended above.
    uint8_t flags = 0;
    if (has_allowzero)
        flags |= (1 << 0); // is_allowzero
    if (!has_const)
        flags |= (1 << 1); // is_mutable
    if (has_volatile)
        flags |= (1 << 2); // is_volatile
    if (sentinel_ref != ZIR_REF_NONE)
        flags |= (1 << 3); // has_sentinel
    if (align_ref != ZIR_REF_NONE)
        flags |= (1 << 4); // has_align
    if (addrspace_ref != ZIR_REF_NONE)
        flags |= (1 << 5); // has_addrspace
    if (bit_start_ref != ZIR_REF_NONE)
        flags |= (1 << 6); // has_bit_range
    ZirInstData data;
    data.ptr_type.flags = flags;
    data.ptr_type.size = size;
    data.ptr_type._pad = 0;
    data.ptr_type.payload_index = payload_index;
    return addInstruction(gz, ZIR_INST_PTR_TYPE, data);
}
// --- arrayType (AstGen.zig:940) ---
// Lowers `[len]T`: `len` is evaluated as a comptime usize expression,
// `T` as a type expression, producing an array_type instruction.
// A `_` length outside an array initializer is a compile error
// (AstGen.zig:3950-3953).
static uint32_t arrayTypeExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    // data.lhs = length expression node, data.rhs = element type node.
    AstData data = tree->nodes.datas[node];
    bool len_is_underscore
        = tree->nodes.tags[data.lhs] == AST_NODE_IDENTIFIER
        && isUnderscoreIdent(tree, data.lhs);
    if (len_is_underscore) {
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    ResultLoc usize_rl = { .tag = RL_COERCED_TY,
        .data = ZIR_REF_USIZE_TYPE,
        .src_node = 0,
        .ctx = RI_CTX_NONE };
    uint32_t len_ref
        = comptimeExpr(gz, scope, usize_rl, data.lhs, COMPTIME_REASON_TYPE);
    uint32_t elem_ty_ref = typeExpr(gz, scope, data.rhs);
    return addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, node, len_ref, elem_ty_ref);
}
// --- arrayInitExpr (AstGen.zig:1431) ---
// Simplified: handles typed array init with inferred [_] length.
// Lowers `[_]T{a, b, ...}` to an array_type instruction (length taken
// from the element count) followed by array_init/array_init_ref with
// payload { operands_len, array_type, elem refs... }. Any other
// array-init form sets the error flag and returns void.
static uint32_t arrayInitExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Get elements and type expression based on the variant.
    uint32_t type_expr_node = 0;
    uint32_t elem_buf[2]; // backing storage for the single-element form
    const uint32_t* elements = NULL;
    uint32_t elem_count = 0;
    switch (tag) {
    case AST_NODE_ARRAY_INIT_ONE:
    case AST_NODE_ARRAY_INIT_ONE_COMMA: {
        // lhs = type expr, rhs = the single element (0 = none).
        type_expr_node = nd.lhs;
        if (nd.rhs != 0) {
            elem_buf[0] = nd.rhs;
            elements = elem_buf;
            elem_count = 1;
        }
        break;
    }
    case AST_NODE_ARRAY_INIT:
    case AST_NODE_ARRAY_INIT_COMMA: {
        // data = node_and_extra: lhs = type_expr, rhs = extra_index.
        // extra[rhs] = SubRange.start, extra[rhs+1] = SubRange.end.
        // Elements are extra_data[start..end].
        type_expr_node = nd.lhs;
        uint32_t extra_idx = nd.rhs;
        uint32_t range_start = tree->extra_data.arr[extra_idx];
        uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
        elements = tree->extra_data.arr + range_start;
        elem_count = range_end - range_start;
        break;
    }
    default:
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    if (type_expr_node == 0 || elem_count == 0) {
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    // Check if the type is [_]T (inferred length) (AstGen.zig:1446-1474).
    if (tree->nodes.tags[type_expr_node] == AST_NODE_ARRAY_TYPE) {
        AstData type_nd = tree->nodes.datas[type_expr_node];
        uint32_t elem_count_node = type_nd.lhs;
        uint32_t elem_type_node = type_nd.rhs;
        // Check if elem_count is `_` identifier.
        if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER
            && isUnderscoreIdent(tree, elem_count_node)) {
            // Inferred length: addInt(elem_count) (AstGen.zig:1452).
            uint32_t len_inst = addInt(gz, elem_count);
            uint32_t elem_type = typeExpr(gz, scope, elem_type_node);
            uint32_t array_type_inst = addPlNodeBin(
                gz, ZIR_INST_ARRAY_TYPE, type_expr_node, len_inst, elem_type);
            // arrayInitExprTyped (AstGen.zig:1484-1513, 1598-1642).
            // Only RL_REF produces array_init_ref; all other RLs use
            // array_init + rvalue (AstGen.zig:1507-1511).
            bool is_ref = (rl.tag == RL_REF);
            uint32_t operands_len = elem_count + 1;
            ensureExtraCapacity(ag, 1 + operands_len);
            uint32_t payload_index = ag->extra_len;
            ag->extra[ag->extra_len++] = operands_len;
            ag->extra[ag->extra_len++] = array_type_inst;
            // Reserve elem_count slots now; each element's exprRl below
            // may grow ag->extra, so each slot is written by index only
            // after that element has been evaluated.
            uint32_t extra_start = ag->extra_len;
            ag->extra_len += elem_count;
            for (uint32_t i = 0; i < elem_count; i++) {
                // Use elem_type as coercion target for each element.
                // Unnamed fields (.ctx) are zero-initialized.
                ResultLoc elem_rl = {
                    .tag = RL_COERCED_TY, .data = elem_type, .src_node = 0
                };
                uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
                ag->extra[extra_start + i] = elem_ref;
            }
            ZirInstTag init_tag
                = is_ref ? ZIR_INST_ARRAY_INIT_REF : ZIR_INST_ARRAY_INIT;
            ZirInstData idata;
            idata.pl_node.src_node
                = (int32_t)node - (int32_t)gz->decl_node_index;
            idata.pl_node.payload_index = payload_index;
            uint32_t result = addInstruction(gz, init_tag, idata);
            if (is_ref)
                return result;
            return rvalue(gz, rl, result, node);
        }
    }
    // Non-inferred length forms are not implemented yet.
    SET_ERROR(ag);
    return ZIR_REF_VOID_VALUE;
}
// --- simpleBinOp (AstGen.zig:2204) ---
// Lowers a plain binary operator node: both operands are evaluated with
// no result location, then a single pl_node instruction carrying
// {lhs, rhs} is emitted.
static uint32_t simpleBinOp(
    GenZir* gz, Scope* scope, uint32_t node, ZirInstTag op_tag) {
    AstGenCtx* ag = gz->astgen;
    AstData operands = ag->tree->nodes.datas[node];
    uint32_t lhs_ref = exprRl(gz, scope, RL_NONE_VAL, operands.lhs);
    // Arithmetic operators get a dbg_stmt at the operator token, emitted
    // after the RHS has been evaluated (AstGen.zig:6245-6256).
    bool is_arith;
    switch (op_tag) {
    case ZIR_INST_ADD:
    case ZIR_INST_SUB:
    case ZIR_INST_MUL:
    case ZIR_INST_DIV:
    case ZIR_INST_MOD_REM:
        is_arith = true;
        break;
    default:
        is_arith = false;
        break;
    }
    uint32_t dbg_line = 0;
    uint32_t dbg_col = 0;
    if (is_arith) {
        if (!gz->is_comptime) {
            advanceSourceCursorToMainToken(ag, gz, node);
        }
        dbg_line = ag->source_line - gz->decl_line;
        dbg_col = ag->source_column;
    }
    uint32_t rhs_ref = exprRl(gz, scope, RL_NONE_VAL, operands.rhs);
    if (is_arith) {
        emitDbgStmt(gz, dbg_line, dbg_col);
    }
    return addPlNodeBin(gz, op_tag, node, lhs_ref, rhs_ref);
}
// --- shiftOp (AstGen.zig:9978) ---
// Lowers `<<` / `>>`: the RHS is coerced to the log2 integer type of
// the LHS (via typeof_log2_int_type), and a dbg_stmt at the operator
// token is emitted before the shift instruction.
static uint32_t shiftOp(
    GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag) {
    AstGenCtx* ag = gz->astgen;
    AstData operands = ag->tree->nodes.datas[node];
    uint32_t lhs_ref = exprRl(gz, scope, RL_NONE_VAL, operands.lhs);
    // Record the operator's source position before lowering the RHS.
    advanceSourceCursorToMainToken(ag, gz, node);
    uint32_t dbg_line = ag->source_line - gz->decl_line;
    uint32_t dbg_col = ag->source_column;
    // Shift amount type is derived from the LHS operand's type.
    uint32_t shift_amt_ty
        = addUnNode(gz, ZIR_INST_TYPEOF_LOG2_INT_TYPE, lhs_ref, operands.lhs);
    ResultLoc rhs_rl = { .tag = RL_TY,
        .data = shift_amt_ty,
        .src_node = 0,
        .ctx = RI_CTX_SHIFT_OP };
    uint32_t rhs_ref = exprRl(gz, scope, rhs_rl, operands.rhs);
    emitDbgStmt(gz, dbg_line, dbg_col);
    return addPlNodeBin(gz, tag, node, lhs_ref, rhs_ref);
}
// --- multilineStringLiteral (AstGen.zig:8645) ---
// Port of strLitNodeAsString for multiline (`\\`) string literals.
// Copies each line's text (after the leading `\\`) into string_bytes,
// joining consecutive lines with '\n', appends a null terminator, and
// emits a `str` instruction referencing the accumulated bytes. Unlike
// identAsString, the result is not registered in the dedup table (see
// the string_table comment at the top of this file).
static uint32_t multilineStringLiteral(
    GenZir* gz, Scope* scope, uint32_t node) {
    (void)scope;
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    const char* src = tree->source;
    AstData nd = tree->nodes.datas[node];
    uint32_t first_tok = nd.lhs;
    uint32_t last_tok = nd.rhs;
    uint32_t start_offset = ag->string_bytes_len;
    for (uint32_t tok = first_tok; tok <= last_tok; tok++) {
        // Each token is one `\\...` line; skip the two backslash chars.
        uint32_t begin = tree->tokens.starts[tok] + 2;
        uint32_t end = begin;
        // NOTE(review): scans raw source to the next '\n'; on CRLF input
        // the '\r' would be copied — confirm the tokenizer guarantees LF.
        while (end < tree->source_len && src[end] != '\n')
            end++;
        uint32_t n = end - begin;
        bool is_first = (tok == first_tok);
        ensureStringBytesCapacity(ag, is_first ? n : n + 1);
        if (!is_first)
            ag->string_bytes[ag->string_bytes_len++] = '\n';
        memcpy(ag->string_bytes + ag->string_bytes_len, src + begin, n);
        ag->string_bytes_len += n;
    }
    uint32_t total_len = ag->string_bytes_len - start_offset;
    ensureStringBytesCapacity(ag, 1);
    ag->string_bytes[ag->string_bytes_len++] = 0; // null terminator
    ZirInstData data;
    data.str.start = start_offset;
    data.str.len = total_len;
    return addInstruction(gz, ZIR_INST_STR, data);
}
// --- ret (AstGen.zig:8119) ---

// Shared tail of every return path (upstream addRet, AstGen.zig:13188):
// when a ret_ptr result pointer was used the value is returned via
// ret_load, otherwise the operand ref is returned via ret_node. The
// original body duplicated this if/else five times; emission order is
// unchanged by the extraction.
static void retExprAddRet(GenZir* gz, bool use_ptr, uint32_t ret_ptr_inst,
    uint32_t operand, uint32_t node) {
    if (use_ptr) {
        addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
    } else {
        addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
    }
}

// Lowers `return` / `return expr` (AstGen.zig:8119): runs regular
// defers, restores the error-return trace according to whether the
// operand may evaluate to an error, then emits the return instruction.
// Always returns ZIR_REF_UNREACHABLE_VALUE — control flow does not
// continue past a return.
static uint32_t retExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    // Ensure debug line/column information is emitted for this return
    // expression (AstGen.zig:8141-8144).
    if (!gz->is_comptime) {
        emitDbgNode(gz, node);
    }
    uint32_t ret_lc_line = ag->source_line - gz->decl_line;
    uint32_t ret_lc_column = ag->source_column;
    // AstGen.zig:8123: return outside function is an error.
    if (ag->fn_block == NULL) {
        SET_ERROR(ag);
        return ZIR_REF_UNREACHABLE_VALUE;
    }
    const Scope* defer_outer = &((GenZir*)ag->fn_block)->base;
    AstData nd = tree->nodes.datas[node];
    uint32_t operand_node = nd.lhs; // optional
    if (operand_node == 0) {
        // Void return (AstGen.zig:8148-8156).
        genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
        // Restore error trace unconditionally (AstGen.zig:8153).
        ZirInstData rdata;
        rdata.un_node.operand = ZIR_REF_NONE;
        rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        addInstruction(
            gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
        retExprAddRet(gz, false, 0, ZIR_REF_VOID_VALUE, node);
        return ZIR_REF_UNREACHABLE_VALUE;
    }
    // Fast path: return error.Foo (AstGen.zig:8159-8175).
    if (tree->nodes.tags[operand_node] == AST_NODE_ERROR_VALUE) {
        // main_token is `error`; +2 skips past the `.` to the name.
        uint32_t error_token = tree->nodes.main_tokens[operand_node] + 2;
        uint32_t err_name_str = identAsString(ag, error_token);
        DeferCounts dc = countDefers(defer_outer, scope);
        if (!dc.need_err_code) {
            genDefers(gz, defer_outer, scope, DEFER_BOTH_SANS_ERR);
            emitDbgStmt(gz, ret_lc_line, ret_lc_column);
            addStrTok(gz, ZIR_INST_RET_ERR_VALUE, err_name_str, error_token);
            return ZIR_REF_UNREACHABLE_VALUE;
        }
        // need_err_code path: not implemented yet, fall through to general.
    }
    // Evaluate operand with result location (AstGen.zig:8178-8186).
    // If nodes_need_rl contains this return node, use ptr-based RL;
    // otherwise use coerced_ty.
    ResultLoc ret_rl = RL_NONE_VAL;
    bool use_ptr = nodesNeedRlContains(ag, node);
    uint32_t ret_ptr_inst = 0;
    if (use_ptr) {
        // Create ret_ptr instruction (AstGen.zig:8179).
        ZirInstData rpdata;
        rpdata.node = (int32_t)node - (int32_t)gz->decl_node_index;
        ret_ptr_inst = addInstruction(gz, ZIR_INST_RET_PTR, rpdata);
        ret_rl.tag = RL_PTR;
        ret_rl.data = ret_ptr_inst;
    } else if (ag->fn_ret_ty != 0) {
        ret_rl.tag = RL_COERCED_TY;
        ret_rl.data = ag->fn_ret_ty;
    }
    ret_rl.ctx = RI_CTX_RETURN;
    uint32_t operand = exprRl(gz, scope, ret_rl, operand_node);
    // Emit RESTORE_ERR_RET_INDEX based on nodeMayEvalToError
    // (AstGen.zig:8188-8253).
    int eval_to_err = nodeMayEvalToError(tree, operand_node);
    if (eval_to_err == EVAL_TO_ERROR_NEVER) {
        // Returning non-error: pop error trace unconditionally
        // (AstGen.zig:8190-8198).
        genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
        ZirInstData rdata;
        rdata.un_node.operand = ZIR_REF_NONE;
        rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        addInstruction(
            gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
        emitDbgStmt(gz, ret_lc_line, ret_lc_column);
        retExprAddRet(gz, use_ptr, ret_ptr_inst, operand, node);
        return ZIR_REF_UNREACHABLE_VALUE;
    } else if (eval_to_err == EVAL_TO_ERROR_ALWAYS) {
        // .always: emit both error defers and regular defers
        // (AstGen.zig:8200-8206). The load is still emitted when use_ptr
        // to keep ZIR output parity with upstream, even though the error
        // code is not yet consumed (errdefer support pending).
        uint32_t err_code = use_ptr
            ? addUnNode(gz, ZIR_INST_LOAD, ret_ptr_inst, node)
            : operand;
        (void)err_code;
        // TODO: genDefers with .both = err_code when errdefer is implemented.
        genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
        emitDbgStmt(gz, ret_lc_line, ret_lc_column);
        retExprAddRet(gz, use_ptr, ret_ptr_inst, operand, node);
        return ZIR_REF_UNREACHABLE_VALUE;
    } else {
        // .maybe (AstGen.zig:8208-8252).
        DeferCounts dc = countDefers(defer_outer, scope);
        if (!dc.have_err) {
            // Only regular defers; no branch needed (AstGen.zig:8210-8220).
            genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
            emitDbgStmt(gz, ret_lc_line, ret_lc_column);
            uint32_t result = use_ptr
                ? addUnNode(gz, ZIR_INST_LOAD, ret_ptr_inst, node)
                : operand;
            ZirInstData rdata;
            rdata.un_node.operand = result;
            rdata.un_node.src_node
                = (int32_t)node - (int32_t)gz->decl_node_index;
            addInstruction(gz, ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY, rdata);
            retExprAddRet(gz, use_ptr, ret_ptr_inst, operand, node);
            return ZIR_REF_UNREACHABLE_VALUE;
        }
        // have_err path: emit conditional branch (not yet implemented).
        // Fall through to simplified path.
        genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
        emitDbgStmt(gz, ret_lc_line, ret_lc_column);
        retExprAddRet(gz, use_ptr, ret_ptr_inst, operand, node);
        return ZIR_REF_UNREACHABLE_VALUE;
    }
}
// --- calleeExpr (AstGen.zig:10183) ---
// Result of lowering a call's callee expression: either a direct callee
// ref (`is_field == false`), or — for `obj.method(...)` syntax — the
// object pointer plus the method name for a field call.
typedef struct {
    bool is_field;
    uint32_t obj_ptr; // for field calls: ref to object
    uint32_t field_name_start; // for field calls: string index
    uint32_t direct; // for direct calls: ref to callee
} Callee;
static Callee calleeExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t fn_expr_node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    switch (tree->nodes.tags[fn_expr_node]) {
    case AST_NODE_FIELD_ACCESS: {
        // `obj.method(...)`: produce a field call. The name string is
        // interned before the object is lowered, preserving string_bytes
        // ordering.
        AstData nd = tree->nodes.datas[fn_expr_node];
        uint32_t name_str = identAsString(ag, nd.rhs);
        // Evaluate the object with .ref rl (AstGen.zig:10207).
        uint32_t obj_ref = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
        // dbg_stmt at the `.` dot (the main token), not the first token
        // (AstGen.zig:10209).
        advanceSourceCursorToMainToken(ag, gz, fn_expr_node);
        emitDbgStmt(
            gz, ag->source_line - gz->decl_line, ag->source_column);
        return (Callee) {
            .is_field = true,
            .obj_ptr = obj_ref,
            .field_name_start = name_str,
            .direct = 0,
        };
    }
    case AST_NODE_ENUM_LITERAL: {
        // Decl-literal call syntax `.foo(...)`: resolve `foo` in the
        // result type, when one exists (AstGen.zig:10217-10233).
        uint32_t res_ty = rlResultType(gz, rl, fn_expr_node);
        if (res_ty != 0) {
            uint32_t name_str
                = identAsString(ag, tree->nodes.main_tokens[fn_expr_node]);
            uint32_t callee_ref
                = addPlNodeBin(gz, ZIR_INST_DECL_LITERAL_NO_COERCE,
                    fn_expr_node, res_ty, name_str);
            return (Callee) {
                .is_field = false,
                .obj_ptr = 0,
                .field_name_start = 0,
                .direct = callee_ref,
            };
        }
        // No result type: treat like any other expression.
        break;
    }
    default:
        break;
    }
    // Default: direct call (AstGen.zig:10235).
    return (Callee) {
        .is_field = false,
        .obj_ptr = 0,
        .field_name_start = 0,
        .direct = expr(gz, scope, fn_expr_node),
    };
}
// --- callExpr (AstGen.zig:10058) ---
// Lowers a call expression in four phases: (1) gather the callee node
// and argument nodes from the AST variant, (2) lower the callee via
// calleeExpr, (3) reserve the call instruction slot and lower each
// argument into its own inline body collected in scratch_extra,
// (4) build the Call/FieldCall payload and patch the reserved slot.
static uint32_t callExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Extract callee and args from AST.
    uint32_t fn_expr_node;
    uint32_t arg_buf[2]; // backing storage for the single-argument form
    const uint32_t* args = NULL;
    uint32_t args_len = 0;
    uint32_t lparen_tok;
    switch (tag) {
    case AST_NODE_CALL_ONE:
    case AST_NODE_CALL_ONE_COMMA: {
        // lhs = callee expr, rhs = the single argument (0 = none).
        fn_expr_node = nd.lhs;
        lparen_tok = tree->nodes.main_tokens[node];
        if (nd.rhs != 0) {
            arg_buf[0] = nd.rhs;
            args = arg_buf;
            args_len = 1;
        }
        break;
    }
    case AST_NODE_CALL:
    case AST_NODE_CALL_COMMA: {
        // lhs = callee expr, rhs = extra index to a SubRange of args.
        fn_expr_node = nd.lhs;
        lparen_tok = tree->nodes.main_tokens[node];
        uint32_t extra_idx = nd.rhs;
        uint32_t range_start = tree->extra_data.arr[extra_idx];
        uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
        args = tree->extra_data.arr + range_start;
        args_len = range_end - range_start;
        break;
    }
    default:
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    Callee callee = calleeExpr(gz, scope, rl, fn_expr_node);
    // dbg_stmt before call (AstGen.zig:10078-10083).
    {
        advanceSourceCursor(ag, tree->tokens.starts[lparen_tok]);
        uint32_t line = ag->source_line - gz->decl_line;
        uint32_t column = ag->source_column;
        emitDbgStmtForceCurrentIndex(gz, line, column);
    }
    // Reserve instruction slot for call (AstGen.zig:10093). Tag and data
    // are zeroed placeholders, patched below once the payload is built.
    uint32_t call_index = ag->inst_len;
    ensureInstCapacity(ag, 1);
    memset(&ag->inst_datas[call_index], 0, sizeof(ZirInstData));
    ag->inst_tags[call_index] = (ZirInstTag)0;
    ag->inst_len++;
    gzAppendInstruction(gz, call_index);
    // Process arguments in sub-blocks (AstGen.zig:10096-10116).
    // Upstream uses a separate scratch array; we use a local buffer for body
    // lengths and append body instructions to scratch_extra, then copy all
    // to extra after the call payload.
    uint32_t call_inst = call_index + ZIR_REF_START_INDEX;
    // Each argument is evaluated with the in-progress call instruction as
    // its coerced result type (AstGen.zig:10096-10116); the consumer
    // resolves the actual parameter type from the call inst.
    ResultLoc arg_rl = { .tag = RL_COERCED_TY,
        .data = call_inst,
        .src_node = 0,
        .ctx = RI_CTX_FN_ARG };
    // Use scratch_extra to collect body lengths + body instructions,
    // mirroring upstream's scratch array (AstGen.zig:10096-10116).
    uint32_t scratch_top = ag->scratch_extra_len;
    // Reserve space for cumulative body lengths (one per arg).
    ensureScratchExtraCapacity(ag, args_len);
    ag->scratch_extra_len += args_len;
    for (uint32_t i = 0; i < args_len; i++) {
        GenZir arg_block = makeSubBlock(gz, scope);
        uint32_t arg_ref
            = exprRl(&arg_block, &arg_block.base, arg_rl, args[i]);
        // break_inline with param_node src (AstGen.zig:10108).
        int32_t param_src
            = (int32_t)args[i] - (int32_t)arg_block.decl_node_index;
        makeBreakInline(&arg_block, call_index, arg_ref, param_src);
        // Append arg_block body to scratch_extra (with ref_table fixups).
        uint32_t raw_body_len = gzInstructionsLen(&arg_block);
        const uint32_t* body = gzInstructionsSlice(&arg_block);
        uint32_t fixup_len = countBodyLenAfterFixups(ag, body, raw_body_len);
        ensureScratchExtraCapacity(ag, fixup_len);
        for (uint32_t j = 0; j < raw_body_len; j++) {
            appendPossiblyRefdBodyInstScratch(ag, body[j]);
        }
        // Record cumulative body length (AstGen.zig:10114). The i-th slot
        // holds the total scratch words written through argument i.
        ag->scratch_extra[scratch_top + i]
            = ag->scratch_extra_len - scratch_top;
        gzUnstack(&arg_block);
    }
    // Build call payload (AstGen.zig:10118-10168).
    // Upstream layout: [flags, callee/obj_ptr, field_name_start], then
    // body_lengths + body_instructions from scratch.
    // Flags layout (packed): modifier:u3, ensure_result_used:bool,
    // pop_error_return_trace:bool, args_len:u27.
    // pop_error_return_trace = !propagate_error_trace
    // (AstGen.zig:10121-10124).
    bool propagate_error_trace
        = (rl.ctx == RI_CTX_ERROR_HANDLING_EXPR || rl.ctx == RI_CTX_RETURN
            || rl.ctx == RI_CTX_FN_ARG || rl.ctx == RI_CTX_CONST_INIT);
    uint32_t flags = (propagate_error_trace ? 0u : (1u << 4))
        | ((args_len & 0x7FFFFFFu) << 5); // args_len
    if (callee.is_field) {
        // FieldCall: {flags, obj_ptr, field_name_start} (AstGen.zig:10148).
        ensureExtraCapacity(ag, 3 + (ag->scratch_extra_len - scratch_top));
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = flags;
        ag->extra[ag->extra_len++] = callee.obj_ptr;
        ag->extra[ag->extra_len++] = callee.field_name_start;
        // Append scratch data (body lengths + body instructions).
        if (args_len != 0) {
            memcpy(ag->extra + ag->extra_len, ag->scratch_extra + scratch_top,
                (ag->scratch_extra_len - scratch_top) * sizeof(uint32_t));
            ag->extra_len += ag->scratch_extra_len - scratch_top;
        }
        ag->inst_tags[call_index] = ZIR_INST_FIELD_CALL;
        ag->inst_datas[call_index].pl_node.src_node
            = (int32_t)node - (int32_t)gz->decl_node_index;
        ag->inst_datas[call_index].pl_node.payload_index = payload_index;
    } else {
        // Call: {flags, callee} (AstGen.zig:10128).
        ensureExtraCapacity(ag, 2 + (ag->scratch_extra_len - scratch_top));
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = flags;
        ag->extra[ag->extra_len++] = callee.direct;
        // Append scratch data (body lengths + body instructions).
        if (args_len != 0) {
            memcpy(ag->extra + ag->extra_len, ag->scratch_extra + scratch_top,
                (ag->scratch_extra_len - scratch_top) * sizeof(uint32_t));
            ag->extra_len += ag->scratch_extra_len - scratch_top;
        }
        ag->inst_tags[call_index] = ZIR_INST_CALL;
        ag->inst_datas[call_index].pl_node.src_node
            = (int32_t)node - (int32_t)gz->decl_node_index;
        ag->inst_datas[call_index].pl_node.payload_index = payload_index;
    }
    // Restore scratch (AstGen.zig:10097 defer).
    ag->scratch_extra_len = scratch_top;
    return call_index + ZIR_REF_START_INDEX; // .toRef()
}
// --- structInitExpr (AstGen.zig:1674) ---
// Simplified: handles .{} (empty tuple), .{.a = b} (anon init).
// Also handles typed forms `T{}` / `T{.a = b, ...}` and the pointer
// result-location path. Dispatch summary:
//   - `.{}`         → empty-init instruction chosen by result loc
//   - anon `.{...}` → field-pointer inits when the result loc is a
//                     pointer; struct_init when the result loc carries a
//                     type; struct_init_anon otherwise
//   - `T{}`         → struct_init_empty (with [_] length inference)
//   - `T{...}`      → validate_struct_init_ty + struct_init(_ref)
static uint32_t structInitExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Extract type_expr and fields.
    uint32_t type_expr_node = 0; // 0 = anonymous (.{...})
    uint32_t field_buf[2]; // backing storage for the small inline forms
    const uint32_t* fields = NULL;
    uint32_t fields_len = 0;
    switch (tag) {
    case AST_NODE_STRUCT_INIT_DOT_TWO:
    case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: {
        // .{.a = lhs, .b = rhs}
        uint32_t idx = 0;
        if (nd.lhs != 0)
            field_buf[idx++] = nd.lhs;
        if (nd.rhs != 0)
            field_buf[idx++] = nd.rhs;
        fields = field_buf;
        fields_len = idx;
        break;
    }
    case AST_NODE_STRUCT_INIT_DOT:
    case AST_NODE_STRUCT_INIT_DOT_COMMA: {
        // Anonymous init; fields are extra_data[lhs..rhs].
        uint32_t start = nd.lhs;
        uint32_t end = nd.rhs;
        fields = tree->extra_data.arr + start;
        fields_len = end - start;
        break;
    }
    case AST_NODE_STRUCT_INIT_ONE:
    case AST_NODE_STRUCT_INIT_ONE_COMMA: {
        // lhs = type expr, rhs = the single field init (0 = none).
        type_expr_node = nd.lhs;
        if (nd.rhs != 0) {
            field_buf[0] = nd.rhs;
            fields = field_buf;
            fields_len = 1;
        }
        break;
    }
    case AST_NODE_STRUCT_INIT:
    case AST_NODE_STRUCT_INIT_COMMA: {
        // lhs = type expr, rhs = extra index to a SubRange of fields.
        type_expr_node = nd.lhs;
        uint32_t extra_idx = nd.rhs;
        uint32_t range_start = tree->extra_data.arr[extra_idx];
        uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
        fields = tree->extra_data.arr + range_start;
        fields_len = range_end - range_start;
        break;
    }
    default:
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    if (type_expr_node == 0 && fields_len == 0) {
        // .{} — depends on result location (AstGen.zig:1687-1698).
        if (rl.tag == RL_REF_COERCED_TY) {
            return addUnNode(
                gz, ZIR_INST_STRUCT_INIT_EMPTY_REF_RESULT, rl.data, node);
        }
        if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY) {
            return addUnNode(
                gz, ZIR_INST_STRUCT_INIT_EMPTY_RESULT, rl.data, node);
        }
        if (rl.tag == RL_DISCARD) {
            return ZIR_REF_VOID_VALUE;
        }
        return ZIR_REF_EMPTY_TUPLE;
    }
    // Pre-register all field names to match upstream string ordering.
    // Upstream has a duplicate name check (AstGen.zig:1756-1806) that
    // adds all field names to string_bytes before evaluating values.
    // (The name token is 2 tokens before the init's first token:
    // `.`, `name`, `=`, init...)
    for (uint32_t i = 0; i < fields_len; i++) {
        uint32_t name_token = firstToken(tree, fields[i]) - 2;
        identAsString(ag, name_token);
    }
    if (type_expr_node == 0 && fields_len > 0) {
        // structInitExprPtr for RL_PTR (AstGen.zig:1843-1846, 1934-1964).
        if (rl.tag == RL_PTR) {
            uint32_t struct_ptr_inst
                = addUnNode(gz, ZIR_INST_OPT_EU_BASE_PTR_INIT, rl.data, node);
            // Block payload: body_len = fields_len. Item slots are
            // reserved here and written by index inside the loop, since
            // lowering each init may grow ag->extra.
            ensureExtraCapacity(ag, 1 + fields_len);
            uint32_t payload_index = ag->extra_len;
            ag->extra[ag->extra_len++] = fields_len;
            uint32_t items_start = ag->extra_len;
            ag->extra_len += fields_len;
            for (uint32_t i = 0; i < fields_len; i++) {
                uint32_t field_init = fields[i];
                uint32_t name_token = firstToken(tree, field_init) - 2;
                uint32_t str_index = identAsString(ag, name_token);
                // struct_init_field_ptr (AstGen.zig:1954-1957).
                uint32_t field_ptr
                    = addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_PTR,
                        field_init, struct_ptr_inst, str_index);
                ag->extra[items_start + i]
                    = field_ptr - ZIR_REF_START_INDEX; // .toIndex()
                // Evaluate init with ptr RL (AstGen.zig:1960).
                ResultLoc ptr_rl = { .tag = RL_PTR,
                    .data = field_ptr,
                    .src_node = 0,
                    .ctx = rl.ctx };
                exprRl(gz, scope, ptr_rl, field_init);
            }
            addPlNodePayloadIndex(
                gz, ZIR_INST_VALIDATE_PTR_STRUCT_INIT, node, payload_index);
            return ZIR_REF_VOID_VALUE;
        }
        // Anonymous struct init with RL type (AstGen.zig:1706-1731).
        if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY) {
            uint32_t ty_inst = rl.data;
            // validate_struct_init_result_ty (AstGen.zig:1840).
            addUnNode(
                gz, ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY, ty_inst, node);
            // structInitExprTyped (AstGen.zig:1896-1931).
            // StructInit payload: {abs_node, abs_line, fields_len},
            // then per field {field_type inst index, init ref}.
            ensureExtraCapacity(ag, 3 + fields_len * 2);
            uint32_t payload_index = ag->extra_len;
            ag->extra[ag->extra_len++] = node;
            ag->extra[ag->extra_len++] = ag->source_line;
            ag->extra[ag->extra_len++] = fields_len;
            uint32_t items_start = ag->extra_len;
            ag->extra_len += fields_len * 2;
            for (uint32_t i = 0; i < fields_len; i++) {
                uint32_t field_init = fields[i];
                uint32_t name_token = firstToken(tree, field_init) - 2;
                uint32_t str_index = identAsString(ag, name_token);
                uint32_t field_ty_inst
                    = addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_TYPE,
                        field_init, ty_inst, str_index);
                // Unnamed fields (.ctx) are zero-initialized.
                ResultLoc elem_rl = {
                    .tag = RL_COERCED_TY, .data = field_ty_inst, .src_node = 0
                };
                uint32_t init_ref = exprRl(gz, scope, elem_rl, field_init);
                ag->extra[items_start + i * 2]
                    = field_ty_inst - ZIR_REF_START_INDEX;
                ag->extra[items_start + i * 2 + 1] = init_ref;
            }
            return addPlNodePayloadIndex(
                gz, ZIR_INST_STRUCT_INIT, node, payload_index);
        }
        // Anonymous struct init without RL type (AstGen.zig:1864).
        // StructInitAnon payload: abs_node, abs_line, fields_len.
        ensureExtraCapacity(ag, 3 + fields_len * 2);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = node; // abs_node
        ag->extra[ag->extra_len++] = ag->source_line; // abs_line
        ag->extra[ag->extra_len++] = fields_len;
        // Reserve space for field entries.
        uint32_t items_start = ag->extra_len;
        ag->extra_len += fields_len * 2;
        for (uint32_t i = 0; i < fields_len; i++) {
            uint32_t field_init = fields[i];
            // field name is 2 tokens before the field init's first token.
            uint32_t name_token = firstToken(tree, field_init) - 2;
            uint32_t str_index = identAsString(ag, name_token);
            uint32_t init_ref = expr(gz, scope, field_init);
            ag->extra[items_start + i * 2] = str_index;
            ag->extra[items_start + i * 2 + 1] = init_ref;
        }
        return addPlNodePayloadIndex(
            gz, ZIR_INST_STRUCT_INIT_ANON, node, payload_index);
    }
    // Typed init: evaluate type, emit struct_init_empty or struct_init.
    if (type_expr_node != 0 && fields_len == 0) {
        // Check for [_]T{} pattern (AstGen.zig:1707-1753).
        AstNodeTag type_tag = tree->nodes.tags[type_expr_node];
        if (type_tag == AST_NODE_ARRAY_TYPE
            || type_tag == AST_NODE_ARRAY_TYPE_SENTINEL) {
            AstData type_nd = tree->nodes.datas[type_expr_node];
            uint32_t elem_count_node = type_nd.lhs;
            if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER
                && isUnderscoreIdent(tree, elem_count_node)) {
                // Inferred length with 0 fields → length 0.
                if (type_tag == AST_NODE_ARRAY_TYPE) {
                    uint32_t elem_type = typeExpr(gz, scope, type_nd.rhs);
                    uint32_t array_type_inst
                        = addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, type_expr_node,
                            ZIR_REF_ZERO_USIZE, elem_type);
                    return rvalue(gz, rl,
                        addUnNode(gz, ZIR_INST_STRUCT_INIT_EMPTY,
                            array_type_inst, node),
                        node);
                }
                // ARRAY_TYPE_SENTINEL: extra[rhs] = sentinel, extra[rhs+1]
                // = elem_type
                uint32_t sentinel_node = tree->extra_data.arr[type_nd.rhs];
                uint32_t elem_type_node
                    = tree->extra_data.arr[type_nd.rhs + 1];
                uint32_t elem_type = typeExpr(gz, scope, elem_type_node);
                ResultLoc sent_rl = { .tag = RL_COERCED_TY,
                    .data = elem_type,
                    .src_node = 0,
                    .ctx = RI_CTX_NONE };
                uint32_t sentinel = comptimeExpr(gz, scope, sent_rl,
                    sentinel_node, COMPTIME_REASON_ARRAY_SENTINEL);
                uint32_t array_type_inst = addPlNodeTriple(gz,
                    ZIR_INST_ARRAY_TYPE_SENTINEL, type_expr_node,
                    ZIR_REF_ZERO_USIZE, elem_type, sentinel);
                return rvalue(gz, rl,
                    addUnNode(
                        gz, ZIR_INST_STRUCT_INIT_EMPTY, array_type_inst, node),
                    node);
            }
        }
        uint32_t ty_inst = typeExpr(gz, scope, type_expr_node);
        return rvalue(gz, rl,
            addUnNode(gz, ZIR_INST_STRUCT_INIT_EMPTY, ty_inst, node), node);
    }
    // Typed struct init with fields (AstGen.zig:1808-1818).
    if (type_expr_node != 0 && fields_len > 0) {
        uint32_t ty_inst = typeExpr(gz, scope, type_expr_node);
        addUnNode(gz, ZIR_INST_VALIDATE_STRUCT_INIT_TY, ty_inst, node);
        // structInitExprTyped (AstGen.zig:1896-1931).
        // StructInit payload: abs_node, abs_line, fields_len.
        ensureExtraCapacity(ag, 3 + fields_len * 2);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = node; // abs_node
        ag->extra[ag->extra_len++] = ag->source_line; // abs_line
        ag->extra[ag->extra_len++] = fields_len;
        // Reserve space for field items (field_type + init each); slots
        // are written by index after each field is lowered, since
        // lowering may grow ag->extra.
        uint32_t items_start = ag->extra_len;
        ag->extra_len += fields_len * 2;
        for (uint32_t i = 0; i < fields_len; i++) {
            uint32_t field_init = fields[i];
            uint32_t name_token = firstToken(tree, field_init) - 2;
            uint32_t str_index = identAsString(ag, name_token);
            // struct_init_field_type (AstGen.zig:1918-1921).
            uint32_t field_ty_inst
                = addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_TYPE, field_init,
                    ty_inst, str_index);
            // Evaluate init with coerced_ty (AstGen.zig:1924).
            ResultLoc elem_rl = { .tag = RL_COERCED_TY,
                .data = field_ty_inst,
                .src_node = 0,
                .ctx = rl.ctx };
            uint32_t init_ref = exprRl(gz, scope, elem_rl, field_init);
            ag->extra[items_start + i * 2]
                = field_ty_inst - ZIR_REF_START_INDEX; // .toIndex()
            ag->extra[items_start + i * 2 + 1] = init_ref;
        }
        bool is_ref = (RL_IS_REF(rl));
        ZirInstTag init_tag
            = is_ref ? ZIR_INST_STRUCT_INIT_REF : ZIR_INST_STRUCT_INIT;
        return addPlNodePayloadIndex(gz, init_tag, node, payload_index);
    }
    SET_ERROR(ag);
    return ZIR_REF_VOID_VALUE;
}
// --- tryExpr (AstGen.zig:5957) ---
// Lowers `try operand`: evaluates the operand, then builds a `try` block
// whose else-body extracts the error code, runs the applicable defers,
// emits a dbg_stmt for error-return tracing, and returns the error from
// the enclosing function.
static uint32_t tryExpr(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const AstData data = ag->tree->nodes.datas[node];
    if (!gz->is_comptime) {
        emitDbgNode(gz, node);
    }
    // Remember the source position of the `try` keyword; the error path
    // re-emits it as a dbg_stmt below.
    const uint32_t dbg_line = ag->source_line - gz->decl_line;
    const uint32_t dbg_col = ag->source_column;
    // Evaluate the operand (AstGen.zig:5993-6001).
    ResultLoc op_rl = RL_NONE_VAL;
    op_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
    const uint32_t operand = exprRl(gz, scope, op_rl, data.lhs);
    // Create the try block instruction (AstGen.zig:6007).
    const uint32_t try_inst = makeBlockInst(ag, ZIR_INST_TRY, gz, node);
    gzAppendInstruction(gz, try_inst);
    // Build the else body: pull out the error code and return it
    // (AstGen.zig:6012-6025).
    GenZir else_gz = makeSubBlock(gz, scope);
    const uint32_t err_code
        = addUnNode(&else_gz, ZIR_INST_ERR_UNION_CODE, operand, node);
    if (ag->fn_block != NULL) {
        // Run defers on the error path (AstGen.zig:6019).
        const Scope* fn_scope = &((GenZir*)ag->fn_block)->base;
        genDefers(&else_gz, fn_scope, scope, DEFER_BOTH_SANS_ERR);
    }
    // dbg_stmt at the try keyword for error return tracing
    // (AstGen.zig:6020).
    emitDbgStmt(&else_gz, dbg_line, dbg_col);
    // ret_node with the error code (AstGen.zig:6021).
    addUnNode(&else_gz, ZIR_INST_RET_NODE, err_code, node);
    setTryBody(ag, &else_gz, try_inst, operand); // unstacks else_gz
    return try_inst + ZIR_REF_START_INDEX; // toRef()
}
// --- boolBinOp (AstGen.zig:6274) ---
// Short-circuiting boolean `and`/`or`. The LHS is evaluated in `gz`; the
// RHS gets its own body hanging off a reserved bool_br instruction whose
// payload is back-filled once the body length is known.
static uint32_t boolBinOp(
    GenZir* gz, Scope* scope, uint32_t node, ZirInstTag zir_tag) {
    AstGenCtx* ag = gz->astgen;
    const AstData operands = ag->tree->nodes.datas[node];
    // Evaluate LHS (AstGen.zig:6285).
    const uint32_t lhs_ref = expr(gz, scope, operands.lhs);
    // Reserve the bool_br instruction now; its data is written at the
    // end of this function (AstGen.zig:6286).
    const uint32_t bool_br = reserveInstructionIndex(ag);
    gzAppendInstruction(gz, bool_br);
    // Evaluate RHS inside a sub-block (AstGen.zig:6288-6293).
    GenZir rhs_gz = makeSubBlock(gz, scope);
    const uint32_t rhs_ref = expr(&rhs_gz, &rhs_gz.base, operands.rhs);
    if (!ag->has_compile_errors) {
        // break_inline from the RHS body back to bool_br
        // (AstGen.zig:6292).
        makeBreakInline(&rhs_gz, bool_br, rhs_ref,
            (int32_t)operands.rhs - (int32_t)rhs_gz.decl_node_index);
    }
    // setBoolBrBody (AstGen.zig:6294, 11929-11944): write the BoolBr
    // payload { lhs, body_len } followed by the fixed-up body.
    const uint32_t raw_len = gzInstructionsLen(&rhs_gz);
    const uint32_t* body_insts = gzInstructionsSlice(&rhs_gz);
    const uint32_t fixed_len = countBodyLenAfterFixups(ag, body_insts, raw_len);
    ensureExtraCapacity(ag, 2 + fixed_len);
    const uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = lhs_ref;   // BoolBr.lhs
    ag->extra[ag->extra_len++] = fixed_len; // BoolBr.body_len
    for (uint32_t idx = 0; idx < raw_len; idx++)
        appendPossiblyRefdBodyInst(ag, body_insts[idx]);
    gzUnstack(&rhs_gz);
    // Back-fill the reserved bool_br instruction.
    ag->inst_tags[bool_br] = zir_tag;
    ag->inst_datas[bool_br].pl_node.src_node
        = (int32_t)node - (int32_t)gz->decl_node_index;
    ag->inst_datas[bool_br].pl_node.payload_index = payload_index;
    return bool_br + ZIR_REF_START_INDEX;
}
// Mirrors expr (AstGen.zig:634) — main expression dispatcher.
// Evaluates AST `node` with result location `rl`, emitting ZIR into `gz`
// and returning a ZIR ref. Node tags not yet ported set the error flag
// and return void_value.
static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    if (node == 0) {
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    AstNodeTag tag = ag->tree->nodes.tags[node];
    AstData nd = ag->tree->nodes.datas[node];
    switch (tag) {
    case AST_NODE_NUMBER_LITERAL:
        return rvalue(gz, rl, numberLiteral(gz, node), node);
    case AST_NODE_BUILTIN_CALL_TWO:
    case AST_NODE_BUILTIN_CALL_TWO_COMMA:
        return rvalue(gz, rl, builtinCall(gz, scope, rl, node), node);
    case AST_NODE_FIELD_ACCESS:
        return fieldAccessExpr(gz, scope, rl, node);
    case AST_NODE_IDENTIFIER:
        return identifierExpr(gz, scope, rl, node);
    case AST_NODE_STRING_LITERAL: {
        // Mirrors stringLiteral (AstGen.zig:8626).
        uint32_t str_lit_token = ag->tree->nodes.main_tokens[node];
        uint32_t str_index, str_len;
        strLitAsString(ag, str_lit_token, &str_index, &str_len);
        ZirInstData data;
        data.str.start = str_index;
        data.str.len = str_len;
        uint32_t str_result = addInstruction(gz, ZIR_INST_STR, data);
        return rvalue(gz, rl, str_result, node);
    }
    // address_of (AstGen.zig:953-960): evaluate operand with .ref rl.
    case AST_NODE_ADDRESS_OF: {
        uint32_t operand_node = ag->tree->nodes.datas[node].lhs;
        // Check for result type to emit validate_ref_ty (AstGen.zig:954-956).
        uint32_t res_ty = rlResultType(gz, rl, node);
        ResultLoc operand_rl;
        if (res_ty != 0) {
            addUnTok(gz, ZIR_INST_VALIDATE_REF_TY, res_ty,
                firstToken(ag->tree, node));
            // Pass ref_coerced_ty so init expressions can use the type
            // (AstGen.zig:958).
            operand_rl = (ResultLoc) {
                .tag = RL_REF_COERCED_TY, .data = res_ty, .src_node = 0
            };
        } else {
            operand_rl = RL_REF_VAL;
        }
        uint32_t result = exprRl(gz, scope, operand_rl, operand_node);
        return rvalue(gz, rl, result, node);
    }
    // ptr_type (AstGen.zig:1077-1081).
    case AST_NODE_PTR_TYPE_ALIGNED:
    case AST_NODE_PTR_TYPE_SENTINEL:
    case AST_NODE_PTR_TYPE:
    case AST_NODE_PTR_TYPE_BIT_RANGE:
        return rvalue(gz, rl, ptrTypeExpr(gz, scope, node), node);
    // array_type (AstGen.zig:940).
    case AST_NODE_ARRAY_TYPE:
        return rvalue(gz, rl, arrayTypeExpr(gz, scope, node), node);
    // array_init variants (AstGen.zig:836-856).
    case AST_NODE_ARRAY_INIT:
    case AST_NODE_ARRAY_INIT_COMMA:
    case AST_NODE_ARRAY_INIT_ONE:
    case AST_NODE_ARRAY_INIT_ONE_COMMA:
        return arrayInitExpr(gz, scope, rl, node);
    // array_cat (AstGen.zig:772): ++ binary operator.
    case AST_NODE_ARRAY_CAT:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ARRAY_CAT), node);
    // grouped_expression (AstGen.zig:1100): passthrough.
    case AST_NODE_GROUPED_EXPRESSION:
        return exprRl(gz, scope, rl, ag->tree->nodes.datas[node].lhs);
    // unreachable_literal (AstGen.zig:846-854).
    case AST_NODE_UNREACHABLE_LITERAL: {
        emitDbgNode(gz, node);
        ZirInstData udata;
        memset(&udata, 0, sizeof(udata));
        udata.unreachable_data.src_node
            = (int32_t)node - (int32_t)gz->decl_node_index;
        addInstruction(gz, ZIR_INST_UNREACHABLE, udata);
        return ZIR_REF_UNREACHABLE_VALUE;
    }
    // enum_literal (AstGen.zig:993).
    case AST_NODE_ENUM_LITERAL: {
        uint32_t ident_token = ag->tree->nodes.main_tokens[node];
        uint32_t str_index = identAsString(ag, ident_token);
        // If result type available, emit decl_literal (AstGen.zig:993-1003).
        uint32_t res_ty = rlResultType(gz, rl, node);
        if (res_ty != 0) {
            uint32_t res = addPlNodeBin(
                gz, ZIR_INST_DECL_LITERAL, node, res_ty, str_index);
            // decl_literal does the coercion for us (AstGen.zig:1001).
            // Only need rvalue for ptr/inferred_ptr/ref_coerced_ty.
            if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY)
                return res;
            return rvalue(gz, rl, res, node);
        }
        return rvalue(gz, rl,
            addStrTok(gz, ZIR_INST_ENUM_LITERAL, str_index, ident_token),
            node);
    }
    // multiline_string_literal (AstGen.zig:8645).
    case AST_NODE_MULTILINE_STRING_LITERAL:
        return rvalue(gz, rl, multilineStringLiteral(gz, scope, node), node);
    // return (AstGen.zig:856).
    case AST_NODE_RETURN:
        return retExpr(gz, scope, node);
    // call (AstGen.zig:783-790).
    case AST_NODE_CALL_ONE:
    case AST_NODE_CALL_ONE_COMMA:
    case AST_NODE_CALL:
    case AST_NODE_CALL_COMMA:
        return rvalue(gz, rl, callExpr(gz, scope, rl, node), node);
    // struct_init (AstGen.zig:836-839).
    case AST_NODE_STRUCT_INIT_DOT_TWO:
    case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
    case AST_NODE_STRUCT_INIT_DOT:
    case AST_NODE_STRUCT_INIT_DOT_COMMA:
    case AST_NODE_STRUCT_INIT_ONE:
    case AST_NODE_STRUCT_INIT_ONE_COMMA:
    case AST_NODE_STRUCT_INIT:
    case AST_NODE_STRUCT_INIT_COMMA:
        return structInitExpr(gz, scope, rl, node);
    // container_decl (AstGen.zig:1083-1098).
    case AST_NODE_CONTAINER_DECL:
    case AST_NODE_CONTAINER_DECL_TRAILING:
    case AST_NODE_CONTAINER_DECL_TWO:
    case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
    case AST_NODE_CONTAINER_DECL_ARG:
    case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
    case AST_NODE_TAGGED_UNION:
    case AST_NODE_TAGGED_UNION_TRAILING:
    case AST_NODE_TAGGED_UNION_TWO:
    case AST_NODE_TAGGED_UNION_TWO_TRAILING:
    case AST_NODE_TAGGED_UNION_ENUM_TAG:
    case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
        return rvalue(gz, rl, containerDecl(gz, scope, node), node);
    // try (AstGen.zig:831).
    case AST_NODE_TRY:
        return rvalue(gz, rl, tryExpr(gz, scope, node), node);
    // Comparison operators (AstGen.zig:714-726).
    case AST_NODE_EQUAL_EQUAL:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_EQ), node);
    case AST_NODE_BANG_EQUAL:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_NEQ), node);
    case AST_NODE_LESS_THAN:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_LT), node);
    case AST_NODE_GREATER_THAN:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_GT), node);
    case AST_NODE_LESS_OR_EQUAL:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_LTE), node);
    case AST_NODE_GREATER_OR_EQUAL:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_GTE), node);
    // Arithmetic (AstGen.zig:656-698).
    case AST_NODE_ADD:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADD), node);
    case AST_NODE_SUB:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUB), node);
    case AST_NODE_MUL:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MUL), node);
    case AST_NODE_DIV:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_DIV), node);
    case AST_NODE_MOD:
        // The `%` operator lowers to mod_rem, same as assign_mod below
        // (upstream AstGen.zig maps .mod to .mod_rem; the plain mod/rem
        // ZIR insts are reserved for the @mod/@rem builtins).
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MOD_REM), node);
    // Bitwise (AstGen.zig:700-712).
    case AST_NODE_BIT_AND:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_BIT_AND), node);
    case AST_NODE_BIT_OR:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_BIT_OR), node);
    case AST_NODE_BIT_XOR:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_XOR), node);
    case AST_NODE_SHL:
        return rvalue(gz, rl, shiftOp(gz, scope, node, ZIR_INST_SHL), node);
    case AST_NODE_SHR:
        return rvalue(gz, rl, shiftOp(gz, scope, node, ZIR_INST_SHR), node);
    // Boolean operators (AstGen.zig:728-731) — special: boolBinOp.
    case AST_NODE_BOOL_AND:
        return rvalue(
            gz, rl, boolBinOp(gz, scope, node, ZIR_INST_BOOL_BR_AND), node);
    case AST_NODE_BOOL_OR:
        return rvalue(
            gz, rl, boolBinOp(gz, scope, node, ZIR_INST_BOOL_BR_OR), node);
    // Unary operators (AstGen.zig:919-938).
    case AST_NODE_BOOL_NOT:
        return rvalue(gz, rl,
            addUnNode(gz, ZIR_INST_BOOL_NOT, expr(gz, scope, nd.lhs), node),
            node);
    case AST_NODE_BIT_NOT:
        return rvalue(gz, rl,
            addUnNode(gz, ZIR_INST_BIT_NOT, expr(gz, scope, nd.lhs), node),
            node);
    case AST_NODE_NEGATION:
        return rvalue(gz, rl,
            addUnNode(gz, ZIR_INST_NEGATE, expr(gz, scope, nd.lhs), node),
            node);
    case AST_NODE_NEGATION_WRAP:
        return rvalue(gz, rl,
            addUnNode(gz, ZIR_INST_NEGATE_WRAP, expr(gz, scope, nd.lhs), node),
            node);
    // deref (AstGen.zig:942-951).
    case AST_NODE_DEREF: {
        uint32_t lhs = expr(gz, scope, nd.lhs);
        addUnNode(gz, ZIR_INST_VALIDATE_DEREF, lhs, node);
        if (RL_IS_REF(rl))
            return lhs;
        return rvalue(gz, rl, addUnNode(gz, ZIR_INST_LOAD, lhs, node), node);
    }
    // optional_type (AstGen.zig:961-964).
    case AST_NODE_OPTIONAL_TYPE:
        return rvalue(gz, rl,
            addUnNode(
                gz, ZIR_INST_OPTIONAL_TYPE, typeExpr(gz, scope, nd.lhs), node),
            node);
    // unwrap_optional (AstGen.zig:966-985).
    case AST_NODE_UNWRAP_OPTIONAL: {
        uint32_t lhs = expr(gz, scope, nd.lhs);
        advanceSourceCursorToMainToken(ag, gz, node);
        uint32_t saved_line = ag->source_line - gz->decl_line;
        uint32_t saved_col = ag->source_column;
        emitDbgStmt(gz, saved_line, saved_col);
        return rvalue(gz, rl,
            addUnNode(gz, ZIR_INST_OPTIONAL_PAYLOAD_SAFE, lhs, node), node);
    }
    // error_union type (AstGen.zig:788-797).
    case AST_NODE_ERROR_UNION: {
        uint32_t lhs = typeExpr(gz, scope, nd.lhs);
        uint32_t rhs = typeExpr(gz, scope, nd.rhs);
        return rvalue(gz, rl,
            addPlNodeBin(gz, ZIR_INST_ERROR_UNION_TYPE, node, lhs, rhs), node);
    }
    // char_literal (AstGen.zig:8662-8675).
    case AST_NODE_CHAR_LITERAL: {
        uint32_t main_tok = ag->tree->nodes.main_tokens[node];
        uint32_t tok_start = ag->tree->tokens.starts[main_tok];
        const char* src = ag->tree->source;
        uint32_t ci = tok_start + 1; // skip opening quote
        uint64_t char_val;
        if (src[ci] == '\\') {
            // Escape sequence (AstGen.zig:8668-8675).
            ci++;
            switch (src[ci]) {
            case 'n':
                char_val = '\n';
                break;
            case 'r':
                char_val = '\r';
                break;
            case 't':
                char_val = '\t';
                break;
            case '\\':
                char_val = '\\';
                break;
            case '\'':
                char_val = '\'';
                break;
            case '"':
                char_val = '"';
                break;
            case 'x': {
                // \xNN hex escape.
                uint8_t val = 0;
                for (int k = 0; k < 2; k++) {
                    ci++;
                    char c = src[ci];
                    if (c >= '0' && c <= '9')
                        val = (uint8_t)(val * 16 + (uint8_t)(c - '0'));
                    else if (c >= 'a' && c <= 'f')
                        val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'a'));
                    else if (c >= 'A' && c <= 'F')
                        val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'A'));
                }
                char_val = val;
                break;
            }
            case 'u': {
                // \u{NNNNNN} unicode escape (string_literal.zig:194-231).
                // Skip past '{'.
                ci++;
                uint32_t codepoint = 0;
                while (true) {
                    ci++;
                    char c = src[ci];
                    if (c >= '0' && c <= '9')
                        codepoint = codepoint * 16 + (uint32_t)(c - '0');
                    else if (c >= 'a' && c <= 'f')
                        codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'a');
                    else if (c >= 'A' && c <= 'F')
                        codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'A');
                    else
                        break; // Must be '}'.
                }
                char_val = codepoint;
                break;
            }
            default:
                char_val = (uint8_t)src[ci];
                break;
            }
        } else {
            char_val = (uint64_t)(uint8_t)src[ci];
        }
        return rvalue(gz, rl, addInt(gz, char_val), node);
    }
    // arrayAccess (AstGen.zig:6192-6221).
    case AST_NODE_ARRAY_ACCESS: {
        if (RL_IS_REF(rl)) {
            uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
            advanceSourceCursorToMainToken(ag, gz, node);
            uint32_t saved_line = ag->source_line - gz->decl_line;
            uint32_t saved_col = ag->source_column;
            uint32_t rhs = expr(gz, scope, nd.rhs);
            emitDbgStmt(gz, saved_line, saved_col);
            return addPlNodeBin(gz, ZIR_INST_ELEM_PTR_NODE, node, lhs, rhs);
        }
        uint32_t lhs = expr(gz, scope, nd.lhs);
        advanceSourceCursorToMainToken(ag, gz, node);
        uint32_t saved_line = ag->source_line - gz->decl_line;
        uint32_t saved_col = ag->source_column;
        uint32_t rhs = expr(gz, scope, nd.rhs);
        emitDbgStmt(gz, saved_line, saved_col);
        return rvalue(gz, rl,
            addPlNodeBin(gz, ZIR_INST_ELEM_VAL_NODE, node, lhs, rhs), node);
    }
    // slice (AstGen.zig:882-939).
    case AST_NODE_SLICE_OPEN: {
        // (AstGen.zig:908-937).
        uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
        advanceSourceCursorToMainToken(ag, gz, node);
        uint32_t saved_line = ag->source_line - gz->decl_line;
        uint32_t saved_col = ag->source_column;
        ResultLoc usize_rl = { .tag = RL_COERCED_TY,
            .data = ZIR_REF_USIZE_TYPE,
            .src_node = 0,
            .ctx = RI_CTX_NONE };
        uint32_t start = exprRl(gz, scope, usize_rl, nd.rhs);
        emitDbgStmt(gz, saved_line, saved_col);
        return rvalue(gz, rl,
            addPlNodeBin(gz, ZIR_INST_SLICE_START, node, lhs, start), node);
    }
    case AST_NODE_SLICE: {
        // Slice[rhs]: { start, end } (AstGen.zig:908-937).
        const Ast* stree = ag->tree;
        uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
        advanceSourceCursorToMainToken(ag, gz, node);
        uint32_t saved_line = ag->source_line - gz->decl_line;
        uint32_t saved_col = ag->source_column;
        uint32_t start_node = stree->extra_data.arr[nd.rhs];
        uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
        ResultLoc usize_rl = { .tag = RL_COERCED_TY,
            .data = ZIR_REF_USIZE_TYPE,
            .src_node = 0,
            .ctx = RI_CTX_NONE };
        uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
        uint32_t end_ref = exprRl(gz, scope, usize_rl, end_node);
        emitDbgStmt(gz, saved_line, saved_col);
        ensureExtraCapacity(ag, 3);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = lhs;
        ag->extra[ag->extra_len++] = start_ref;
        ag->extra[ag->extra_len++] = end_ref;
        ZirInstData data;
        data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        data.pl_node.payload_index = payload_index;
        return rvalue(
            gz, rl, addInstruction(gz, ZIR_INST_SLICE_END, data), node);
    }
    case AST_NODE_SLICE_SENTINEL: {
        // SliceSentinel[rhs]: { start, end, sentinel }
        // (AstGen.zig:908-925).
        const Ast* stree = ag->tree;
        uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
        advanceSourceCursorToMainToken(ag, gz, node);
        uint32_t saved_line = ag->source_line - gz->decl_line;
        uint32_t saved_col = ag->source_column;
        uint32_t start_node = stree->extra_data.arr[nd.rhs];
        uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
        uint32_t sentinel_node = stree->extra_data.arr[nd.rhs + 2];
        // start/end coerced to usize (AstGen.zig:911-912).
        ResultLoc usize_rl = { .tag = RL_COERCED_TY,
            .data = ZIR_REF_USIZE_TYPE,
            .src_node = 0,
            .ctx = RI_CTX_NONE };
        uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
        uint32_t end_ref = (end_node != 0)
            ? exprRl(gz, scope, usize_rl, end_node)
            : ZIR_REF_NONE;
        // sentinel: create slice_sentinel_ty and coerce (AstGen.zig:913-916).
        uint32_t sentinel_ty
            = addUnNode(gz, ZIR_INST_SLICE_SENTINEL_TY, lhs, node);
        ResultLoc sent_rl = { .tag = RL_COERCED_TY,
            .data = sentinel_ty,
            .src_node = 0,
            .ctx = RI_CTX_NONE };
        uint32_t sentinel_ref = exprRl(gz, scope, sent_rl, sentinel_node);
        emitDbgStmt(gz, saved_line, saved_col);
        ensureExtraCapacity(ag, 4);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = lhs;
        ag->extra[ag->extra_len++] = start_ref;
        ag->extra[ag->extra_len++] = end_ref;
        ag->extra[ag->extra_len++] = sentinel_ref;
        ZirInstData data;
        data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        data.pl_node.payload_index = payload_index;
        return rvalue(
            gz, rl, addInstruction(gz, ZIR_INST_SLICE_SENTINEL, data), node);
    }
    // orelse (AstGen.zig:6031-6142).
    case AST_NODE_ORELSE:
        return orelseCatchExpr(gz, scope, rl, node, false);
    // catch (AstGen.zig:6031-6142).
    case AST_NODE_CATCH:
        return orelseCatchExpr(gz, scope, rl, node, true);
    // Block expressions (AstGen.zig:984-992).
    case AST_NODE_BLOCK_TWO:
    case AST_NODE_BLOCK_TWO_SEMICOLON:
    case AST_NODE_BLOCK:
    case AST_NODE_BLOCK_SEMICOLON:
        return rvalue(gz, rl, blockExprExpr(gz, scope, rl, node), node);
    // Anonymous array init (AstGen.zig:1119-1127).
    case AST_NODE_ARRAY_INIT_DOT_TWO:
    case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
    case AST_NODE_ARRAY_INIT_DOT:
    case AST_NODE_ARRAY_INIT_DOT_COMMA:
        return arrayInitDotExpr(gz, scope, rl, node);
    // if (AstGen.zig:1013-1024).
    case AST_NODE_IF_SIMPLE:
    case AST_NODE_IF:
        return ifExpr(gz, scope, rlBr(rl), node);
    // for (AstGen.zig:1043-1060).
    case AST_NODE_FOR_SIMPLE:
    case AST_NODE_FOR:
        return rvalue(gz, rl, forExpr(gz, scope, node, false), node);
    // Merge error sets (AstGen.zig:788-797).
    case AST_NODE_MERGE_ERROR_SETS: {
        uint32_t lhs = typeExpr(gz, scope, nd.lhs);
        uint32_t rhs = typeExpr(gz, scope, nd.rhs);
        return rvalue(gz, rl,
            addPlNodeBin(gz, ZIR_INST_MERGE_ERROR_SETS, node, lhs, rhs), node);
    }
    // Wrapping arithmetic.
    case AST_NODE_ADD_WRAP:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADDWRAP), node);
    case AST_NODE_SUB_WRAP:
        return rvalue(
            gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUBWRAP), node);
    // break (AstGen.zig:2150-2237).
    case AST_NODE_BREAK: {
        uint32_t opt_break_label = nd.lhs; // UINT32_MAX = none
        uint32_t opt_rhs = nd.rhs; // 0 = none
        // Walk scope chain to find target block (AstGen.zig:2157-2187).
        for (Scope* s = scope; s != NULL;) {
            if (s->tag == SCOPE_GEN_ZIR) {
                GenZir* block_gz = (GenZir*)s;
                uint32_t block_inst = UINT32_MAX;
                if (opt_break_label != UINT32_MAX) {
                    // Labeled break: check label on GenZir.
                    // Use direct source text comparison, not identAsString,
                    // to avoid adding label names to string_bytes
                    // (AstGen.zig:2176 uses tokenIdentEql).
                    if (block_gz->label_token != UINT32_MAX
                        && tokenIdentEql(ag->tree, opt_break_label,
                            block_gz->label_token)) {
                        block_inst = block_gz->label_block_inst;
                    }
                } else {
                    // Unlabeled break: check break_block.
                    if (block_gz->break_block != UINT32_MAX)
                        block_inst = block_gz->break_block;
                }
                if (block_inst != UINT32_MAX) {
                    // Found target (AstGen.zig:2188-2228).
                    ZirInstTag break_tag = block_gz->is_inline
                        ? ZIR_INST_BREAK_INLINE
                        : ZIR_INST_BREAK;
                    if (opt_rhs == 0) {
                        // Void break (AstGen.zig:2195-2206).
                        rvalue(gz, block_gz->break_result_info,
                            ZIR_REF_VOID_VALUE, node);
                        genDefers(gz, s, scope, DEFER_NORMAL_ONLY);
                        if (!block_gz->is_comptime) {
                            ZirInstData rdata;
                            rdata.un_node.operand
                                = block_inst + ZIR_REF_START_INDEX;
                            rdata.un_node.src_node
                                = (int32_t)node - (int32_t)gz->decl_node_index;
                            addInstruction(gz,
                                ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL,
                                rdata);
                        }
                        addBreak(gz, break_tag, block_inst, ZIR_REF_VOID_VALUE,
                            AST_NODE_OFFSET_NONE);
                    } else {
                        // Value break (AstGen.zig:2208-2228).
                        uint32_t operand = exprRl(
                            gz, scope, block_gz->break_result_info, opt_rhs);
                        genDefers(gz, s, scope, DEFER_NORMAL_ONLY);
                        if (!block_gz->is_comptime)
                            restoreErrRetIndex(gz, block_inst,
                                block_gz->break_result_info, opt_rhs, operand);
                        switch (block_gz->break_result_info.tag) {
                        case RL_PTR:
                        case RL_DISCARD:
                            addBreak(gz, break_tag, block_inst,
                                ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
                            break;
                        default:
                            addBreak(gz, break_tag, block_inst, operand,
                                (int32_t)opt_rhs
                                    - (int32_t)gz->decl_node_index);
                            break;
                        }
                    }
                    return ZIR_REF_UNREACHABLE_VALUE;
                }
                s = block_gz->parent;
            } else if (s->tag == SCOPE_LOCAL_VAL) {
                s = ((ScopeLocalVal*)s)->parent;
            } else if (s->tag == SCOPE_LOCAL_PTR) {
                s = ((ScopeLocalPtr*)s)->parent;
            } else if (s->tag == SCOPE_DEFER_NORMAL
                || s->tag == SCOPE_DEFER_ERROR) {
                s = ((ScopeDefer*)s)->parent;
            } else if (s->tag == SCOPE_LABEL) {
                s = ((ScopeLabel*)s)->parent;
            } else {
                break;
            }
        }
        SET_ERROR(ag);
        return ZIR_REF_UNREACHABLE_VALUE;
    }
    // continue (AstGen.zig:2246-2340).
    case AST_NODE_CONTINUE: {
        // Walk scope chain to find GenZir with continue_block.
        for (Scope* s = scope; s != NULL;) {
            if (s->tag == SCOPE_GEN_ZIR) {
                GenZir* gz2 = (GenZir*)s;
                if (gz2->continue_block != UINT32_MAX) {
                    genDefers(gz, s, scope, DEFER_NORMAL_ONLY);
                    ZirInstTag break_tag = gz2->is_inline
                        ? ZIR_INST_BREAK_INLINE
                        : ZIR_INST_BREAK;
                    if (break_tag == ZIR_INST_BREAK_INLINE) {
                        // AstGen.zig:2328-2330.
                        addUnNode(gz, ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW,
                            gz2->continue_block + ZIR_REF_START_INDEX, node);
                    }
                    // Restore error return index (AstGen.zig:2333-2334).
                    if (!gz2->is_comptime) {
                        ZirInstData rdata;
                        rdata.un_node.operand
                            = gz2->continue_block + ZIR_REF_START_INDEX;
                        rdata.un_node.src_node
                            = (int32_t)node - (int32_t)gz->decl_node_index;
                        addInstruction(gz,
                            ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL,
                            rdata);
                    }
                    addBreak(gz, break_tag, gz2->continue_block,
                        ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
                    return ZIR_REF_UNREACHABLE_VALUE;
                }
                s = gz2->parent;
            } else if (s->tag == SCOPE_LOCAL_VAL) {
                s = ((ScopeLocalVal*)s)->parent;
            } else if (s->tag == SCOPE_LOCAL_PTR) {
                s = ((ScopeLocalPtr*)s)->parent;
            } else if (s->tag == SCOPE_DEFER_NORMAL
                || s->tag == SCOPE_DEFER_ERROR) {
                s = ((ScopeDefer*)s)->parent;
            } else if (s->tag == SCOPE_LABEL) {
                s = ((ScopeLabel*)s)->parent;
            } else {
                break;
            }
        }
        SET_ERROR(ag);
        return ZIR_REF_UNREACHABLE_VALUE;
    }
    // comptime (AstGen.zig:1104-1105).
    case AST_NODE_COMPTIME: {
        // comptimeExprAst / comptimeExpr2 (AstGen.zig:2104, 1982).
        uint32_t body_node = nd.lhs;
        // If already comptime, just pass through (AstGen.zig:1990-1992).
        if (gz->is_comptime)
            return exprRl(gz, scope, rl, body_node);
        // Create comptime block (AstGen.zig:2078-2098).
        uint32_t block_inst
            = makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
        GenZir block_scope = makeSubBlock(gz, scope);
        block_scope.is_comptime = true;
        // Transform RL to type-only (AstGen.zig:2084-2090).
        // Runtime-to-comptime boundary: can't pass runtime pointers.
        ResultLoc ty_only_rl;
        uint32_t res_ty = rlResultType(gz, rl, node);
        if (res_ty != 0)
            ty_only_rl = (ResultLoc) { .tag = RL_COERCED_TY,
                .data = res_ty,
                .src_node = 0,
                .ctx = rl.ctx };
        else
            ty_only_rl = (ResultLoc) {
                .tag = RL_NONE, .data = 0, .src_node = 0, .ctx = rl.ctx
            };
        uint32_t result = exprRl(&block_scope, scope, ty_only_rl, body_node);
        addBreak(&block_scope, ZIR_INST_BREAK_INLINE, block_inst, result,
            AST_NODE_OFFSET_NONE);
        setBlockComptimeBody(
            ag, &block_scope, block_inst, COMPTIME_REASON_COMPTIME_KEYWORD);
        gzAppendInstruction(gz, block_inst);
        // Apply rvalue to handle RL_PTR etc (AstGen.zig:2098).
        return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
    }
    // switch (AstGen.zig:1072-1078).
    case AST_NODE_SWITCH:
    case AST_NODE_SWITCH_COMMA:
        return switchExpr(gz, scope, rlBr(rl), node);
    // while (AstGen.zig:1037-1042).
    case AST_NODE_WHILE_SIMPLE:
    case AST_NODE_WHILE_CONT:
    case AST_NODE_WHILE:
        return rvalue(gz, rl, whileExpr(gz, scope, node, false), node);
    // error_value (AstGen.zig:1005-1010).
    case AST_NODE_ERROR_VALUE: {
        uint32_t error_token = nd.rhs;
        uint32_t str = identAsString(ag, error_token);
        return rvalue(gz, rl,
            addStrTok(gz, ZIR_INST_ERROR_VALUE, str, error_token), node);
    }
    // error_set_decl (AstGen.zig:5905-5955).
    case AST_NODE_ERROR_SET_DECL: {
        AstData esd = ag->tree->nodes.datas[node];
        uint32_t lbrace = esd.lhs;
        uint32_t rbrace = esd.rhs;
        // Reserve 1 extra word for ErrorSetDecl.fields_len.
        ensureExtraCapacity(ag, 1 + (rbrace - lbrace));
        uint32_t payload_index = ag->extra_len;
        ag->extra_len++; // placeholder for fields_len
        uint32_t fields_len = 0;
        for (uint32_t tok = lbrace + 1; tok < rbrace; tok++) {
            TokenizerTag ttag = ag->tree->tokens.tags[tok];
            if (ttag == TOKEN_DOC_COMMENT || ttag == TOKEN_COMMA)
                continue;
            if (ttag == TOKEN_IDENTIFIER) {
                uint32_t str_index = identAsString(ag, tok);
                ensureExtraCapacity(ag, 1);
                ag->extra[ag->extra_len++] = str_index;
                fields_len++;
            }
        }
        ag->extra[payload_index] = fields_len;
        return rvalue(gz, rl,
            addPlNodePayloadIndex(
                gz, ZIR_INST_ERROR_SET_DECL, node, payload_index),
            node);
    }
    // assign in expr context (AstGen.zig:1011-1014).
    case AST_NODE_ASSIGN:
        assignStmt(gz, scope, node);
        return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
    // Compound assignment operators (AstGen.zig:685-744).
    case AST_NODE_ASSIGN_ADD:
        assignOp(gz, scope, node, ZIR_INST_ADD);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_SUB:
        assignOp(gz, scope, node, ZIR_INST_SUB);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_MUL:
        assignOp(gz, scope, node, ZIR_INST_MUL);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_DIV:
        assignOp(gz, scope, node, ZIR_INST_DIV);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_MOD:
        assignOp(gz, scope, node, ZIR_INST_MOD_REM);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_BIT_AND:
        assignOp(gz, scope, node, ZIR_INST_BIT_AND);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_BIT_OR:
        assignOp(gz, scope, node, ZIR_INST_BIT_OR);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_BIT_XOR:
        assignOp(gz, scope, node, ZIR_INST_XOR);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_ADD_WRAP:
        assignOp(gz, scope, node, ZIR_INST_ADDWRAP);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_SUB_WRAP:
        assignOp(gz, scope, node, ZIR_INST_SUBWRAP);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_MUL_WRAP:
        assignOp(gz, scope, node, ZIR_INST_MULWRAP);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_ADD_SAT:
        assignOp(gz, scope, node, ZIR_INST_ADD_SAT);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_SUB_SAT:
        assignOp(gz, scope, node, ZIR_INST_SUB_SAT);
        return ZIR_REF_VOID_VALUE;
    case AST_NODE_ASSIGN_MUL_SAT:
        assignOp(gz, scope, node, ZIR_INST_MUL_SAT);
        return ZIR_REF_VOID_VALUE;
    default:
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
}
// Convenience wrapper: evaluate `node` with no result location.
static uint32_t expr(GenZir* gz, Scope* scope, uint32_t node) {
    ResultLoc none_rl = RL_NONE_VAL;
    return exprRl(gz, scope, none_rl, node);
}
// --- blockExprExpr (AstGen.zig:2388-2536) ---
// Handles block expressions (labeled and unlabeled).
// Unlabeled blocks just execute statements and return void.
// Labeled blocks (blk: { ... break :blk val; }) need a block instruction
// and compute a break result location from `rl`.
static uint32_t blockExprExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    // NOTE: `rl` is used for labeled blocks below; the previous `(void)rl;`
    // suppression was vestigial and misleading, so it has been removed.
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Extract statements from the two AST encodings: block_two stores up
    // to two statements inline; block stores an extra_data range.
    uint32_t stmt_buf[2];
    const uint32_t* statements = NULL;
    uint32_t stmt_count = 0;
    switch (tag) {
    case AST_NODE_BLOCK_TWO:
    case AST_NODE_BLOCK_TWO_SEMICOLON: {
        uint32_t idx = 0;
        if (nd.lhs != 0)
            stmt_buf[idx++] = nd.lhs;
        if (nd.rhs != 0)
            stmt_buf[idx++] = nd.rhs;
        statements = stmt_buf;
        stmt_count = idx;
        break;
    }
    case AST_NODE_BLOCK:
    case AST_NODE_BLOCK_SEMICOLON: {
        uint32_t start = nd.lhs;
        uint32_t end = nd.rhs;
        statements = tree->extra_data.arr + start;
        stmt_count = end - start;
        break;
    }
    default:
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    // Check if labeled (AstGen.zig:2397-2402).
    // A labeled block has: identifier colon before the lbrace.
    uint32_t lbrace = tree->nodes.main_tokens[node];
    bool is_labeled
        = (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
            && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER);
    if (!is_labeled) {
        if (!gz->is_comptime) {
            // Non-comptime unlabeled block (AstGen.zig:2404-2425).
            // Create block_inst FIRST, add to gz, then process body.
            uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
            gzAppendInstruction(gz, block_inst);
            GenZir block_scope = makeSubBlock(gz, scope);
            blockExprStmts(
                &block_scope, &block_scope.base, statements, stmt_count);
            if (!endsWithNoReturn(&block_scope)) {
                // restore_err_ret_index on gz (AstGen.zig:2420).
                ZirInstData rdata;
                rdata.un_node.operand = block_inst + ZIR_REF_START_INDEX;
                rdata.un_node.src_node
                    = (int32_t)node - (int32_t)gz->decl_node_index;
                addInstruction(
                    gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
                // break on block_scope (AstGen.zig:2422).
                addBreak(&block_scope, ZIR_INST_BREAK, block_inst,
                    ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
            }
            setBlockBody(ag, &block_scope, block_inst);
        } else {
            // Comptime unlabeled block: inline statements
            // (AstGen.zig:2426-2429).
            GenZir sub_gz = makeSubBlock(gz, scope);
            blockExprStmts(&sub_gz, &sub_gz.base, statements, stmt_count);
        }
        return ZIR_REF_VOID_VALUE;
    }
    // Labeled block (AstGen.zig:2466-2536).
    // Note: upstream blockExpr always passes force_comptime=false.
    uint32_t label_token = lbrace - 2;
    // Compute break result info (AstGen.zig:2484-2492).
    bool need_rl = nodesNeedRlContains(ag, node);
    ResultLoc break_ri = breakResultInfo(gz, rl, node, need_rl);
    bool need_result_rvalue = (break_ri.tag != rl.tag);
    // Reserve the block instruction (AstGen.zig:2500-2501).
    uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
    gzAppendInstruction(gz, block_inst);
    GenZir block_scope = makeSubBlock(gz, scope);
    // Set label on block_scope so `break :label` can find it
    // (AstGen.zig:2504-2508).
    block_scope.label_token = label_token;
    block_scope.label_block_inst = block_inst;
    block_scope.break_result_info = break_ri;
    // Process statements (AstGen.zig:2512).
    blockExprStmts(&block_scope, &block_scope.base, statements, stmt_count);
    if (!endsWithNoReturn(&block_scope)) {
        // Emit restore_err_ret_index (AstGen.zig:2515).
        ZirInstData rdata;
        rdata.un_node.operand = block_inst + ZIR_REF_START_INDEX;
        rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        addInstruction(
            gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
        // rvalue + break (AstGen.zig:2516-2518).
        uint32_t result = rvalue(
            gz, block_scope.break_result_info, ZIR_REF_VOID_VALUE, node);
        addBreak(&block_scope, ZIR_INST_BREAK, block_inst, result,
            AST_NODE_OFFSET_NONE);
    }
    setBlockBody(ag, &block_scope, block_inst);
    // AstGen.zig:2531-2534.
    if (need_result_rvalue)
        return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
    return block_inst + ZIR_REF_START_INDEX;
}
// --- arrayInitDotExpr (AstGen.zig:1576-1595) ---
// Handles anonymous array init: `.{a, b, c}`.
// Emits array_init_anon instruction with MultiOp payload.

// Emits the untyped `array_init_anon` MultiOp for `elements` and returns
// the resulting ref (arrayInitExprAnon, AstGen.zig:1576-1595).
// Shared by the RL_NONE, RL_INFERRED_PTR, RL_REF, and fallback paths of
// arrayInitDotExpr, which previously repeated this sequence verbatim.
// Payload layout: [operands_len, operand0, operand1, ...].
static uint32_t arrayInitDotAnon(GenZir* gz, Scope* scope, uint32_t node,
    const uint32_t* elements, uint32_t elem_count) {
    AstGenCtx* ag = gz->astgen;
    // Reserve the operand slots up front; evaluating an element may append
    // more extra data (and may realloc ag->extra), so results are written
    // back through the saved index, never a cached pointer.
    ensureExtraCapacity(ag, 1 + elem_count);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = elem_count;
    uint32_t extra_start = ag->extra_len;
    ag->extra_len += elem_count;
    for (uint32_t i = 0; i < elem_count; i++) {
        uint32_t elem_ref = expr(gz, scope, elements[i]);
        ag->extra[extra_start + i] = elem_ref;
    }
    return addPlNodePayloadIndex(
        gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
}

// Lowers an anonymous array initializer `.{a, b, c}` according to the
// result location `rl` (dispatch mirrors AstGen.zig:1515-1572).
// Returns the ref of the init result (void for the discard/ptr forms).
static uint32_t arrayInitDotExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Extract elements. The *_DOT_TWO forms carry up to two element nodes
    // inline (0 = absent); the *_DOT forms reference an extra_data range.
    uint32_t elem_buf[2];
    const uint32_t* elements = NULL;
    uint32_t elem_count = 0;
    switch (tag) {
    case AST_NODE_ARRAY_INIT_DOT_TWO:
    case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: {
        uint32_t idx = 0;
        if (nd.lhs != 0)
            elem_buf[idx++] = nd.lhs;
        if (nd.rhs != 0)
            elem_buf[idx++] = nd.rhs;
        elements = elem_buf;
        elem_count = idx;
        break;
    }
    case AST_NODE_ARRAY_INIT_DOT:
    case AST_NODE_ARRAY_INIT_DOT_COMMA: {
        uint32_t start = nd.lhs;
        uint32_t end = nd.rhs;
        elements = tree->extra_data.arr + start;
        elem_count = end - start;
        break;
    }
    default:
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    // Dispatch based on RL (AstGen.zig:1515-1572).
    switch (rl.tag) {
    case RL_NONE:
        // arrayInitExprAnon (AstGen.zig:1576-1595).
        return arrayInitDotAnon(gz, scope, node, elements, elem_count);
    case RL_TY:
    case RL_COERCED_TY: {
        // validate_array_init_result_ty + arrayInitExprTyped
        // (AstGen.zig:1534-1539).
        uint32_t result_ty = rl.data;
        // Emit ArrayInit { ty, init_count } payload for
        // validate_array_init_result_ty.
        ensureExtraCapacity(ag, 2);
        uint32_t val_payload = ag->extra_len;
        ag->extra[ag->extra_len++] = result_ty;
        ag->extra[ag->extra_len++] = elem_count;
        addPlNodePayloadIndex(
            gz, ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY, node, val_payload);
        // arrayInitExprTyped (AstGen.zig:1598-1642) with elem_ty=none.
        uint32_t operands_len = elem_count + 1; // +1 for type
        ensureExtraCapacity(ag, 1 + operands_len);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = operands_len;
        ag->extra[ag->extra_len++] = result_ty;
        uint32_t extra_start = ag->extra_len;
        ag->extra_len += elem_count;
        for (uint32_t i = 0; i < elem_count; i++) {
            // array_init_elem_type uses bin data (AstGen.zig:1626-1632).
            uint32_t elem_ty
                = addBin(gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, result_ty, i);
            ResultLoc elem_rl
                = { .tag = RL_COERCED_TY, .data = elem_ty, .src_node = 0 };
            uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
            ag->extra[extra_start + i] = elem_ref;
        }
        return addPlNodePayloadIndex(
            gz, ZIR_INST_ARRAY_INIT, node, payload_index);
    }
    case RL_INFERRED_PTR: {
        // arrayInitExprAnon + rvalue (AstGen.zig:1545-1551).
        uint32_t result
            = arrayInitDotAnon(gz, scope, node, elements, elem_count);
        return rvalue(gz, rl, result, node);
    }
    case RL_DISCARD: {
        // Evaluate and discard each element (AstGen.zig:1517-1522).
        for (uint32_t i = 0; i < elem_count; i++) {
            exprRl(gz, scope, RL_DISCARD_VAL, elements[i]);
        }
        return ZIR_REF_VOID_VALUE;
    }
    case RL_REF: {
        // arrayInitExprAnon + ref (AstGen.zig:1523-1526).
        uint32_t result
            = arrayInitDotAnon(gz, scope, node, elements, elem_count);
        return rvalue(gz, rl, result, node);
    }
    case RL_REF_COERCED_TY: {
        // validate_array_init_ref_ty + arrayInitExprTyped
        // (AstGen.zig:1527-1532).
        uint32_t ptr_ty_inst = rl.data;
        ensureExtraCapacity(ag, 2);
        uint32_t val_payload = ag->extra_len;
        ag->extra[ag->extra_len++] = ptr_ty_inst;
        ag->extra[ag->extra_len++] = elem_count;
        uint32_t dest_arr_ty_inst = addPlNodePayloadIndex(
            gz, ZIR_INST_VALIDATE_ARRAY_INIT_REF_TY, node, val_payload);
        // arrayInitExprTyped with elem_ty=none, is_ref=true.
        uint32_t operands_len = elem_count + 1;
        ensureExtraCapacity(ag, 1 + operands_len);
        uint32_t ai_payload = ag->extra_len;
        ag->extra[ag->extra_len++] = operands_len;
        ag->extra[ag->extra_len++] = dest_arr_ty_inst;
        uint32_t extra_start2 = ag->extra_len;
        ag->extra_len += elem_count;
        for (uint32_t i = 0; i < elem_count; i++) {
            // array_init_elem_type uses bin data (AstGen.zig:1626-1632).
            uint32_t elem_ty = addBin(
                gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, dest_arr_ty_inst, i);
            ResultLoc elem_rl
                = { .tag = RL_COERCED_TY, .data = elem_ty, .src_node = 0 };
            uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
            ag->extra[extra_start2 + i] = elem_ref;
        }
        return addPlNodePayloadIndex(
            gz, ZIR_INST_ARRAY_INIT_REF, node, ai_payload);
    }
    case RL_PTR: {
        // arrayInitExprPtr (AstGen.zig:1541-1543, 1645-1672).
        uint32_t array_ptr_inst
            = addUnNode(gz, ZIR_INST_OPT_EU_BASE_PTR_INIT, rl.data, node);
        // Block payload: body_len = elem_count.
        ensureExtraCapacity(ag, 1 + elem_count);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = elem_count;
        uint32_t items_start = ag->extra_len;
        ag->extra_len += elem_count;
        for (uint32_t i = 0; i < elem_count; i++) {
            // array_init_elem_ptr: ElemPtrImm{ptr, index}.
            uint32_t elem_ptr_inst = addPlNodeBin(gz,
                ZIR_INST_ARRAY_INIT_ELEM_PTR, elements[i], array_ptr_inst, i);
            ag->extra[items_start + i]
                = elem_ptr_inst - ZIR_REF_START_INDEX; // .toIndex()
            // Evaluate element with ptr RL (AstGen.zig:1668).
            ResultLoc ptr_rl = { .tag = RL_PTR,
                .data = elem_ptr_inst,
                .src_node = 0,
                .ctx = rl.ctx };
            exprRl(gz, scope, ptr_rl, elements[i]);
        }
        addPlNodePayloadIndex(
            gz, ZIR_INST_VALIDATE_PTR_ARRAY_INIT, node, payload_index);
        return ZIR_REF_VOID_VALUE;
    }
    }
    // Fallback for any RL tag not handled above: anon init + rvalue.
    uint32_t result = arrayInitDotAnon(gz, scope, node, elements, elem_count);
    return rvalue(gz, rl, result, node);
}
// --- ifExpr (AstGen.zig:6300-6528) ---
// Handles if and if_simple expressions.
// Pattern: block_scope with condbr → then/else branches → setCondBrPayload.
//
// Lowers the three condition forms:
//   - plain bool:        `if (cond) ...`
//   - optional payload:  `if (opt) |x| ...`           (is_non_null check)
//   - error union:       `if (eu) |x| ... else |e| ..` (is_non_err check)
// The result is a ZIR block whose condbr branches each break back to the
// block. Returns the ref of that block, passed through rvalue() when the
// break result-location differs from the caller-requested `rl`.
static uint32_t ifExpr(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    bool need_rl = nodesNeedRlContains(ag, node);
    ResultLoc break_rl = breakResultInfo(gz, rl, node, need_rl);
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    uint32_t cond_node = nd.lhs;
    uint32_t then_node, else_node;
    if (tag == AST_NODE_IF_SIMPLE) {
        then_node = nd.rhs;
        else_node = 0; // 0 = no else branch
    } else {
        // AST_NODE_IF: rhs is index into extra → If{then_expr, else_expr}
        then_node = tree->extra_data.arr[nd.rhs];
        else_node = tree->extra_data.arr[nd.rhs + 1];
    }
    // Detect payload capture: if (cond) |x| (AstGen.zig Ast.fullIf).
    // payload_pipe = lastToken(cond_expr) + 2; if pipe → payload_token + 1.
    // NOTE(review): this re-derives the capture by raw token arithmetic
    // (mirroring Ast.fullIf's token layout) instead of storing it in the
    // AST node — relies on the `)` token directly following the condition.
    uint32_t payload_token = 0; // 0 = no payload
    uint32_t last_cond_tok = lastToken(tree, cond_node);
    uint32_t pipe_tok = last_cond_tok + 2;
    if (pipe_tok < tree->tokens.len
        && tree->tokens.tags[pipe_tok] == TOKEN_PIPE) {
        payload_token = pipe_tok + 1; // identifier token
    }
    // Detect error token: then_expr else |e| (AstGen.zig Ast.fullIf).
    uint32_t error_token = 0; // 0 = no error capture
    if (else_node != 0) {
        uint32_t else_tok = lastToken(tree, then_node) + 1; // "else" keyword
        if (else_tok + 1 < tree->tokens.len
            && tree->tokens.tags[else_tok + 1] == TOKEN_PIPE) {
            error_token = else_tok + 2;
        }
    }
    // Create block_scope (AstGen.zig:6326-6328).
    GenZir block_scope = makeSubBlock(gz, scope);
    // Emit DBG_STMT for condition (AstGen.zig:6335).
    // NOTE: upstream emits into parent_gz AFTER block_scope is created,
    // so the dbg_stmt ends up in block_scope's range (shared array).
    emitDbgNode(gz, cond_node);
    // Evaluate condition (AstGen.zig:6335-6363).
    uint32_t cond_inst; // the value (optional/err-union/bool)
    uint32_t bool_bit; // the boolean for condbr
    if (error_token != 0) {
        // Error union condition: if (err_union) |val| else |err|.
        // (AstGen.zig:6341).
        ResultLoc cond_rl = RL_NONE_VAL;
        cond_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
        cond_inst
            = exprRl(&block_scope, &block_scope.base, cond_rl, cond_node);
        bool_bit = addUnNode(
            &block_scope, ZIR_INST_IS_NON_ERR, cond_inst, cond_node);
    } else if (payload_token != 0) {
        // Optional condition: if (optional) |val|.
        cond_inst = expr(&block_scope, &block_scope.base, cond_node);
        bool_bit = addUnNode(
            &block_scope, ZIR_INST_IS_NON_NULL, cond_inst, cond_node);
    } else {
        // Bool condition (AstGen.zig:6356-6362).
        cond_inst = expr(&block_scope, &block_scope.base, cond_node);
        bool_bit = cond_inst;
    }
    // condbr terminates block_scope; the block itself is reserved in the
    // parent and block_scope's instructions become its body.
    uint32_t condbr = addCondBr(&block_scope, ZIR_INST_CONDBR, node);
    uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
    setBlockBody(ag, &block_scope, block_inst);
    gzAppendInstruction(gz, block_inst);
    // Then branch (AstGen.zig:6372-6441).
    GenZir then_scope = makeSubBlock(gz, scope);
    Scope* then_sub_scope = &then_scope.base;
    // Stack storage for the capture's local-val scope; only linked into
    // the scope chain (via then_sub_scope) when a capture exists.
    ScopeLocalVal payload_val_scope;
    memset(&payload_val_scope, 0, sizeof(payload_val_scope));
    if (error_token != 0 && payload_token != 0) {
        // Error union with payload: unwrap payload (AstGen.zig:6379-6407).
        uint32_t payload_inst = addUnNode(&then_scope,
            ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE, cond_inst, then_node);
        uint32_t ident_name = identAsString(ag, payload_token);
        payload_val_scope = (ScopeLocalVal) {
            .base = { .tag = SCOPE_LOCAL_VAL },
            .parent = &then_scope.base,
            .gen_zir = &then_scope,
            .inst = payload_inst,
            .token_src = payload_token,
            .name = ident_name,
        };
        addDbgVar(&then_scope, ZIR_INST_DBG_VAR_VAL, ident_name, payload_inst);
        then_sub_scope = &payload_val_scope.base;
    } else if (payload_token != 0) {
        // Optional with payload: unwrap optional (AstGen.zig:6408-6431).
        uint32_t payload_inst = addUnNode(&then_scope,
            ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE, cond_inst, then_node);
        uint32_t ident_name = identAsString(ag, payload_token);
        payload_val_scope = (ScopeLocalVal) {
            .base = { .tag = SCOPE_LOCAL_VAL },
            .parent = &then_scope.base,
            .gen_zir = &then_scope,
            .inst = payload_inst,
            .token_src = payload_token,
            .name = ident_name,
        };
        addDbgVar(&then_scope, ZIR_INST_DBG_VAR_VAL, ident_name, payload_inst);
        then_sub_scope = &payload_val_scope.base;
    }
    // Use fullBodyExpr for then body (AstGen.zig:6437).
    uint32_t then_result
        = fullBodyExpr(&then_scope, then_sub_scope, break_rl, then_node);
    if (!endsWithNoReturn(&then_scope)) {
        addBreak(&then_scope, ZIR_INST_BREAK, block_inst, then_result,
            (int32_t)then_node - (int32_t)gz->decl_node_index);
    }
    // Else branch (AstGen.zig:6443-6489).
    GenZir else_scope = makeSubBlock(gz, scope);
    // save_err_ret_index (AstGen.zig:6448-6449).
    // Error-trace maintenance only applies when inside a function
    // (fn_ret_ty != 0) and the condition is an error union.
    bool do_err_trace = ag->fn_ret_ty != 0 && error_token != 0;
    if (do_err_trace && nodeMayAppendToErrorTrace(tree, cond_node))
        addSaveErrRetIndex(&else_scope, ZIR_REF_NONE);
    if (else_node != 0) {
        Scope* else_sub_scope = &else_scope.base;
        ScopeLocalVal error_val_scope;
        memset(&error_val_scope, 0, sizeof(error_val_scope));
        if (error_token != 0) {
            // Error capture: else |err| (AstGen.zig:6452-6475).
            uint32_t err_inst = addUnNode(
                &else_scope, ZIR_INST_ERR_UNION_CODE, cond_inst, cond_node);
            uint32_t err_name = identAsString(ag, error_token);
            error_val_scope = (ScopeLocalVal) {
                .base = { .tag = SCOPE_LOCAL_VAL },
                .parent = &else_scope.base,
                .gen_zir = &else_scope,
                .inst = err_inst,
                .token_src = error_token,
                .name = err_name,
            };
            addDbgVar(&else_scope, ZIR_INST_DBG_VAR_VAL, err_name, err_inst);
            else_sub_scope = &error_val_scope.base;
        }
        // Use fullBodyExpr for else body (AstGen.zig:6478).
        uint32_t else_result
            = fullBodyExpr(&else_scope, else_sub_scope, break_rl, else_node);
        if (!endsWithNoReturn(&else_scope)) {
            // Restore error return index (AstGen.zig:6480-6482).
            if (do_err_trace)
                restoreErrRetIndex(
                    &else_scope, block_inst, break_rl, else_node, else_result);
            addBreak(&else_scope, ZIR_INST_BREAK, block_inst, else_result,
                (int32_t)else_node - (int32_t)gz->decl_node_index);
        }
    } else {
        // No else: the implicit branch yields void.
        addBreak(&else_scope, ZIR_INST_BREAK, block_inst, ZIR_REF_VOID_VALUE,
            AST_NODE_OFFSET_NONE);
    }
    // Wire up condbr (AstGen.zig:6491).
    setCondBrPayload(ag, condbr, bool_bit, &then_scope, &else_scope);
    // AstGen.zig:6493-6497.
    bool need_result_rvalue = (break_rl.tag != rl.tag);
    if (need_result_rvalue)
        return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
    return block_inst + ZIR_REF_START_INDEX;
}
// --- forExpr (AstGen.zig:6819-7125) ---
// Handles for_simple and for (multi-input).
// Supports both indexable and for_range inputs.
//
// Lowering shape: allocate a usize index counter, compute `for_len` over
// all inputs, then emit
//   loop { cond_block { index < len → condbr }, index += 1, repeat }
// where the condbr's then-branch binds the per-input captures and runs
// the body, and the else-branch breaks out of the loop.
//
// NOTE(review): FOR_MAX_INPUTS is a cap imposed by this port (inputs
// beyond it are rejected via SET_ERROR); upstream has no such fixed cap.
#define FOR_MAX_INPUTS 16
static uint32_t forExpr(
    GenZir* gz, Scope* scope, uint32_t node, bool is_statement) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];
    AstNodeTag node_tag = tree->nodes.tags[node];
    // Detect inline keyword (AstGen.zig:6847).
    uint32_t main_token = tree->nodes.main_tokens[node];
    bool is_inline = (main_token > 0
        && tree->tokens.tags[main_token - 1] == TOKEN_KEYWORD_INLINE);
    // Extract input nodes and body/else nodes.
    // FOR_SIMPLE: lhs = input node, rhs = body (Ast.zig:1960-1968).
    // FOR: lhs = extra_data index, rhs = packed AstFor (Ast.zig:1970-1981).
    uint32_t input_nodes[FOR_MAX_INPUTS];
    uint32_t num_inputs;
    uint32_t body_node;
    if (node_tag == AST_NODE_FOR_SIMPLE) {
        input_nodes[0] = nd.lhs;
        num_inputs = 1;
        body_node = nd.rhs;
    } else {
        uint32_t extra_idx = nd.lhs;
        AstFor for_data;
        // rhs is a bit-packed AstFor; memcpy avoids aliasing issues.
        memcpy(&for_data, &nd.rhs, sizeof(AstFor));
        num_inputs = for_data.inputs;
        if (num_inputs == 0 || num_inputs > FOR_MAX_INPUTS) {
            SET_ERROR(ag);
            return ZIR_REF_VOID_VALUE;
        }
        for (uint32_t i = 0; i < num_inputs; i++)
            input_nodes[i] = tree->extra_data.arr[extra_idx + i];
        body_node = tree->extra_data.arr[extra_idx + num_inputs];
    }
    // Per-input arrays (AstGen.zig:6858-6862).
    // indexables[i]: the indexable ref, or the range start (ZIR_REF_NONE
    //   means "counter starting at 0", so the raw index is used directly).
    // lens[i]: the operand pair fed to for_len — {indexable, none} for an
    //   indexable, {start, end} for a bounded range, {none, none} for an
    //   unbounded range.
    uint32_t indexables[FOR_MAX_INPUTS];
    uint32_t lens[FOR_MAX_INPUTS][2]; // [ref0, ref1] per input
    // Allocate index counter (AstGen.zig:6865-6874).
    ZirInstTag alloc_tag
        = is_inline ? ZIR_INST_ALLOC_COMPTIME_MUT : ZIR_INST_ALLOC;
    uint32_t index_ptr = addUnNode(gz, alloc_tag, ZIR_REF_USIZE_TYPE, node);
    addPlNodeBin(gz, ZIR_INST_STORE_NODE, node, index_ptr, ZIR_REF_ZERO_USIZE);
    // Compute payload_token (AstGen.zig fullForComponents:2349-2350).
    // payload_token = lastToken(inputs[last]) + 3 + has_comma
    uint32_t last_cond_tok = lastToken(tree, input_nodes[num_inputs - 1]);
    bool has_comma = (last_cond_tok + 1 < tree->tokens.len
        && tree->tokens.tags[last_cond_tok + 1] == TOKEN_COMMA);
    uint32_t payload_token = last_cond_tok + 3 + (has_comma ? 1 : 0);
    // Process each input (AstGen.zig:6878-6925).
    uint32_t capture_token = payload_token;
    for (uint32_t i = 0; i < num_inputs; i++) {
        uint32_t input = input_nodes[i];
        // Advance capture_token past this capture's ident (+comma).
        bool capture_is_ref
            = (tree->tokens.tags[capture_token] == TOKEN_ASTERISK);
        uint32_t ident_tok = capture_token + (capture_is_ref ? 1u : 0u);
        capture_token = ident_tok + 2; // skip ident + comma/pipe
        emitDbgNode(gz, input);
        if (tree->nodes.tags[input] == AST_NODE_FOR_RANGE) {
            // Range input (AstGen.zig:6892-6916).
            AstData range_nd = tree->nodes.datas[input];
            uint32_t start_node = range_nd.lhs;
            uint32_t end_node = range_nd.rhs;
            // AstGen.zig:6897-6902: expr with .rl = .{ .ty = .usize_type }
            ResultLoc usize_rl
                = { .tag = RL_TY, .data = ZIR_REF_USIZE_TYPE, .src_node = 0 };
            uint32_t start_val = exprRl(gz, scope, usize_rl, start_node);
            uint32_t end_val = ZIR_REF_NONE;
            if (end_node != 0) {
                end_val = exprRl(gz, scope, usize_rl, end_node);
            }
            if (end_val == ZIR_REF_NONE) {
                // Unbounded range contributes no length.
                lens[i][0] = ZIR_REF_NONE;
                lens[i][1] = ZIR_REF_NONE;
            } else {
                lens[i][0] = start_val;
                lens[i][1] = end_val;
            }
            // Check if start is trivially zero.
            // (A literal `0` not followed by another digit, so the
            // capture can use the loop index directly.)
            bool start_is_zero = false;
            if (tree->nodes.tags[start_node] == AST_NODE_NUMBER_LITERAL) {
                uint32_t tok = tree->nodes.main_tokens[start_node];
                uint32_t ts = tree->tokens.starts[tok];
                if (tree->source[ts] == '0'
                    && (ts + 1 >= tree->source_len
                        || tree->source[ts + 1] < '0'
                        || tree->source[ts + 1] > '9'))
                    start_is_zero = true;
            }
            indexables[i] = start_is_zero ? ZIR_REF_NONE : start_val;
        } else {
            // Regular indexable (AstGen.zig:6918-6923).
            uint32_t indexable = expr(gz, scope, input);
            indexables[i] = indexable;
            lens[i][0] = indexable;
            lens[i][1] = ZIR_REF_NONE;
        }
    }
    // Emit for_len as MultiOp (AstGen.zig:6933-6942).
    // for_len receives the {ref0, ref1} pair per input and computes the
    // common iteration count (checking lengths agree at runtime).
    uint32_t len;
    {
        uint32_t operands_len = num_inputs * 2;
        ensureExtraCapacity(ag, 1 + operands_len);
        uint32_t payload_index = ag->extra_len;
        ag->extra[ag->extra_len++] = operands_len;
        for (uint32_t i = 0; i < num_inputs; i++) {
            ag->extra[ag->extra_len++] = lens[i][0];
            ag->extra[ag->extra_len++] = lens[i][1];
        }
        ZirInstData data;
        data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
        data.pl_node.payload_index = payload_index;
        len = addInstruction(gz, ZIR_INST_FOR_LEN, data);
    }
    // Create loop (AstGen.zig:6944-6956).
    ZirInstTag loop_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_LOOP;
    uint32_t loop_inst = makeBlockInst(ag, loop_tag, gz, node);
    GenZir loop_scope = makeSubBlock(gz, scope);
    loop_scope.is_inline = is_inline;
    // Load index (AstGen.zig:6955-6956).
    // We need to finish loop_scope later once we have the deferred refs from
    // then_scope. However, the load must be removed from instructions in the
    // meantime or it appears to be part of parent_gz.
    uint32_t index = addUnNode(&loop_scope, ZIR_INST_LOAD, index_ptr, node);
    ag->scratch_inst_len--; // pop from loop_scope (AstGen.zig:6956)
    // Condition: added to cond_scope (AstGen.zig:6958-6962).
    GenZir cond_scope = makeSubBlock(gz, &loop_scope.base);
    uint32_t cond
        = addPlNodeBin(&cond_scope, ZIR_INST_CMP_LT, node, index, len);
    // Create condbr + block (AstGen.zig:6967-6974).
    ZirInstTag condbr_tag
        = is_inline ? ZIR_INST_CONDBR_INLINE : ZIR_INST_CONDBR;
    uint32_t condbr = addCondBr(&cond_scope, condbr_tag, node);
    ZirInstTag block_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_BLOCK;
    uint32_t cond_block = makeBlockInst(ag, block_tag, &loop_scope, node);
    setBlockBody(ag, &cond_scope, cond_block);
    // break/continue targets, read by break/continue lowering through the
    // scope chain rooted at loop_scope.
    loop_scope.break_block = loop_inst;
    loop_scope.continue_block = cond_block; // AstGen.zig:6974
    // Then branch: loop body (AstGen.zig:6982-7065).
    GenZir then_scope = makeSubBlock(gz, &cond_scope.base);
    // Set up capture scopes for all inputs (AstGen.zig:6986-7045).
    // Entries for `_` discards are left untouched — safe because they are
    // never linked into the scope chain below.
    ScopeLocalVal capture_scopes[FOR_MAX_INPUTS];
    Scope* body_scope_parent = &then_scope.base;
    {
        // Second token walk over the capture list, same stepping as the
        // input-processing loop above.
        capture_token = payload_token;
        for (uint32_t i = 0; i < num_inputs; i++) {
            uint32_t input = input_nodes[i];
            bool capture_is_ref
                = (tree->tokens.tags[capture_token] == TOKEN_ASTERISK);
            uint32_t ident_tok = capture_token + (capture_is_ref ? 1u : 0u);
            capture_token = ident_tok + 2;
            // Check if discard (AstGen.zig:6999).
            // True when the identifier is exactly `_` (next byte is not an
            // identifier character).
            uint32_t ts = tree->tokens.starts[ident_tok];
            bool is_discard = (tree->source[ts] == '_'
                && (ts + 1 >= tree->source_len
                    || !((tree->source[ts + 1] >= 'a'
                             && tree->source[ts + 1] <= 'z')
                        || (tree->source[ts + 1] >= 'A'
                            && tree->source[ts + 1] <= 'Z')
                        || tree->source[ts + 1] == '_'
                        || (tree->source[ts + 1] >= '0'
                            && tree->source[ts + 1] <= '9'))));
            if (is_discard)
                continue;
            // Compute capture inst (AstGen.zig:7004-7028).
            uint32_t capture_inst;
            bool is_counter = (tree->nodes.tags[input] == AST_NODE_FOR_RANGE);
            if (indexables[i] == ZIR_REF_NONE) {
                // Start=0 counter: use index directly.
                capture_inst = index;
            } else if (is_counter) {
                // Counter with nonzero start: add.
                capture_inst = addPlNodeBin(
                    &then_scope, ZIR_INST_ADD, input, indexables[i], index);
            } else if (capture_is_ref) {
                // Indexable by ref: elem_ptr.
                capture_inst = addPlNodeBin(&then_scope, ZIR_INST_ELEM_PTR,
                    input, indexables[i], index);
            } else {
                // Indexable by val: elem_val.
                capture_inst = addPlNodeBin(&then_scope, ZIR_INST_ELEM_VAL,
                    input, indexables[i], index);
            }
            uint32_t name_str = identAsString(ag, ident_tok);
            capture_scopes[i] = (ScopeLocalVal) {
                .base = { .tag = SCOPE_LOCAL_VAL },
                .parent = body_scope_parent,
                .gen_zir = &then_scope,
                .inst = capture_inst,
                .token_src = ident_tok,
                .name = name_str,
            };
            // AstGen.zig:7040.
            addDbgVar(
                &then_scope, ZIR_INST_DBG_VAR_VAL, name_str, capture_inst);
            body_scope_parent = &capture_scopes[i].base;
        }
    }
    // Execute body (AstGen.zig:7047-7048).
    uint32_t then_result
        = fullBodyExpr(&then_scope, body_scope_parent, RL_NONE_VAL, body_node);
    addEnsureResult(&then_scope, then_result, body_node);
    // dbg_stmt + dbg_empty_stmt (AstGen.zig:7052-7061).
    // Appended via gz while then_scope is stacked; because sub-blocks share
    // the parent's instruction array, these land in then_scope's range.
    advanceSourceCursor(ag, tree->tokens.starts[lastToken(tree, body_node)]);
    emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column);
    {
        ZirInstData ext_data;
        ext_data.extended.opcode = (uint16_t)ZIR_EXT_DBG_EMPTY_STMT;
        ext_data.extended.small = 0;
        ext_data.extended.operand = 0;
        addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
    }
    ZirInstTag break_tag = is_inline ? ZIR_INST_BREAK_INLINE : ZIR_INST_BREAK;
    addBreak(&then_scope, break_tag, cond_block, ZIR_REF_VOID_VALUE,
        AST_NODE_OFFSET_NONE);
    // Else branch: break out of loop (AstGen.zig:7066-7091).
    GenZir else_scope = makeSubBlock(gz, &cond_scope.base);
    addBreak(&else_scope, break_tag, loop_inst, ZIR_REF_VOID_VALUE,
        AST_NODE_OFFSET_NONE);
    setCondBrPayload(ag, condbr, cond, &then_scope, &else_scope);
    // then_scope and else_scope unstacked now. Resurrect loop_scope to
    // finally finish it (AstGen.zig:7095-7113).
    {
        // Reset loop_scope instructions and re-add index + cond_block.
        loop_scope.instructions_top = ag->scratch_inst_len;
        gzAppendInstruction(&loop_scope, index - ZIR_REF_START_INDEX);
        gzAppendInstruction(&loop_scope, cond_block);
        // Increment the index variable (AstGen.zig:7100-7108).
        uint32_t index_plus_one = addPlNodeBin(
            &loop_scope, ZIR_INST_ADD_UNSAFE, node, index, ZIR_REF_ONE_USIZE);
        addPlNodeBin(
            &loop_scope, ZIR_INST_STORE_NODE, node, index_ptr, index_plus_one);
        // Repeat (AstGen.zig:7110-7111).
        ZirInstTag repeat_tag
            = is_inline ? ZIR_INST_REPEAT_INLINE : ZIR_INST_REPEAT;
        ZirInstData repeat_data;
        memset(&repeat_data, 0, sizeof(repeat_data));
        repeat_data.node = (int32_t)node - (int32_t)loop_scope.decl_node_index;
        addInstruction(&loop_scope, repeat_tag, repeat_data);
        setBlockBody(ag, &loop_scope, loop_inst);
    }
    gzAppendInstruction(gz, loop_inst);
    uint32_t result = loop_inst + ZIR_REF_START_INDEX;
    // Emit ensure_result_used when used as statement (AstGen.zig:7121-7123).
    if (is_statement) {
        addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, result, node);
    }
    return result;
}
// --- orelseCatchExpr (AstGen.zig:6031-6142) ---
// Lowers `lhs orelse rhs` (is_catch = false) and `lhs catch rhs`
// (is_catch = true) into a ZIR block: a check sub-block evaluates the
// operand and branches via condbr; the then-branch unwraps the payload,
// the else-branch evaluates the fallback expression. Both break back to
// the result block, whose ref is returned (through rvalue() when the
// break result-location differs from the requested `rl`).
static uint32_t orelseCatchExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_catch) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData operands = tree->nodes.datas[node];
    uint32_t lhs_node = operands.lhs;
    uint32_t rhs_node = operands.rhs;
    // Error-return tracing only applies to `catch` inside a function.
    bool do_err_trace = is_catch && ag->fn_ret_ty != 0;
    // Pick the result location used by the branch breaks
    // (breakResultInfo, AstGen.zig:6046-6058).
    ResultLoc break_rl
        = breakResultInfo(gz, rl, node, nodesNeedRlContains(ag, node));
    bool need_result_rvalue = (break_rl.tag != rl.tag);
    // The operand evaluation + check live in a sub-block
    // (AstGen.zig:6062-6075).
    GenZir check_scope = makeSubBlock(gz, scope);
    ResultLoc lhs_rl = RL_NONE_VAL;
    if (do_err_trace)
        lhs_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
    uint32_t operand
        = exprRl(&check_scope, &check_scope.base, lhs_rl, lhs_node);
    ZirInstTag check_tag;
    if (is_catch)
        check_tag = ZIR_INST_IS_NON_ERR;
    else
        check_tag = ZIR_INST_IS_NON_NULL;
    uint32_t ok_bit = addUnNode(&check_scope, check_tag, operand, node);
    // condbr terminates the check sub-block (AstGen.zig:6076).
    uint32_t cond_br = addCondBr(&check_scope, ZIR_INST_CONDBR, node);
    // Reserve the result block in the parent gz (AstGen.zig:6078-6081).
    uint32_t result_block = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
    setBlockBody(ag, &check_scope, result_block); // check_scope unstacked
    gzAppendInstruction(gz, result_block);
    // Then branch: unwrap payload (AstGen.zig:6083-6092).
    GenZir then_scope = makeSubBlock(&check_scope, scope);
    uint32_t payload = addUnNode(&then_scope,
        is_catch ? ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE
                 : ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE,
        operand, node);
    // By-ref result locations take the unwrapped value directly; all
    // other RLs coerce it through rvalue (AstGen.zig:6088-6091).
    uint32_t then_break_operand;
    if (rl.tag == RL_REF || rl.tag == RL_REF_COERCED_TY)
        then_break_operand = payload;
    else
        then_break_operand = rvalue(&then_scope, break_rl, payload, node);
    addBreak(&then_scope, ZIR_INST_BREAK, result_block, then_break_operand,
        (int32_t)node - (int32_t)gz->decl_node_index);
    // Else branch: evaluate the fallback RHS (AstGen.zig:6094-6131).
    GenZir else_scope = makeSubBlock(&check_scope, scope);
    // save_err_ret_index (AstGen.zig:6099-6100).
    if (do_err_trace && nodeMayAppendToErrorTrace(tree, lhs_node))
        addSaveErrRetIndex(&else_scope, ZIR_REF_NONE);
    // fullBodyExpr (not expr) so unlabeled blocks are inlined
    // (AstGen.zig:6125).
    uint32_t else_break_operand
        = fullBodyExpr(&else_scope, &else_scope.base, break_rl, rhs_node);
    if (!endsWithNoReturn(&else_scope)) {
        // restoreErrRetIndex (AstGen.zig:6128-6129).
        if (do_err_trace)
            restoreErrRetIndex(&else_scope, result_block, break_rl, rhs_node,
                else_break_operand);
        addBreak(&else_scope, ZIR_INST_BREAK, result_block,
            else_break_operand,
            (int32_t)rhs_node - (int32_t)gz->decl_node_index);
    }
    setCondBrPayload(ag, cond_br, ok_bit, &then_scope, &else_scope);
    // AstGen.zig:6137-6141.
    uint32_t result = result_block + ZIR_REF_START_INDEX;
    if (need_result_rvalue)
        result = rvalue(gz, rl, result, node);
    return result;
}
// --- whileExpr (AstGen.zig:6529-6805) ---
// Handles while_simple.
// Structure: loop { cond_block { cond, condbr }, repeat }
// condbr → then { continue_block { body, break continue }, break cond }
//        → else { break loop }
//
// Lowers `while (cond) body` (plus `inline while`) as a loop whose first
// instruction is a cond_block: the condition feeds a condbr, the then
// branch wraps the body in a continue_block (the `continue` target), and
// the else branch breaks out of the loop. Returns the loop's result ref.
static uint32_t whileExpr(
    GenZir* gz, Scope* scope, uint32_t node, bool is_statement) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];
    // Detect inline keyword (AstGen.zig:6558).
    uint32_t main_token = tree->nodes.main_tokens[node];
    bool is_inline = (main_token > 0
        && tree->tokens.tags[main_token - 1] == TOKEN_KEYWORD_INLINE);
    // WHILE_SIMPLE: lhs = cond_expr, rhs = body.
    uint32_t cond_node = nd.lhs;
    uint32_t body_node = nd.rhs;
    // Create loop instruction (AstGen.zig:6562-6564).
    // inline while uses block_inline + condbr_inline + repeat_inline.
    ZirInstTag loop_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_LOOP;
    uint32_t loop_inst = makeBlockInst(ag, loop_tag, gz, node);
    gzAppendInstruction(gz, loop_inst);
    GenZir loop_scope = makeSubBlock(gz, scope);
    loop_scope.is_inline = is_inline;
    // Evaluate condition in cond_scope (AstGen.zig:6571-6607).
    GenZir cond_scope = makeSubBlock(&loop_scope, &loop_scope.base);
    // Emit debug node for the condition expression (AstGen.zig:6579).
    emitDbgNode(&cond_scope, cond_node);
    uint32_t cond = expr(&cond_scope, &cond_scope.base, cond_node);
    // Create condbr + cond_block (AstGen.zig:6609-6615).
    ZirInstTag condbr_tag
        = is_inline ? ZIR_INST_CONDBR_INLINE : ZIR_INST_CONDBR;
    uint32_t condbr = addCondBr(&cond_scope, condbr_tag, node);
    ZirInstTag block_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_BLOCK;
    uint32_t cond_block = makeBlockInst(ag, block_tag, &loop_scope, node);
    setBlockBody(ag, &cond_scope, cond_block); // unstacks cond_scope
    gzAppendInstruction(&loop_scope, cond_block);
    // Create continue_block (AstGen.zig:6694).
    uint32_t continue_block = makeBlockInst(ag, block_tag, &loop_scope, node);
    // Add repeat to loop_scope (AstGen.zig:6696-6697).
    {
        ZirInstTag repeat_tag
            = is_inline ? ZIR_INST_REPEAT_INLINE : ZIR_INST_REPEAT;
        ZirInstData repeat_data;
        memset(&repeat_data, 0, sizeof(repeat_data));
        repeat_data.node = (int32_t)node - (int32_t)loop_scope.decl_node_index;
        addInstruction(&loop_scope, repeat_tag, repeat_data);
    }
    // Set loop body and configure break/continue (AstGen.zig:6699-6701).
    // break/continue lowering inside the body finds these targets through
    // the scope chain rooted at loop_scope.
    setBlockBody(ag, &loop_scope, loop_inst); // unstacks loop_scope
    loop_scope.break_block = loop_inst;
    loop_scope.continue_block = continue_block;
    // Stack then_scope (AstGen.zig:6708-6709).
    GenZir then_scope = makeSubBlock(gz, &cond_scope.base);
    // Add continue_block to then_scope (AstGen.zig:6716).
    gzAppendInstruction(&then_scope, continue_block);
    // Create continue_scope inside then_scope (AstGen.zig:6725).
    GenZir continue_scope = makeSubBlock(&then_scope, &then_scope.base);
    // Execute body (AstGen.zig:6727-6730).
    emitDbgNode(&continue_scope, body_node);
    fullBodyExpr(
        &continue_scope, &continue_scope.base, RL_NONE_VAL, body_node);
    // Break continue_block if not noreturn (AstGen.zig:6735-6747).
    if (!endsWithNoReturn(&continue_scope)) {
        // dbg_stmt + dbg_empty_stmt (AstGen.zig:6737-6745).
        // Appended via gz while continue_scope is stacked; since sub-blocks
        // share the parent's instruction array, these land in its range.
        advanceSourceCursor(
            ag, tree->tokens.starts[lastToken(tree, body_node)]);
        emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column);
        {
            ZirInstData ext_data;
            ext_data.extended.opcode = (uint16_t)ZIR_EXT_DBG_EMPTY_STMT;
            ext_data.extended.small = 0;
            ext_data.extended.operand = 0;
            addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
        }
        ZirInstTag break_tag
            = is_inline ? ZIR_INST_BREAK_INLINE : ZIR_INST_BREAK;
        addBreak(&continue_scope, break_tag, continue_block,
            ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
    }
    setBlockBody(ag, &continue_scope, continue_block);
    // Break cond_block from then_scope.
    // NOTE(review): the original "(AstGen.zig:7064)" reference here falls
    // inside forExpr's upstream range and looks copy-pasted — verify the
    // intended whileExpr line.
    {
        ZirInstTag break_tag
            = is_inline ? ZIR_INST_BREAK_INLINE : ZIR_INST_BREAK;
        addBreak(&then_scope, break_tag, cond_block, ZIR_REF_VOID_VALUE,
            AST_NODE_OFFSET_NONE);
    }
    // Else scope: break loop with void (AstGen.zig:6785-6788).
    GenZir else_scope = makeSubBlock(gz, &cond_scope.base);
    {
        ZirInstTag break_tag
            = is_inline ? ZIR_INST_BREAK_INLINE : ZIR_INST_BREAK;
        addBreak(&else_scope, break_tag, loop_inst, ZIR_REF_VOID_VALUE,
            AST_NODE_OFFSET_NONE);
    }
    // Wire up condbr (AstGen.zig:6795).
    setCondBrPayload(ag, condbr, cond, &then_scope, &else_scope);
    uint32_t result = loop_inst + ZIR_REF_START_INDEX;
    // Emit ensure_result_used when used as statement (AstGen.zig:6812-6813).
    if (is_statement) {
        addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, result, node);
    }
    return result;
}
// --- switchExpr (AstGen.zig:7625-8117) ---
// Handles switch and switch_comma expressions.
// Encoding: switch_block pl_node with SwitchBlock extra payload.
//
// Strategy: one categorization pass counts scalar/multi/else prongs,
// then a single evaluation pass in source order writes each prong's
// data into a scratch buffer (`pay`), which is finally serialized into
// `extra` in the order Sema expects: [else] [scalars...] [multis...].

// Grows the switch scratch buffer to hold at least `need` u32 slots.
// Doubles until it fits; aborts on OOM (same policy as the other
// allocation-failure paths in this file).
static uint32_t* switchExprPayEnsure(
    uint32_t* pay, uint32_t* pay_cap, uint32_t need) {
    if (need <= *pay_cap)
        return pay;
    uint32_t cap = *pay_cap;
    while (cap < need)
        cap *= 2;
    uint32_t* p = realloc(pay, cap * sizeof(uint32_t));
    if (!p)
        abort();
    *pay_cap = cap;
    return p;
}

static uint32_t switchExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    bool need_rl = nodesNeedRlContains(ag, node);
    ResultLoc break_rl = breakResultInfo(gz, rl, node, need_rl);
    AstData nd = tree->nodes.datas[node];
    // AST_NODE_SWITCH: lhs = condition node, rhs = extra index for SubRange.
    // SubRange[rhs] = { cases_start, cases_end }.
    // Case nodes are at extra_data[cases_start..cases_end].
    uint32_t cond_node = nd.lhs;
    uint32_t extra_idx = nd.rhs;
    uint32_t cases_start = tree->extra_data.arr[extra_idx];
    uint32_t cases_end = tree->extra_data.arr[extra_idx + 1];
    const uint32_t* case_nodes_arr = tree->extra_data.arr + cases_start;
    uint32_t case_count = cases_end - cases_start;
    // Save operand source location before evaluating (AstGen.zig:7774-7775).
    advanceSourceCursorToNode(ag, cond_node);
    uint32_t operand_lc_line = ag->source_line - gz->decl_line;
    uint32_t operand_lc_col = ag->source_column;
    // Evaluate switch operand (AstGen.zig:7777).
    uint32_t cond_ref = expr(gz, scope, cond_node);
    // --- First pass: categorize cases (AstGen.zig:7671-7762) ---
    uint32_t scalar_cases_len = 0;
    uint32_t multi_cases_len = 0;
    bool has_else = false;
    for (uint32_t ci = 0; ci < case_count; ci++) {
        uint32_t cn = case_nodes_arr[ci];
        AstNodeTag ct = tree->nodes.tags[cn];
        AstData cd = tree->nodes.datas[cn];
        switch (ct) {
        case AST_NODE_SWITCH_CASE_ONE:
        case AST_NODE_SWITCH_CASE_INLINE_ONE:
            if (cd.lhs == 0)
                has_else = true;
            else if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE)
                multi_cases_len++;
            else
                scalar_cases_len++;
            break;
        case AST_NODE_SWITCH_CASE:
        case AST_NODE_SWITCH_CASE_INLINE:
            multi_cases_len++;
            break;
        default:
            break;
        }
    }
    // Sema expects a dbg_stmt immediately before switch_block
    // (AstGen.zig:7806).
    emitDbgStmtForceCurrentIndex(gz, operand_lc_line, operand_lc_col);
    // --- Create switch_block instruction (AstGen.zig:7809) ---
    uint32_t switch_inst = makeBlockInst(ag, ZIR_INST_SWITCH_BLOCK, gz, node);
    // --- Single-pass evaluation in source order (AstGen.zig:7849-8027) ---
    // Case table + payload buffer pattern (like upstream scratch).
    // Table layout: [else?] [scalar_0..N] [multi_0..N]
    // Each entry points to the start of that case's data in the buffer.
    uint32_t table_size
        = (has_else ? 1 : 0) + scalar_cases_len + multi_cases_len;
    uint32_t else_tbl = 0;
    uint32_t scalar_tbl = (has_else ? 1 : 0);
    uint32_t multi_tbl = scalar_tbl + scalar_cases_len;
    uint32_t pay_cap = table_size + case_count * 16;
    if (pay_cap == 0)
        pay_cap = 1; // empty switch: avoid malloc(0) and zero-doubling
    uint32_t* pay = malloc(pay_cap * sizeof(uint32_t));
    if (!pay)
        abort(); // OOM policy matches the realloc paths
    uint32_t pay_len = table_size;
    uint32_t scalar_ci = 0;
    uint32_t multi_ci = 0;
    for (uint32_t ci = 0; ci < case_count; ci++) {
        uint32_t cn = case_nodes_arr[ci];
        AstNodeTag ct = tree->nodes.tags[cn];
        AstData cd = tree->nodes.datas[cn];
        uint32_t hdr = pay_len;
        uint32_t prong_info_slot = 0;
        // Ensure capacity for this prong's header and the fixed-size
        // item slots (generous estimate; the per-item loops below
        // re-check capacity per element).
        pay = switchExprPayEnsure(pay, &pay_cap, pay_len + 32);
        switch (ct) {
        case AST_NODE_SWITCH_CASE_ONE:
        case AST_NODE_SWITCH_CASE_INLINE_ONE:
            if (cd.lhs == 0) {
                // Else: [prong_info, body...]
                pay[else_tbl] = hdr;
                prong_info_slot = pay_len++;
            } else if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE) {
                // Single range → multi case:
                // [items_len=0, ranges_len=1, prong_info, first, last]
                pay[multi_tbl + multi_ci++] = hdr;
                pay[pay_len++] = 0;
                pay[pay_len++] = 1;
                prong_info_slot = pay_len++;
                AstData rng = tree->nodes.datas[cd.lhs];
                pay[pay_len++] = comptimeExpr(gz, scope, RL_NONE_VAL, rng.lhs,
                    COMPTIME_REASON_SWITCH_ITEM);
                pay[pay_len++] = comptimeExpr(gz, scope, RL_NONE_VAL, rng.rhs,
                    COMPTIME_REASON_SWITCH_ITEM);
            } else {
                // Scalar: [item_ref, prong_info, body...]
                pay[scalar_tbl + scalar_ci++] = hdr;
                pay[pay_len++] = comptimeExpr(gz, scope, RL_NONE_VAL, cd.lhs,
                    COMPTIME_REASON_SWITCH_ITEM);
                prong_info_slot = pay_len++;
            }
            break;
        case AST_NODE_SWITCH_CASE:
        case AST_NODE_SWITCH_CASE_INLINE: {
            // Multi-item: SubRange[lhs] of items, rhs = body.
            pay[multi_tbl + multi_ci++] = hdr;
            uint32_t ist = tree->extra_data.arr[cd.lhs];
            uint32_t ien = tree->extra_data.arr[cd.lhs + 1];
            uint32_t nitems = 0, nranges = 0;
            for (uint32_t j = ist; j < ien; j++) {
                if (tree->nodes.tags[tree->extra_data.arr[j]]
                    == AST_NODE_SWITCH_RANGE)
                    nranges++;
                else
                    nitems++;
            }
            pay[pay_len++] = nitems;
            pay[pay_len++] = nranges;
            prong_info_slot = pay_len++;
            // Non-range items.
            for (uint32_t j = ist; j < ien; j++) {
                uint32_t item = tree->extra_data.arr[j];
                if (tree->nodes.tags[item] != AST_NODE_SWITCH_RANGE) {
                    pay = switchExprPayEnsure(pay, &pay_cap, pay_len + 1);
                    pay[pay_len++] = comptimeExpr(gz, scope, RL_NONE_VAL, item,
                        COMPTIME_REASON_SWITCH_ITEM);
                }
            }
            // Range pairs.
            for (uint32_t j = ist; j < ien; j++) {
                uint32_t item = tree->extra_data.arr[j];
                if (tree->nodes.tags[item] == AST_NODE_SWITCH_RANGE) {
                    AstData rng = tree->nodes.datas[item];
                    pay = switchExprPayEnsure(pay, &pay_cap, pay_len + 2);
                    pay[pay_len++] = comptimeExpr(gz, scope, RL_NONE_VAL,
                        rng.lhs, COMPTIME_REASON_SWITCH_ITEM);
                    pay[pay_len++] = comptimeExpr(gz, scope, RL_NONE_VAL,
                        rng.rhs, COMPTIME_REASON_SWITCH_ITEM);
                }
            }
            break;
        }
        default:
            continue;
        }
        // Evaluate body (AstGen.zig:7997-8026).
        uint32_t body_node = cd.rhs;
        GenZir case_scope = makeSubBlock(gz, scope);
        // Note: upstream regular switchExpr (AstGen.zig:7625) does NOT emit
        // save_err_ret_index. Only switchExprErrUnion (AstGen.zig:7524) does.
        // Use fullBodyExpr to process body inline (AstGen.zig:8009).
        uint32_t result
            = fullBodyExpr(&case_scope, &case_scope.base, break_rl, body_node);
        if (!refIsNoReturn(gz, result)) {
            addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, result,
                (int32_t)body_node - (int32_t)gz->decl_node_index);
        }
        uint32_t body_len = gzInstructionsLen(&case_scope);
        const uint32_t* body = gzInstructionsSlice(&case_scope);
        pay[prong_info_slot] = body_len & 0x0FFFFFFFu;
        pay = switchExprPayEnsure(pay, &pay_cap, pay_len + body_len);
        for (uint32_t i = 0; i < body_len; i++)
            pay[pay_len++] = body[i];
        gzUnstack(&case_scope);
    }
    // --- Serialize to extra in payload order (AstGen.zig:8036-8110) ---
    ensureExtraCapacity(ag,
        2 + (uint32_t)(multi_cases_len > 0 ? 1 : 0) + pay_len - table_size);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = cond_ref;
    // SwitchBlock.Bits: has_multi_cases (bit 0), has_else (bit 1),
    // scalar_cases_len packed into the high bits starting at bit 7.
    uint32_t bits = 0;
    if (multi_cases_len > 0)
        bits |= 1u;
    if (has_else)
        bits |= (1u << 1);
    bits |= (scalar_cases_len & 0x1FFFFFFu) << 7;
    ag->extra[ag->extra_len++] = bits;
    if (multi_cases_len > 0)
        ag->extra[ag->extra_len++] = multi_cases_len;
    // Else prong.
    if (has_else) {
        uint32_t si = pay[else_tbl];
        uint32_t bl = pay[si] & 0x0FFFFFFFu;
        for (uint32_t i = 0; i < 1 + bl; i++)
            ag->extra[ag->extra_len++] = pay[si + i];
    }
    // Scalar cases.
    for (uint32_t i = 0; i < scalar_cases_len; i++) {
        uint32_t si = pay[scalar_tbl + i];
        uint32_t bl = pay[si + 1] & 0x0FFFFFFFu;
        for (uint32_t j = 0; j < 2 + bl; j++)
            ag->extra[ag->extra_len++] = pay[si + j];
    }
    // Multi cases.
    for (uint32_t i = 0; i < multi_cases_len; i++) {
        uint32_t si = pay[multi_tbl + i];
        uint32_t ni = pay[si];
        uint32_t nr = pay[si + 1];
        uint32_t bl = pay[si + 2] & 0x0FFFFFFFu;
        uint32_t total = 3 + ni + nr * 2 + bl;
        for (uint32_t j = 0; j < total; j++)
            ag->extra[ag->extra_len++] = pay[si + j];
    }
    free(pay);
    ag->inst_datas[switch_inst].pl_node.payload_index = payload_index;
    gzAppendInstruction(gz, switch_inst);
    // AstGen.zig:8112-8115.
    bool need_result_rvalue = (break_rl.tag != rl.tag);
    if (need_result_rvalue)
        return rvalue(gz, rl, switch_inst + ZIR_REF_START_INDEX, node);
    return switch_inst + ZIR_REF_START_INDEX;
}
// --- rvalue (AstGen.zig:11029) ---
// Simplified: handles .none and .discard result locations.
static uint32_t rvalueDiscard(GenZir* gz, uint32_t result, uint32_t src_node) {
    // A discarded result must still be checked for being an ignored
    // error: emit ensure_result_non_error, then yield .void_value
    // (AstGen.zig:11071-11074).
    ZirInstData data;
    data.un_node.operand = result;
    data.un_node.src_node = (int32_t)src_node - (int32_t)gz->decl_node_index;
    addInstruction(gz, ZIR_INST_ENSURE_RESULT_NON_ERROR, data);
    return ZIR_REF_VOID_VALUE;
}
// --- emitDbgNode / emitDbgStmt (AstGen.zig:3422, 13713) ---
// Emits a dbg_stmt, coalescing with a trailing dbg_stmt in this block
// rather than stacking a second one (AstGen.zig:13715-13724).
static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column) {
    if (gz->is_comptime)
        return;
    AstGenCtx* ag = gz->astgen;
    uint32_t count = gzInstructionsLen(gz);
    if (count != 0) {
        uint32_t tail = gzInstructionsSlice(gz)[count - 1];
        if (ag->inst_tags[tail] == ZIR_INST_DBG_STMT) {
            // Overwrite the previous location in place.
            ag->inst_datas[tail].dbg_stmt.line = line;
            ag->inst_datas[tail].dbg_stmt.column = column;
            return;
        }
    }
    ZirInstData data;
    data.dbg_stmt.line = line;
    data.dbg_stmt.column = column;
    addInstruction(gz, ZIR_INST_DBG_STMT, data);
}
// Mirrors emitDbgStmtForceCurrentIndex (AstGen.zig:13739-13760).
// Like emitDbgStmt, but only reuses the trailing dbg_stmt when it is
// also the most recently created instruction overall, so the dbg_stmt
// ends up at the current instruction index.
static void emitDbgStmtForceCurrentIndex(
    GenZir* gz, uint32_t line, uint32_t column) {
    AstGenCtx* ag = gz->astgen;
    uint32_t count = gzInstructionsLen(gz);
    if (count != 0
        && gzInstructionsSlice(gz)[count - 1] == ag->inst_len - 1
        && ag->inst_tags[ag->inst_len - 1] == ZIR_INST_DBG_STMT) {
        ag->inst_datas[ag->inst_len - 1].dbg_stmt.line = line;
        ag->inst_datas[ag->inst_len - 1].dbg_stmt.column = column;
        return;
    }
    ZirInstData data;
    data.dbg_stmt.line = line;
    data.dbg_stmt.column = column;
    addInstruction(gz, ZIR_INST_DBG_STMT, data);
}
// Advances the source cursor to `node` and emits a dbg_stmt for its
// decl-relative location (no-op in comptime blocks).
static void emitDbgNode(GenZir* gz, uint32_t node) {
    if (gz->is_comptime)
        return;
    AstGenCtx* ag = gz->astgen;
    advanceSourceCursorToNode(ag, node);
    emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column);
}
// --- assign (AstGen.zig:3434) ---
// Lowers an `lhs = rhs` statement; `_ = rhs` discards the RHS value.
static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node) {
    emitDbgNode(gz, infix_node);
    const AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[infix_node];
    // Discard form `_ = expr` (AstGen.zig:3440-3446): the LHS must be
    // exactly the identifier `_` (an underscore not followed by
    // another identifier character).
    if (tree->nodes.tags[nd.lhs] == AST_NODE_IDENTIFIER) {
        uint32_t ident_tok = tree->nodes.main_tokens[nd.lhs];
        uint32_t tok_start = tree->tokens.starts[ident_tok];
        bool is_discard = false;
        if (tree->source[tok_start] == '_') {
            if (tok_start + 1 >= tree->source_len) {
                is_discard = true;
            } else {
                char c = tree->source[tok_start + 1];
                bool ident_char = (c >= 'a' && c <= 'z')
                    || (c >= 'A' && c <= 'Z')
                    || (c >= '0' && c <= '9') || c == '_';
                is_discard = !ident_char;
            }
        }
        if (is_discard) {
            // Evaluate the RHS, then discard its value.
            uint32_t result = expr(gz, scope, nd.rhs);
            rvalueDiscard(gz, result, nd.rhs);
            return;
        }
    }
    // Regular assignment: evaluate LHS as an lvalue pointer and store
    // the RHS through it (AstGen.zig:3448-3452).
    uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
    ResultLoc ptr_rl
        = { .tag = RL_PTR, .data = lhs_ptr, .src_node = infix_node };
    (void)exprRl(gz, scope, ptr_rl, nd.rhs);
}
// --- assignOp (AstGen.zig:3731) ---
// Lowers a compound assignment (`+=`, `-=`, `*=`, ...): load the LHS,
// combine it with the RHS, and store the result back.
static void assignOp(
    GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag) {
    emitDbgNode(gz, infix_node);
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[infix_node];
    // Evaluate LHS as an lvalue pointer (AstGen.zig:3742).
    uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
    // The checked arithmetic ops get a dbg_stmt right before the
    // operation; capture the cursor now (AstGen.zig:3744-3747).
    bool need_dbg = op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB
        || op_tag == ZIR_INST_MUL || op_tag == ZIR_INST_DIV
        || op_tag == ZIR_INST_MOD_REM;
    uint32_t cursor_line = 0;
    uint32_t cursor_col = 0;
    if (need_dbg) {
        if (!gz->is_comptime) {
            advanceSourceCursorToMainToken(ag, gz, infix_node);
        }
        cursor_line = ag->source_line - gz->decl_line;
        cursor_col = ag->source_column;
    }
    // Load the current value (AstGen.zig:3748).
    uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);
    // RHS result type (AstGen.zig:3750-3766): add/sub use the
    // inplace_arith_result_ty extended op; everything else uses typeof.
    uint32_t rhs_res_ty;
    if (op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB) {
        ZirInstData ext_data;
        memset(&ext_data, 0, sizeof(ext_data));
        ext_data.extended.opcode = (uint16_t)ZIR_EXT_INPLACE_ARITH_RESULT_TY;
        // add_eq = 0, sub_eq = 1.
        ext_data.extended.small = (op_tag == ZIR_INST_ADD) ? 0 : 1;
        ext_data.extended.operand = lhs;
        rhs_res_ty = addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
    } else {
        rhs_res_ty = addUnNode(gz, ZIR_INST_TYPEOF, lhs, infix_node);
    }
    // Evaluate the RHS and coerce it to that type (AstGen.zig:3768).
    uint32_t rhs_raw = expr(gz, scope, nd.rhs);
    uint32_t rhs
        = addPlNodeBin(gz, ZIR_INST_AS_NODE, nd.rhs, rhs_res_ty, rhs_raw);
    // Debug statement for the arithmetic ops (AstGen.zig:3770-3775).
    if (need_dbg)
        emitDbgStmt(gz, cursor_line, cursor_col);
    // Perform the operation and store back (AstGen.zig:3776-3783).
    uint32_t result = addPlNodeBin(gz, op_tag, infix_node, lhs, rhs);
    addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result);
}
// --- builtinEvalToError (BuiltinFn.zig) ---
// Looks up per-builtin eval_to_error for the `@name(...)` call at
// `node`. Default is .never (0); a handful are .always (1) or
// .maybe (2). Mirrors the BuiltinFn.list lookup in AstGen.zig:10539.
static int builtinEvalToError(const Ast* tree, uint32_t node) {
    const char* source = tree->source;
    uint32_t main_tok = tree->nodes.main_tokens[node];
    uint32_t name_start = tree->tokens.starts[main_tok] + 1; // skip '@'
    uint32_t name_end = name_start;
    // Builtin names consist only of letters and underscores.
    while (name_end < tree->source_len) {
        char c = source[name_end];
        bool alpha = (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z');
        if (!alpha && c != '_')
            break;
        name_end++;
    }
    uint32_t name_len = name_end - name_start;
    static const struct {
        const char* name;
        uint8_t len;
        uint8_t result;
    } table[] = {
        { "errorFromInt", 12, 1 }, // .always
        { "as", 2, 2 },            // .maybe
        { "call", 4, 2 },
        { "field", 5, 2 },
        { "errorCast", 9, 2 },
    };
    for (size_t i = 0; i < sizeof(table) / sizeof(table[0]); i++) {
        if (name_len == table[i].len
            && memcmp(source + name_start, table[i].name, name_len) == 0)
            return table[i].result;
    }
    return 0; // .never
}
// --- nodeMayEvalToError (AstGen.zig:10340) ---
// Three-way result: 0=never, 1=always, 2=maybe.
#define EVAL_TO_ERROR_NEVER 0
#define EVAL_TO_ERROR_ALWAYS 1
#define EVAL_TO_ERROR_MAYBE 2
// Classifies whether the expression at `node` can evaluate to an error
// value: EVAL_TO_ERROR_NEVER, _ALWAYS, or _MAYBE.
static int nodeMayEvalToError(const Ast* tree, uint32_t node) {
    for (uint32_t n = node;;) {
        switch (tree->nodes.tags[n]) {
        case AST_NODE_ERROR_VALUE:
            return EVAL_TO_ERROR_ALWAYS;
        // Wrappers: unwrap to the operand and classify that instead.
        case AST_NODE_TRY:
        case AST_NODE_COMPTIME:
        case AST_NODE_NOSUSPEND:
        case AST_NODE_GROUPED_EXPRESSION:
        case AST_NODE_UNWRAP_OPTIONAL:
            n = tree->nodes.datas[n].lhs;
            break;
        // Loads, calls, and control-flow expressions may yield errors.
        case AST_NODE_IDENTIFIER:
        case AST_NODE_FIELD_ACCESS:
        case AST_NODE_DEREF:
        case AST_NODE_ARRAY_ACCESS:
        case AST_NODE_WHILE_SIMPLE:
        case AST_NODE_WHILE_CONT:
        case AST_NODE_WHILE:
        case AST_NODE_FOR_SIMPLE:
        case AST_NODE_FOR:
        case AST_NODE_IF_SIMPLE:
        case AST_NODE_IF:
        case AST_NODE_SWITCH:
        case AST_NODE_SWITCH_COMMA:
        case AST_NODE_CALL_ONE:
        case AST_NODE_CALL_ONE_COMMA:
        case AST_NODE_CALL:
        case AST_NODE_CALL_COMMA:
        case AST_NODE_ASM_SIMPLE:
        case AST_NODE_ASM_LEGACY:
        case AST_NODE_ASM:
        case AST_NODE_CATCH:
        case AST_NODE_ORELSE:
            return EVAL_TO_ERROR_MAYBE;
        // A labeled block can break with any value; an unlabeled block
        // is always void.
        case AST_NODE_BLOCK_TWO:
        case AST_NODE_BLOCK_TWO_SEMICOLON:
        case AST_NODE_BLOCK:
        case AST_NODE_BLOCK_SEMICOLON: {
            uint32_t lbrace = tree->nodes.main_tokens[n];
            bool labeled
                = lbrace > 0 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON;
            return labeled ? EVAL_TO_ERROR_MAYBE : EVAL_TO_ERROR_NEVER;
        }
        // Builtins: per-builtin eval_to_error (AstGen.zig:10530-10541).
        case AST_NODE_BUILTIN_CALL:
        case AST_NODE_BUILTIN_CALL_COMMA:
        case AST_NODE_BUILTIN_CALL_TWO:
        case AST_NODE_BUILTIN_CALL_TWO_COMMA:
            return builtinEvalToError(tree, n);
        // Everything else: .never
        default:
            return EVAL_TO_ERROR_NEVER;
        }
    }
}
// --- nodeMayAppendToErrorTrace (AstGen.zig:10315) ---
// Returns true if the expression may append to the error return trace.
static bool nodeMayAppendToErrorTrace(const Ast* tree, uint32_t node) {
    for (uint32_t n = node;;) {
        switch (tree->nodes.tags[n]) {
        // These never invoke runtime calls that push trace entries.
        case AST_NODE_ERROR_VALUE:
        case AST_NODE_IDENTIFIER:
        case AST_NODE_COMPTIME:
            return false;
        // Wrappers: unwrap to the operand and re-check.
        case AST_NODE_TRY:
        case AST_NODE_NOSUSPEND:
        case AST_NODE_GROUPED_EXPRESSION:
        case AST_NODE_UNWRAP_OPTIONAL:
            n = tree->nodes.datas[n].lhs;
            break;
        // Otherwise be conservative: can it evaluate to an error at all?
        default:
            return nodeMayEvalToError(tree, n) != EVAL_TO_ERROR_NEVER;
        }
    }
}
// --- addSaveErrRetIndex (AstGen.zig:12556) ---
// Emits a SAVE_ERR_RET_INDEX instruction.
// `operand` is the init inst ref, or ZIR_REF_NONE for .always.
static void addSaveErrRetIndex(GenZir* gz, uint32_t operand) {
    ZirInstData data;
    data.save_err_ret_index._pad = 0;
    data.save_err_ret_index.operand = operand;
    addInstruction(gz, ZIR_INST_SAVE_ERR_RET_INDEX, data);
}
// --- addRestoreErrRetIndexBlock (AstGen.zig:12607-12614) ---
// Emits extended RESTORE_ERR_RET_INDEX targeting a block (if_non_error
// condition). Extra payload: { src_node, block_ref, operand }.
static void addRestoreErrRetIndexBlock(
    GenZir* gz, uint32_t block_inst, uint32_t operand, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    int32_t src_off = (int32_t)node - (int32_t)gz->decl_node_index;
    ensureExtraCapacity(ag, 3);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = (uint32_t)src_off;
    ag->extra[ag->extra_len++] = block_inst + ZIR_REF_START_INDEX;
    ag->extra[ag->extra_len++] = operand;
    ZirInstData data;
    data.extended.opcode = (uint16_t)ZIR_EXT_RESTORE_ERR_RET_INDEX;
    data.extended.small = 0;
    data.extended.operand = payload_index;
    addInstruction(gz, ZIR_INST_EXTENDED, data);
}
// --- restoreErrRetIndex (AstGen.zig:2121-2148) ---
// Emits restore_err_ret_index for a block target, choosing the operand
// based on whether `node` can evaluate to an error.
static void restoreErrRetIndex(GenZir* gz, uint32_t block_inst, ResultLoc rl,
    uint32_t node, uint32_t result) {
    switch (nodeMayEvalToError(gz->astgen->tree, node)) {
    case EVAL_TO_ERROR_ALWAYS:
        // Definitely an error: never restore/pop.
        return;
    case EVAL_TO_ERROR_NEVER:
        // Definitely not an error: unconditionally restore/pop.
        addRestoreErrRetIndexBlock(gz, block_inst, ZIR_REF_NONE, node);
        return;
    default: {
        // EVAL_TO_ERROR_MAYBE: condition on the actual value.
        // Simplified versus upstream (AstGen.zig:2131-2144): without
        // ri.ctx, any non-pointer RL just uses `result` directly.
        uint32_t op;
        if (rl.tag == RL_PTR)
            op = addUnNode(gz, ZIR_INST_LOAD, rl.data, node);
        else if (rl.tag == RL_INFERRED_PTR)
            op = ZIR_REF_NONE;
        else
            op = result;
        addRestoreErrRetIndexBlock(gz, block_inst, op, node);
        return;
    }
    }
}
// --- varDecl (AstGen.zig:3189) ---
// Handles local const/var declarations and introduces the new name into
// the scope chain.
//
// Exactly one of val_out/ptr_out is initialized on the success paths:
//   - const whose init does not need a result pointer -> ScopeLocalVal
//   - const whose init needs a result pointer (alloc) -> ScopeLocalPtr
//   - var (always lowered through an alloc)           -> ScopeLocalPtr
// *scope_out is then advanced to point at the new scope; on the
// SET_ERROR / has_compile_errors paths it is left unchanged.
static void varDecl(GenZir* gz, Scope* scope, uint32_t node,
    ScopeLocalVal* val_out, ScopeLocalPtr* ptr_out, Scope** scope_out) {
    AstGenCtx* ag = gz->astgen;
    emitDbgNode(gz, node); // AstGen.zig:3196
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];
    AstNodeTag tag = tree->nodes.tags[node];
    uint32_t mut_token = tree->nodes.main_tokens[node];
    uint32_t name_token = mut_token + 1;
    // Leading 'c' distinguishes the `const` keyword from `var`.
    bool is_const = (tree->source[tree->tokens.starts[mut_token]] == 'c');
    uint32_t ident_name = identAsString(ag, name_token);
    // Extract type_node and init_node based on variant.
    uint32_t type_node = 0;
    uint32_t init_node = 0;
    if (tag == AST_NODE_SIMPLE_VAR_DECL) {
        // lhs = type (optional), rhs = init (optional).
        type_node = nd.lhs;
        init_node = nd.rhs;
    } else if (tag == AST_NODE_LOCAL_VAR_DECL) {
        // lhs = extra_data index, rhs = init.
        // extra: {type_node, align_node, addrspace_node, section_node}
        // Simplified: just extract type_node.
        uint32_t extra_idx = nd.lhs;
        type_node = tree->extra_data.arr[extra_idx]; // type_node
        init_node = nd.rhs;
    } else if (tag == AST_NODE_ALIGNED_VAR_DECL) {
        // lhs = align expr, rhs = init.
        // No type node in this variant.
        init_node = nd.rhs;
    } else {
        // global_var_decl or unknown — bail.
        SET_ERROR(ag);
        return;
    }
    if (init_node == 0) {
        // Variables must be initialized (AstGen.zig:3228).
        SET_ERROR(ag);
        return;
    }
    if (is_const) {
        // --- CONST path (AstGen.zig:3232-3340) ---
        if (!nodesNeedRlContains(ag, node)) {
            // Rvalue path (AstGen.zig:3246-3271): the init does not
            // need a result pointer, so the value binds directly to
            // a ScopeLocalVal with no alloc.
            // Evaluate type annotation and build result_info
            // (AstGen.zig:3247-3250).
            ResultLoc result_info;
            if (type_node != 0) {
                uint32_t type_ref = typeExpr(gz, scope, type_node);
                result_info = (ResultLoc) { .tag = RL_TY,
                    .data = type_ref,
                    .src_node = 0,
                    .ctx = RI_CTX_CONST_INIT };
            } else {
                result_info = (ResultLoc) { .tag = RL_NONE,
                    .data = 0,
                    .src_node = 0,
                    .ctx = RI_CTX_CONST_INIT };
            }
            // Evaluate init expression (AstGen.zig:3251-3252).
            uint32_t init_ref = exprRl(gz, scope, result_info, init_node);
            if (ag->has_compile_errors)
                return;
            // validate_const (AstGen.zig:3266).
            addUnNode(gz, ZIR_INST_VALIDATE_CONST, init_ref, init_node);
            // dbg_var_val (AstGen.zig:3269).
            addDbgVar(gz, ZIR_INST_DBG_VAR_VAL, ident_name, init_ref);
            // save_err_ret_index (AstGen.zig:3259-3260).
            // NOTE(review): emitted after validate_const/dbg_var_val
            // although the cited upstream lines precede 3266/3269 —
            // confirm the intended ordering against AstGen.zig.
            if (nodeMayAppendToErrorTrace(tree, init_node))
                addSaveErrRetIndex(gz, init_ref);
            // Create ScopeLocalVal (AstGen.zig:3276-3284).
            val_out->base.tag = SCOPE_LOCAL_VAL;
            val_out->parent = *scope_out;
            val_out->gen_zir = gz;
            val_out->inst = init_ref;
            val_out->token_src = name_token;
            val_out->name = ident_name;
            *scope_out = &val_out->base;
        } else {
            // Alloc path (AstGen.zig:3277-3340).
            // The init expression needs a result pointer (nodes_need_rl).
            bool is_comptime_init = gz->is_comptime
                || tree->nodes.tags[init_node] == AST_NODE_COMPTIME;
            uint32_t var_ptr;
            bool resolve_inferred;
            if (type_node != 0) {
                // Typed const: alloc (AstGen.zig:3280).
                uint32_t type_ref = typeExpr(gz, scope, type_node);
                var_ptr = addUnNode(gz, ZIR_INST_ALLOC, type_ref, node);
                resolve_inferred = false;
            } else {
                // Inferred type: alloc_inferred (AstGen.zig:3291-3296).
                ZirInstTag alloc_tag = is_comptime_init
                    ? ZIR_INST_ALLOC_INFERRED_COMPTIME
                    : ZIR_INST_ALLOC_INFERRED;
                ZirInstData adata;
                adata.node = (int32_t)node - (int32_t)gz->decl_node_index;
                var_ptr = addInstruction(gz, alloc_tag, adata);
                resolve_inferred = true;
            }
            // Evaluate init with RL pointing to alloc (AstGen.zig:3313-3316).
            ResultLoc init_rl;
            if (type_node != 0) {
                init_rl.tag = RL_PTR;
                init_rl.data = var_ptr;
                init_rl.src_node = 0; // upstream: .none (PtrResultLoc.src_node
                                      // defaults to null)
            } else {
                init_rl.tag = RL_INFERRED_PTR;
                init_rl.data = var_ptr;
                init_rl.src_node = 0;
            }
            init_rl.ctx = RI_CTX_CONST_INIT;
            uint32_t init_ref = exprRl(gz, scope, init_rl, init_node);
            if (ag->has_compile_errors)
                return;
            // save_err_ret_index (AstGen.zig:3320-3321).
            if (nodeMayAppendToErrorTrace(tree, init_node))
                addSaveErrRetIndex(gz, init_ref);
            // resolve_inferred_alloc or make_ptr_const (AstGen.zig:3323-3326).
            uint32_t const_ptr;
            if (resolve_inferred)
                const_ptr = addUnNode(
                    gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, var_ptr, node);
            else
                const_ptr
                    = addUnNode(gz, ZIR_INST_MAKE_PTR_CONST, var_ptr, node);
            // dbg_var_ptr (AstGen.zig:3328).
            addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, const_ptr);
            // Create ScopeLocalPtr (AstGen.zig:3330-3340).
            ptr_out->base.tag = SCOPE_LOCAL_PTR;
            ptr_out->parent = *scope_out;
            ptr_out->gen_zir = gz;
            ptr_out->ptr = const_ptr;
            ptr_out->token_src = name_token;
            ptr_out->name = ident_name;
            ptr_out->maybe_comptime = true;
            *scope_out = &ptr_out->base;
        }
    } else {
        // --- VAR path (AstGen.zig:3342-3416) ---
        // A `var` always lives behind a mutable alloc.
        uint32_t alloc_ref;
        bool resolve_inferred = false;
        if (type_node != 0) {
            // Typed var: alloc_mut (AstGen.zig:3361-3375).
            uint32_t type_ref = typeExpr(gz, scope, type_node);
            ZirInstTag alloc_tag = gz->is_comptime
                ? ZIR_INST_ALLOC_COMPTIME_MUT
                : ZIR_INST_ALLOC_MUT;
            alloc_ref = addUnNode(gz, alloc_tag, type_ref, node);
        } else {
            // Inferred type var: alloc_inferred_mut
            // (AstGen.zig:3384-3392).
            ZirInstTag alloc_tag = gz->is_comptime
                ? ZIR_INST_ALLOC_INFERRED_COMPTIME_MUT
                : ZIR_INST_ALLOC_INFERRED_MUT;
            ZirInstData adata;
            adata.node = (int32_t)node - (int32_t)gz->decl_node_index;
            alloc_ref = addInstruction(gz, alloc_tag, adata);
            resolve_inferred = true;
        }
        // Evaluate init with RL pointing to alloc (AstGen.zig:3395-3402).
        ResultLoc var_init_rl;
        if (type_node != 0) {
            var_init_rl.tag = RL_PTR;
            var_init_rl.data = alloc_ref;
            var_init_rl.src_node = 0; // upstream: .none (PtrResultLoc.src_node
                                      // defaults to null)
        } else {
            var_init_rl.tag = RL_INFERRED_PTR;
            var_init_rl.data = alloc_ref;
            var_init_rl.src_node = 0;
        }
        var_init_rl.ctx = RI_CTX_NONE;
        uint32_t init_ref = exprRl(gz, scope, var_init_rl, init_node);
        (void)init_ref;
        if (ag->has_compile_errors)
            return;
        // resolve_inferred_alloc if type was inferred
        // (AstGen.zig:3407-3408).
        uint32_t final_ptr = alloc_ref;
        if (resolve_inferred)
            final_ptr = addUnNode(
                gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, alloc_ref, node);
        // dbg_var_ptr (AstGen.zig:3411).
        addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, final_ptr);
        // Create ScopeLocalPtr (AstGen.zig:3413-3422).
        ptr_out->base.tag = SCOPE_LOCAL_PTR;
        ptr_out->parent = *scope_out;
        ptr_out->gen_zir = gz;
        ptr_out->ptr = final_ptr;
        ptr_out->token_src = name_token;
        ptr_out->name = ident_name;
        // A var is only comptime-known when declared in a comptime scope.
        ptr_out->maybe_comptime = gz->is_comptime;
        *scope_out = &ptr_out->base;
    }
}
// --- addEnsureResult (AstGen.zig:2649) ---
// After evaluating an expression as a statement, decides whether its
// result must be checked for being silently discarded:
//   - call/field_call/builtin_call: set an ensure_result_used bit in
//     the instruction's existing extra flags instead of a new inst;
//   - always-noreturn and always-void instructions: no check needed;
//   - otherwise: emit an ensure_result_used instruction.
// Returns true if the result is noreturn (AstGen.zig:2909), letting the
// caller stop processing unreachable trailing statements.
static bool addEnsureResult(
    GenZir* gz, uint32_t maybe_unused_result, uint32_t statement) {
    AstGenCtx* ag = gz->astgen;
    bool elide_check;
    bool is_noreturn = false;
    // Refs >= ZIR_REF_START_INDEX denote instructions; below that they
    // are named constants (handled in the else branch).
    if (maybe_unused_result >= ZIR_REF_START_INDEX) {
        uint32_t inst = maybe_unused_result - ZIR_REF_START_INDEX;
        ZirInstTag tag = ag->inst_tags[inst];
        switch (tag) {
        // For call/field_call: set ensure_result_used flag
        // (bit 3 of flags at offset 0). Flags *must* be at offset 0
        // (AstGen.zig:2658-2665, Zir.zig:3022).
        case ZIR_INST_CALL:
        case ZIR_INST_FIELD_CALL: {
            uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
            ag->extra[pi] |= (1u << 3); // ensure_result_used
            elide_check = true;
            break;
        }
        // For builtin_call: ensure_result_used is at bit 1, not bit 3.
        case ZIR_INST_BUILTIN_CALL: {
            uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
            ag->extra[pi] |= (1u << 1); // ensure_result_used
            elide_check = true;
            break;
        }
        // Always noreturn → elide (AstGen.zig:2909).
        case ZIR_INST_BREAK:
        case ZIR_INST_BREAK_INLINE:
        case ZIR_INST_CONDBR:
        case ZIR_INST_CONDBR_INLINE:
        case ZIR_INST_RET_NODE:
        case ZIR_INST_RET_LOAD:
        case ZIR_INST_RET_IMPLICIT:
        case ZIR_INST_RET_ERR_VALUE:
        case ZIR_INST_UNREACHABLE:
        case ZIR_INST_REPEAT:
        case ZIR_INST_REPEAT_INLINE:
        case ZIR_INST_PANIC:
        case ZIR_INST_TRAP:
        case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
        case ZIR_INST_SWITCH_CONTINUE:
        case ZIR_INST_COMPILE_ERROR:
            is_noreturn = true;
            elide_check = true;
            break;
        // Always void → elide.
        case ZIR_INST_DBG_STMT:
        case ZIR_INST_DBG_VAR_PTR:
        case ZIR_INST_DBG_VAR_VAL:
        case ZIR_INST_ENSURE_RESULT_USED:
        case ZIR_INST_ENSURE_RESULT_NON_ERROR:
        case ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID:
        case ZIR_INST_EXPORT:
        case ZIR_INST_SET_EVAL_BRANCH_QUOTA:
        case ZIR_INST_ATOMIC_STORE:
        case ZIR_INST_STORE_NODE:
        case ZIR_INST_STORE_TO_INFERRED_PTR:
        case ZIR_INST_RESOLVE_INFERRED_ALLOC:
        case ZIR_INST_SET_RUNTIME_SAFETY:
        case ZIR_INST_MEMCPY:
        case ZIR_INST_MEMSET:
        case ZIR_INST_MEMMOVE:
        case ZIR_INST_VALIDATE_DEREF:
        case ZIR_INST_VALIDATE_DESTRUCTURE:
        case ZIR_INST_SAVE_ERR_RET_INDEX:
        case ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL:
        case ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY:
        case ZIR_INST_VALIDATE_STRUCT_INIT_TY:
        case ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY:
        case ZIR_INST_VALIDATE_PTR_STRUCT_INIT:
        case ZIR_INST_VALIDATE_ARRAY_INIT_TY:
        case ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY:
        case ZIR_INST_VALIDATE_PTR_ARRAY_INIT:
        case ZIR_INST_VALIDATE_REF_TY:
        case ZIR_INST_VALIDATE_CONST:
            elide_check = true;
            break;
        // Extended: check opcode — a handful of extended ops are
        // known void and need no check.
        case ZIR_INST_EXTENDED: {
            uint32_t opcode = ag->inst_datas[inst].extended.opcode;
            elide_check = (opcode == ZIR_EXT_BREAKPOINT
                || opcode == ZIR_EXT_BRANCH_HINT
                || opcode == ZIR_EXT_SET_FLOAT_MODE
                || opcode == ZIR_EXT_DISABLE_INSTRUMENTATION
                || opcode == ZIR_EXT_DISABLE_INTRINSICS);
            break;
        }
        // Everything else: might produce non-void result → emit check.
        default:
            elide_check = false;
            break;
        }
    } else {
        // Named ref constant.
        is_noreturn = (maybe_unused_result == ZIR_REF_UNREACHABLE_VALUE);
        elide_check
            = (is_noreturn || maybe_unused_result == ZIR_REF_VOID_VALUE);
    }
    if (!elide_check) {
        addUnNode(
            gz, ZIR_INST_ENSURE_RESULT_USED, maybe_unused_result, statement);
    }
    return is_noreturn;
}
// --- countDefers (AstGen.zig:2966) ---
// Walks the scope chain from inner_scope up to outer_scope and reports
// which kinds of defer scopes exist in between. Note: if the walk hits
// an unexpected scope tag, it returns early with have_any left false,
// matching the original control flow.
static DeferCounts countDefers(const Scope* outer_scope, Scope* inner_scope) {
    DeferCounts counts = { false, false, false, false };
    for (Scope* s = inner_scope; s != outer_scope;) {
        switch (s->tag) {
        case SCOPE_GEN_ZIR:
            s = ((GenZir*)s)->parent;
            break;
        case SCOPE_LOCAL_VAL:
            s = ((ScopeLocalVal*)s)->parent;
            break;
        case SCOPE_LOCAL_PTR:
            s = ((ScopeLocalPtr*)s)->parent;
            break;
        case SCOPE_DEFER_NORMAL:
            counts.have_normal = true;
            s = ((ScopeDefer*)s)->parent;
            break;
        case SCOPE_DEFER_ERROR:
            // need_err_code would be set here if remapped_err_code
            // (error capture) were implemented; it is not yet.
            counts.have_err = true;
            s = ((ScopeDefer*)s)->parent;
            break;
        default:
            return counts;
        }
    }
    counts.have_any = counts.have_normal || counts.have_err;
    return counts;
}
// --- genDefers (AstGen.zig:3014) ---
// Walks the scope chain from inner to outer, emitting a .defer
// instruction for each defer scope encountered. `which` selects whether
// error defers are included (DEFER_BOTH_SANS_ERR) or skipped
// (DEFER_NORMAL_ONLY).
static void genDefers(
    GenZir* gz, const Scope* outer_scope, Scope* inner_scope, int which) {
    for (Scope* s = inner_scope; s != outer_scope;) {
        switch (s->tag) {
        case SCOPE_GEN_ZIR:
            s = ((GenZir*)s)->parent;
            break;
        case SCOPE_LOCAL_VAL:
            s = ((ScopeLocalVal*)s)->parent;
            break;
        case SCOPE_LOCAL_PTR:
            s = ((ScopeLocalPtr*)s)->parent;
            break;
        case SCOPE_DEFER_NORMAL:
        case SCOPE_DEFER_ERROR: {
            ScopeDefer* d = (ScopeDefer*)s;
            bool is_err_defer = (s->tag == SCOPE_DEFER_ERROR);
            s = d->parent;
            // Error defers only run on the error path; emit them only
            // when the caller asked for both kinds
            // (AstGen.zig:3031, 3038, 3063).
            if (!is_err_defer || which == DEFER_BOTH_SANS_ERR) {
                ZirInstData data;
                data.defer_data.index = d->index;
                data.defer_data.len = d->len;
                addInstruction(gz, ZIR_INST_DEFER, data);
            }
            break;
        }
        case SCOPE_LABEL:
            // Labels live inside GenZir in this port and should not
            // appear on this walk; stop defensively.
            return;
        case SCOPE_NAMESPACE:
        case SCOPE_TOP:
        default:
            return;
        }
    }
}
// --- blockExprStmts (AstGen.zig:2538) ---
// Processes block statements sequentially, threading scope.
static void blockExprStmts(GenZir* gz, Scope* scope,
const uint32_t* statements, uint32_t stmt_count) {
AstGenCtx* ag = gz->astgen;
// Stack-allocated scope storage for local variables and defers.
// Max 64 local variable declarations and 64 defers per block.
ScopeLocalVal val_scopes[64];
ScopeLocalPtr ptr_scopes[64];
ScopeDefer defer_scopes[64];
uint32_t val_idx = 0;
uint32_t ptr_idx = 0;
uint32_t defer_idx = 0;
Scope* cur_scope = scope;
bool noreturn_stmt = false;
for (uint32_t i = 0; i < stmt_count; i++) {
if (ag->has_compile_errors)
return;
uint32_t stmt = statements[i];
AstNodeTag tag = ag->tree->nodes.tags[stmt];
switch (tag) {
case AST_NODE_ASSIGN:
assignStmt(gz, cur_scope, stmt);
break;
// Compound assignment operators (AstGen.zig:2588-2607).
case AST_NODE_ASSIGN_ADD:
assignOp(gz, cur_scope, stmt, ZIR_INST_ADD);
break;
case AST_NODE_ASSIGN_SUB:
assignOp(gz, cur_scope, stmt, ZIR_INST_SUB);
break;
case AST_NODE_ASSIGN_MUL:
assignOp(gz, cur_scope, stmt, ZIR_INST_MUL);
break;
case AST_NODE_ASSIGN_DIV:
assignOp(gz, cur_scope, stmt, ZIR_INST_DIV);
break;
case AST_NODE_ASSIGN_MOD:
assignOp(gz, cur_scope, stmt, ZIR_INST_MOD_REM);
break;
case AST_NODE_ASSIGN_BIT_AND:
assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_AND);
break;
case AST_NODE_ASSIGN_BIT_OR:
assignOp(gz, cur_scope, stmt, ZIR_INST_BIT_OR);
break;
case AST_NODE_ASSIGN_BIT_XOR:
assignOp(gz, cur_scope, stmt, ZIR_INST_XOR);
break;
case AST_NODE_ASSIGN_ADD_WRAP:
assignOp(gz, cur_scope, stmt, ZIR_INST_ADDWRAP);
break;
case AST_NODE_ASSIGN_SUB_WRAP:
assignOp(gz, cur_scope, stmt, ZIR_INST_SUBWRAP);
break;
case AST_NODE_ASSIGN_MUL_WRAP:
assignOp(gz, cur_scope, stmt, ZIR_INST_MULWRAP);
break;
case AST_NODE_ASSIGN_ADD_SAT:
assignOp(gz, cur_scope, stmt, ZIR_INST_ADD_SAT);
break;
case AST_NODE_ASSIGN_SUB_SAT:
assignOp(gz, cur_scope, stmt, ZIR_INST_SUB_SAT);
break;
case AST_NODE_ASSIGN_MUL_SAT:
assignOp(gz, cur_scope, stmt, ZIR_INST_MUL_SAT);
break;
case AST_NODE_SIMPLE_VAR_DECL:
case AST_NODE_LOCAL_VAR_DECL:
case AST_NODE_ALIGNED_VAR_DECL:
if (val_idx < 64 && ptr_idx < 64) {
varDecl(gz, cur_scope, stmt, &val_scopes[val_idx],
&ptr_scopes[ptr_idx], &cur_scope);
// Check which one was used: if scope now points to
// val_scopes[val_idx], advance val_idx; same for ptr.
if (cur_scope == &val_scopes[val_idx].base)
val_idx++;
else if (cur_scope == &ptr_scopes[ptr_idx].base)
ptr_idx++;
} else {
SET_ERROR(ag);
}
break;
// defer/errdefer (AstGen.zig:2580-2581).
case AST_NODE_DEFER:
case AST_NODE_ERRDEFER: {
if (defer_idx >= 64) {
SET_ERROR(ag);
break;
}
ScopeTag scope_tag = (tag == AST_NODE_DEFER) ? SCOPE_DEFER_NORMAL
: SCOPE_DEFER_ERROR;
// Create sub-block for defer body (AstGen.zig:3123-3126).
GenZir defer_gen = makeSubBlock(gz, cur_scope);
// Evaluate deferred expression (AstGen.zig:3165).
// DEFER: lhs is the deferred expression, rhs = 0.
// ERRDEFER: lhs is optional error capture token, rhs is expr.
AstData dnd = ag->tree->nodes.datas[stmt];
uint32_t expr_node;
if (tag == AST_NODE_DEFER) {
expr_node = dnd.lhs;
} else {
expr_node = dnd.rhs;
}
// unusedResultExpr pattern (AstGen.zig:3165, 2641-2646).
emitDbgNode(&defer_gen, expr_node);
uint32_t defer_result
= expr(&defer_gen, &defer_gen.base, expr_node);
addEnsureResult(&defer_gen, defer_result, expr_node);
// Add break_inline at end (AstGen.zig:3167).
addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0, ZIR_REF_VOID_VALUE,
AST_NODE_OFFSET_NONE);
// Write body to extra (AstGen.zig:3173-3175).
uint32_t raw_body_len = gzInstructionsLen(&defer_gen);
const uint32_t* body = gzInstructionsSlice(&defer_gen);
uint32_t extra_index = ag->extra_len;
uint32_t fixup_len
= countBodyLenAfterFixups(ag, body, raw_body_len);
ensureExtraCapacity(ag, fixup_len);
for (uint32_t b = 0; b < raw_body_len; b++)
appendPossiblyRefdBodyInst(ag, body[b]);
gzUnstack(&defer_gen);
// Create scope (AstGen.zig:3179-3185).
defer_scopes[defer_idx] = (ScopeDefer) {
.base = { .tag = scope_tag },
.parent = cur_scope,
.index = extra_index,
.len = fixup_len,
};
cur_scope = &defer_scopes[defer_idx].base;
defer_idx++;
break;
}
// while/for as statements (AstGen.zig:2605-2610).
// These do NOT get emitDbgNode; they emit their own dbg_stmt.
case AST_NODE_WHILE_SIMPLE:
case AST_NODE_WHILE_CONT:
case AST_NODE_WHILE:
(void)whileExpr(gz, cur_scope, stmt, true);
break;
case AST_NODE_FOR_SIMPLE:
case AST_NODE_FOR:
(void)forExpr(gz, cur_scope, stmt, true);
break;
default: {
// Expression statement (AstGen.zig:2627 unusedResultExpr).
emitDbgNode(gz, stmt);
uint32_t result = expr(gz, cur_scope, stmt);
noreturn_stmt = addEnsureResult(gz, result, stmt);
break;
}
}
}
// Emit normal defers at block exit (AstGen.zig:2633-2634).
if (!noreturn_stmt) {
genDefers(gz, scope, cur_scope, DEFER_NORMAL_ONLY);
}
}
// --- fullBodyExpr (AstGen.zig:2358) ---
// Lowers a body expression. An unlabeled block has its statements emitted
// directly into a sub-block without creating a BLOCK instruction; a labeled
// block is routed through blockExprExpr (which does create one), and a
// non-block node is lowered as a plain expression. Returns the result ref.
static uint32_t fullBodyExpr(
    GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
    const Ast* tree = gz->astgen->tree;
    AstData data = tree->nodes.datas[node];
    uint32_t two_buf[2];
    const uint32_t* stmts;
    uint32_t count;
    // Gather the block's statement list (AstGen.zig:2368).
    switch (tree->nodes.tags[node]) {
    case AST_NODE_BLOCK_TWO:
    case AST_NODE_BLOCK_TWO_SEMICOLON: {
        // Up to two inline statements; 0 marks "none".
        uint32_t len = 0;
        if (data.lhs != 0)
            two_buf[len++] = data.lhs;
        if (data.rhs != 0)
            two_buf[len++] = data.rhs;
        stmts = two_buf;
        count = len;
        break;
    }
    case AST_NODE_BLOCK:
    case AST_NODE_BLOCK_SEMICOLON:
        // Statement nodes live in extra_data[lhs..rhs].
        stmts = tree->extra_data.arr + data.lhs;
        count = data.rhs - data.lhs;
        break;
    default:
        // Not a block — lower as a single expression (AstGen.zig:2369).
        return exprRl(gz, scope, rl, node);
    }
    // A "name:" label directly before the lbrace means breaks can target
    // this block, so a real block instruction is required
    // (AstGen.zig:2373-2377).
    uint32_t lbrace = tree->nodes.main_tokens[node];
    if (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
        && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER) {
        return blockExprExpr(gz, scope, rl, node);
    }
    // Unlabeled: emit the statements inline (AstGen.zig:2380-2383).
    GenZir body_gz = makeSubBlock(gz, scope);
    blockExprStmts(&body_gz, &body_gz.base, stmts, count);
    return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
}
// --- lastToken (Ast.zig:874) ---
// Mechanical port of Ast.lastToken. Uses iterative end_offset accumulation.
// Walks from `node` down to the child node that syntactically ends the
// construct. Each loop iteration either descends into that child — adding
// to `end_offset` the number of trailing closing tokens (rparen, rbrace,
// comma, semicolon, ...) that follow it — or terminates by returning a
// token index plus the accumulated offset.
static uint32_t lastToken(const Ast* tree, uint32_t node) {
    uint32_t n = node;
    uint32_t end_offset = 0;
    while (1) {
        AstNodeTag tag = tree->nodes.tags[n];
        AstData nd = tree->nodes.datas[n];
        switch (tag) {
        case AST_NODE_ROOT:
            // Root spans the whole file; its last token is the final token.
            return tree->tokens.len - 1;
        // Binary ops: recurse into RHS (Ast.zig:893-948).
        case AST_NODE_ASSIGN:
        case AST_NODE_ADD:
        case AST_NODE_SUB:
        case AST_NODE_MUL:
        case AST_NODE_DIV:
        case AST_NODE_MOD:
        case AST_NODE_BIT_AND:
        case AST_NODE_BIT_OR:
        case AST_NODE_BIT_XOR:
        case AST_NODE_SHL:
        case AST_NODE_SHR:
        case AST_NODE_ARRAY_CAT:
        case AST_NODE_ARRAY_MULT:
        case AST_NODE_ADD_WRAP:
        case AST_NODE_SUB_WRAP:
        case AST_NODE_ADD_SAT:
        case AST_NODE_SUB_SAT:
        case AST_NODE_MUL_WRAP:
        case AST_NODE_MUL_SAT:
        case AST_NODE_MERGE_ERROR_SETS:
        case AST_NODE_EQUAL_EQUAL:
        case AST_NODE_BANG_EQUAL:
        case AST_NODE_LESS_THAN:
        case AST_NODE_GREATER_THAN:
        case AST_NODE_LESS_OR_EQUAL:
        case AST_NODE_GREATER_OR_EQUAL:
        case AST_NODE_BOOL_AND:
        case AST_NODE_BOOL_OR:
        case AST_NODE_ORELSE:
        case AST_NODE_CATCH:
        case AST_NODE_ERROR_UNION:
        case AST_NODE_SHL_SAT:
            n = nd.rhs;
            continue;
        // field_access: return field token + end_offset (Ast.zig:979).
        // For field_access, rhs is the field-name TOKEN index, not a node.
        case AST_NODE_FIELD_ACCESS:
            return nd.rhs + end_offset;
        // test_decl: recurse into body node (Ast.zig:950).
        case AST_NODE_TEST_DECL:
            n = nd.rhs;
            continue;
        // defer: recurse into body (lhs) (Ast.zig:951).
        case AST_NODE_DEFER:
            n = nd.lhs;
            continue;
        // errdefer: recurse into body (rhs) (Ast.zig:950).
        case AST_NODE_ERRDEFER:
            n = nd.rhs;
            continue;
        // block (Ast.zig:1085): end_offset += 1 (rbrace), recurse into last.
        case AST_NODE_BLOCK: {
            uint32_t start = nd.lhs;
            uint32_t end = nd.rhs;
            assert(start != end);
            end_offset += 1;
            n = tree->extra_data.arr[end - 1];
            continue;
        }
        // block_semicolon (Ast.zig:1097): += 2 (semicolon + rbrace).
        case AST_NODE_BLOCK_SEMICOLON: {
            uint32_t start = nd.lhs;
            uint32_t end = nd.rhs;
            assert(start != end);
            end_offset += 2;
            n = tree->extra_data.arr[end - 1];
            continue;
        }
        // block_two (Ast.zig:1117): if rhs, recurse rhs +1; if lhs, +1; else
        // +1. Note: C parser uses 0 for "none" (OptionalIndex), not
        // UINT32_MAX.
        case AST_NODE_BLOCK_TWO: {
            if (nd.rhs != 0) {
                end_offset += 1;
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 1;
                n = nd.lhs;
            } else {
                // Empty block: main_token is the lbrace, +1 is the rbrace.
                end_offset += 1;
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        }
        // block_two_semicolon (Ast.zig:1153).
        case AST_NODE_BLOCK_TWO_SEMICOLON: {
            if (nd.rhs != 0) {
                end_offset += 2;
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 2;
                n = nd.lhs;
            } else {
                end_offset += 1;
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        }
        // builtin_call_two (Ast.zig:1118): recurse into args + rparen.
        case AST_NODE_BUILTIN_CALL_TWO: {
            if (nd.rhs != 0) {
                end_offset += 1;
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 1;
                n = nd.lhs;
            } else {
                end_offset += 2; // lparen + rparen
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        }
        case AST_NODE_BUILTIN_CALL_TWO_COMMA: {
            if (nd.rhs != 0) {
                end_offset += 2; // comma + rparen
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 2;
                n = nd.lhs;
            } else {
                end_offset += 1;
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        }
        // Unary ops: recurse into lhs (Ast.zig:895-910).
        case AST_NODE_BOOL_NOT:
        case AST_NODE_BIT_NOT:
        case AST_NODE_NEGATION:
        case AST_NODE_NEGATION_WRAP:
        case AST_NODE_ADDRESS_OF:
        case AST_NODE_TRY:
        case AST_NODE_AWAIT:
        case AST_NODE_OPTIONAL_TYPE:
        case AST_NODE_COMPTIME:
        case AST_NODE_NOSUSPEND:
        case AST_NODE_RESUME:
            n = nd.lhs;
            continue;
        // return: optional operand (Ast.zig:998-1002).
        case AST_NODE_RETURN:
            if (nd.lhs != 0) {
                n = nd.lhs;
                continue;
            }
            return tree->nodes.main_tokens[n] + end_offset;
        // deref: main_token is the dot, +1 for '*' (Ast.zig:974).
        case AST_NODE_DEREF:
            return tree->nodes.main_tokens[n] + 1 + end_offset;
        // unwrap_optional: +1 for '?' (Ast.zig:971).
        case AST_NODE_UNWRAP_OPTIONAL:
            return tree->nodes.main_tokens[n] + 1 + end_offset;
        // for_range: recurse into rhs if present, else lhs.
        case AST_NODE_FOR_RANGE:
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                // Unbounded range: last token is the '..' operator.
                // main_token + 1 (the second dot of ..)
                // NOTE(review): if this tokenizer emits `..` as a single
                // ellipsis2 token (as upstream Zig's does), the +1 here
                // overshoots by one token — confirm against the tokenizer.
                return tree->nodes.main_tokens[n] + 1 + end_offset;
            }
            continue;
        // error_value: main_token is `error`, last token is name (+2)
        // (Ast.zig:986).
        case AST_NODE_ERROR_VALUE:
            return tree->nodes.main_tokens[n] + 2 + end_offset;
        // Terminals: return main_token + end_offset (Ast.zig:988-996).
        case AST_NODE_NUMBER_LITERAL:
        case AST_NODE_STRING_LITERAL:
        case AST_NODE_IDENTIFIER:
        case AST_NODE_ENUM_LITERAL:
        case AST_NODE_CHAR_LITERAL:
        case AST_NODE_UNREACHABLE_LITERAL:
        case AST_NODE_ANYFRAME_LITERAL:
            return tree->nodes.main_tokens[n] + end_offset;
        // call_one: recurse into lhs, +1 for ')'.
        case AST_NODE_CALL_ONE:
            end_offset += 1; // rparen
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                // No argument: the callee expression ends before ')'.
                n = nd.lhs;
            }
            continue;
        case AST_NODE_CALL_ONE_COMMA:
            end_offset += 2; // comma + rparen
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                n = nd.lhs;
            }
            continue;
        // array_access: end_offset += 1 (rbracket), recurse rhs.
        case AST_NODE_ARRAY_ACCESS:
            end_offset += 1;
            n = nd.rhs;
            continue;
        // simple_var_decl: recurse into init/type (Ast.zig:1169-1178).
        case AST_NODE_SIMPLE_VAR_DECL:
            if (nd.rhs != 0) {
                n = nd.rhs; // init expr
            } else if (nd.lhs != 0) {
                n = nd.lhs; // type expr
            } else {
                end_offset += 1; // from mut token to name
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        // aligned_var_decl: recurse into init/align (Ast.zig:1180-1187).
        case AST_NODE_ALIGNED_VAR_DECL:
            if (nd.rhs != 0) {
                n = nd.rhs; // init expr
            } else {
                end_offset += 1; // rparen
                n = nd.lhs; // align expr
            }
            continue;
        // local_var_decl (Ast.zig:1209-1217).
        case AST_NODE_LOCAL_VAR_DECL:
            if (nd.rhs != 0) {
                n = nd.rhs; // init expr
            } else {
                // extra[lhs] has align_node
                end_offset += 1; // rparen
                n = tree->extra_data.arr[nd.lhs]; // align_node
            }
            continue;
        // global_var_decl (Ast.zig:1189-1207).
        case AST_NODE_GLOBAL_VAR_DECL:
            if (nd.rhs != 0) {
                n = nd.rhs; // init expr
            } else {
                // extra[lhs] = {type_node, align_node, ...}
                // complex; approximate by using main_token
                // NOTE(review): upstream walks the section/align/type
                // nodes here; this approximation can point at the wrong
                // token for globals with those clauses — confirm callers
                // tolerate it.
                end_offset += 1;
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        // slice_open: end_offset += 2 (ellipsis2 + rbracket), recurse rhs
        // (Ast.zig:1245-1248).
        case AST_NODE_SLICE_OPEN:
            end_offset += 2;
            n = nd.rhs;
            continue;
        // grouped_expression: end_offset += 1 (rparen), recurse lhs.
        case AST_NODE_GROUPED_EXPRESSION:
            end_offset += 1;
            n = nd.lhs;
            continue;
        // if_simple: recurse into body (rhs) (Ast.zig:942).
        case AST_NODE_IF_SIMPLE:
        case AST_NODE_WHILE_SIMPLE:
        case AST_NODE_FOR_SIMPLE:
        case AST_NODE_FN_DECL:
        case AST_NODE_ARRAY_TYPE:
            n = nd.rhs;
            continue;
        // if: recurse into else_expr (Ast.zig:1295).
        case AST_NODE_IF: {
            // If[rhs]: { then_expr, else_expr }
            n = tree->extra_data.arr[nd.rhs + 1]; // else_expr
            continue;
        }
        // while: recurse into else_expr (Ast.zig:1290).
        case AST_NODE_WHILE: {
            // While[rhs]: { cont_expr, then_expr, else_expr }
            n = tree->extra_data.arr[nd.rhs + 2]; // else_expr
            continue;
        }
        // while_cont: recurse into then_expr (Ast.zig:943-like).
        case AST_NODE_WHILE_CONT: {
            // WhileCont[rhs]: { cont_expr, then_expr }
            n = tree->extra_data.arr[nd.rhs + 1]; // then_expr
            continue;
        }
        // switch: recurse into last case (Ast.zig:1031-1041).
        case AST_NODE_SWITCH: {
            uint32_t ei = nd.rhs;
            uint32_t cs = tree->extra_data.arr[ei];
            uint32_t ce = tree->extra_data.arr[ei + 1];
            if (cs == ce) {
                // No cases: last child is the condition (lhs).
                end_offset += 3; // rparen, lbrace, rbrace
                n = nd.lhs;
            } else {
                end_offset += 1; // rbrace
                n = tree->extra_data.arr[ce - 1];
            }
            continue;
        }
        case AST_NODE_SWITCH_COMMA: {
            uint32_t ei = nd.rhs;
            uint32_t cs = tree->extra_data.arr[ei];
            uint32_t ce = tree->extra_data.arr[ei + 1];
            assert(cs != ce);
            end_offset += 2; // comma + rbrace
            n = tree->extra_data.arr[ce - 1];
            continue;
        }
        // switch_case_one: recurse into rhs (body) (Ast.zig:942).
        case AST_NODE_SWITCH_CASE_ONE:
        case AST_NODE_SWITCH_CASE_INLINE_ONE:
        case AST_NODE_SWITCH_CASE:
        case AST_NODE_SWITCH_CASE_INLINE:
            n = nd.rhs;
            continue;
        // switch_range: recurse into rhs (Ast.zig: binary op pattern).
        case AST_NODE_SWITCH_RANGE:
            n = nd.rhs;
            continue;
        // struct_init_one: recurse into field if present, +1.
        case AST_NODE_STRUCT_INIT_ONE:
            end_offset += 1; // rbrace
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        case AST_NODE_STRUCT_INIT_ONE_COMMA:
            end_offset += 2; // comma + rbrace
            n = nd.rhs;
            continue;
        // struct_init_dot_two: similar to block_two.
        case AST_NODE_STRUCT_INIT_DOT_TWO:
            if (nd.rhs != 0) {
                end_offset += 1;
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 1;
                n = nd.lhs;
            } else {
                end_offset += 1; // rbrace
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
            end_offset += 2;
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                n = nd.lhs;
            }
            continue;
        // struct_init_dot: SubRange pattern.
        case AST_NODE_STRUCT_INIT_DOT:
            assert(nd.lhs != nd.rhs);
            end_offset += 1;
            n = tree->extra_data.arr[nd.rhs - 1];
            continue;
        // struct_init: node_and_extra SubRange pattern.
        case AST_NODE_STRUCT_INIT: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 1;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        // call: SubRange pattern.
        case AST_NODE_CALL: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 1;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        case AST_NODE_CALL_COMMA: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 2;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        // fn_proto_simple: recurse into rhs (return type).
        case AST_NODE_FN_PROTO_SIMPLE:
        case AST_NODE_FN_PROTO_ONE:
        case AST_NODE_FN_PROTO_MULTI:
        case AST_NODE_FN_PROTO:
            n = nd.rhs;
            continue;
        // error_set_decl: rhs is the closing rbrace token.
        case AST_NODE_ERROR_SET_DECL:
            return nd.rhs + end_offset;
        // ptr_type variants: recurse into rhs (child type).
        case AST_NODE_PTR_TYPE_ALIGNED:
        case AST_NODE_PTR_TYPE_SENTINEL:
        case AST_NODE_PTR_TYPE:
        case AST_NODE_PTR_TYPE_BIT_RANGE:
            n = nd.rhs;
            continue;
        // container_decl: extra_range pattern.
        case AST_NODE_CONTAINER_DECL:
        case AST_NODE_TAGGED_UNION:
            assert(nd.lhs != nd.rhs);
            end_offset += 1;
            n = tree->extra_data.arr[nd.rhs - 1];
            continue;
        case AST_NODE_CONTAINER_DECL_TRAILING:
        case AST_NODE_TAGGED_UNION_TRAILING:
            assert(nd.lhs != nd.rhs);
            end_offset += 2;
            n = tree->extra_data.arr[nd.rhs - 1];
            continue;
        // container_decl_two: like block_two.
        case AST_NODE_CONTAINER_DECL_TWO:
        case AST_NODE_TAGGED_UNION_TWO:
            if (nd.rhs != 0) {
                end_offset += 1;
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 1;
                n = nd.lhs;
            } else {
                end_offset += 2; // lbrace + rbrace
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
        case AST_NODE_TAGGED_UNION_TWO_TRAILING:
            end_offset += 2;
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                n = nd.lhs;
            }
            continue;
        // container_decl_arg: node_and_extra SubRange.
        case AST_NODE_CONTAINER_DECL_ARG: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            if (si == se) {
                // No members: last child is the arg expression (lhs).
                end_offset += 3; // rparen + lbrace + rbrace
                n = nd.lhs;
            } else {
                end_offset += 1;
                n = tree->extra_data.arr[se - 1];
            }
            continue;
        }
        case AST_NODE_CONTAINER_DECL_ARG_TRAILING: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 2;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        // slice: extra data pattern.
        case AST_NODE_SLICE: {
            // Slice[rhs]: { start, end }
            end_offset += 1;
            n = tree->extra_data.arr[nd.rhs + 1]; // end
            continue;
        }
        case AST_NODE_SLICE_SENTINEL: {
            // SliceSentinel[rhs]: { start, end, sentinel }
            end_offset += 1;
            n = tree->extra_data.arr[nd.rhs + 2]; // sentinel
            continue;
        }
        // array_type_sentinel: extra data.
        case AST_NODE_ARRAY_TYPE_SENTINEL: {
            // ArrayTypeSentinel[rhs]: { sentinel, elem_type }
            n = tree->extra_data.arr[nd.rhs + 1]; // elem_type
            continue;
        }
        // multiline_string_literal: main_token + end_offset.
        // Here rhs is the last line's TOKEN index, not a node.
        case AST_NODE_MULTILINE_STRING_LITERAL:
            return nd.rhs + end_offset;
        // break/continue (Ast.zig:1275-1283).
        // lhs is opt_token (null_token = UINT32_MAX), rhs is opt_node (0 =
        // none).
        case AST_NODE_BREAK:
        case AST_NODE_CONTINUE:
            if (nd.rhs != 0) {
                n = nd.rhs; // optional rhs expression
            } else if (nd.lhs != UINT32_MAX) {
                return nd.lhs + end_offset; // label token
            } else {
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        // array_init_one: end_offset += 1 (rbrace), recurse rhs
        // (Ast.zig:1224-1230).
        case AST_NODE_ARRAY_INIT_ONE:
            end_offset += 1;
            n = nd.rhs;
            continue;
        case AST_NODE_ARRAY_INIT_ONE_COMMA:
            end_offset += 2; // comma + rbrace
            n = nd.rhs;
            continue;
        // struct_init_dot_comma: SubRange pattern.
        case AST_NODE_STRUCT_INIT_DOT_COMMA:
            assert(nd.lhs != nd.rhs);
            end_offset += 2; // comma + rbrace
            n = tree->extra_data.arr[nd.rhs - 1];
            continue;
        // struct_init_comma: node_and_extra SubRange.
        case AST_NODE_STRUCT_INIT_COMMA: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 2;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        // array_init variants.
        case AST_NODE_ARRAY_INIT: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 1;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        case AST_NODE_ARRAY_INIT_COMMA: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 2;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        // array_init_dot variants.
        case AST_NODE_ARRAY_INIT_DOT_TWO:
            if (nd.rhs != 0) {
                end_offset += 1;
                n = nd.rhs;
            } else if (nd.lhs != 0) {
                end_offset += 1;
                n = nd.lhs;
            } else {
                end_offset += 1;
                return tree->nodes.main_tokens[n] + end_offset;
            }
            continue;
        case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
            end_offset += 2;
            if (nd.rhs != 0) {
                n = nd.rhs;
            } else {
                n = nd.lhs;
            }
            continue;
        case AST_NODE_ARRAY_INIT_DOT:
            assert(nd.lhs != nd.rhs);
            end_offset += 1;
            n = tree->extra_data.arr[nd.rhs - 1];
            continue;
        case AST_NODE_ARRAY_INIT_DOT_COMMA:
            assert(nd.lhs != nd.rhs);
            end_offset += 2;
            n = tree->extra_data.arr[nd.rhs - 1];
            continue;
        // builtin_call (Ast.zig:1083-1105).
        case AST_NODE_BUILTIN_CALL: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 1;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        case AST_NODE_BUILTIN_CALL_COMMA: {
            uint32_t si = tree->extra_data.arr[nd.rhs];
            uint32_t se = tree->extra_data.arr[nd.rhs + 1];
            assert(si != se);
            end_offset += 2;
            n = tree->extra_data.arr[se - 1];
            continue;
        }
        // for (Ast.zig:1300-1303): complex extra data.
        case AST_NODE_FOR: {
            // lhs = span.start (extra_data index),
            // rhs = packed(inputs:u31, has_else:u1 at bit 31).
            // extra[lhs..] = input nodes, then_body, [else_body].
            uint32_t span_start = nd.lhs;
            uint32_t for_packed = nd.rhs;
            uint32_t inputs = for_packed & 0x7FFFFFFFu;
            bool has_else = (for_packed >> 31) != 0;
            // Last child: else_body when present, otherwise then_body.
            uint32_t last_idx = span_start + inputs + (has_else ? 1 : 0);
            n = tree->extra_data.arr[last_idx];
            continue;
        }
        default:
            // Fallback: return main_token + end_offset.
            return tree->nodes.main_tokens[n] + end_offset;
        }
    }
}
// --- addParam (AstGen.zig:12390) ---
// Emits a `param` instruction: the parameter's name and its (possibly
// empty) type body go into `extra`, and the instruction carries pl_tok
// data pointing at that payload. Unstacks `param_gz`, appends the new
// instruction to `gz`, and returns its index.
static uint32_t addParam(GenZir* gz, GenZir* param_gz, ZirInstTag tag,
    uint32_t abs_tok_index, uint32_t name) {
    AstGenCtx* ag = gz->astgen;
    const uint32_t* type_body = gzInstructionsSlice(param_gz);
    uint32_t type_body_len = gzInstructionsLen(param_gz);
    // Payload layout: name, packed type{body_len:u31, is_generic:u1},
    // then the type-body instructions.
    ensureExtraCapacity(ag, 2 + type_body_len);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = name;
    // is_generic (bit 31) is always clear here.
    ag->extra[ag->extra_len++] = type_body_len & 0x7FFFFFFFu;
    for (uint32_t i = 0; i < type_body_len; i++)
        ag->extra[ag->extra_len++] = type_body[i];
    gzUnstack(param_gz);
    // Append the param instruction itself.
    ensureInstCapacity(ag, 1);
    uint32_t new_index = ag->inst_len;
    ZirInstData inst_data;
    inst_data.pl_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index);
    inst_data.pl_tok.payload_index = payload_index;
    ag->inst_tags[new_index] = tag;
    ag->inst_datas[new_index] = inst_data;
    ag->inst_len++;
    gzAppendInstruction(gz, new_index);
    return new_index;
}
// --- addDbgVar (AstGen.zig:13196) ---
// Emits a debug-variable instruction (tag chosen by the caller) pairing a
// local's name with its value/pointer instruction. Comptime scopes carry
// no runtime debug info, so nothing is emitted there.
static void addDbgVar(
    GenZir* gz, ZirInstTag tag, uint32_t name, uint32_t inst) {
    if (gz->is_comptime)
        return;
    ZirInstData payload = { .str_op = { .str = name, .operand = inst } };
    addInstruction(gz, tag, payload);
}
// --- addFunc (AstGen.zig:12023) ---
// Handles non-fancy func/func_inferred instructions.
// ret_body/ret_body_len: instructions for the return type sub-block (may be
// 0). ret_ref: if ret_body_len==0, the return type as a simple Ref.
// Writes the Zir.Inst.Func payload (ret_ty, param_block, body_len), the
// trailing return-type body or ref, the fixed-up function body with
// extra_refs for the param instructions, the brace source locations, and a
// (currently zeroed) proto hash — then emits the func instruction into
// `gz` and returns its index.
static uint32_t addFunc(GenZir* gz, uint32_t src_node, uint32_t block_node,
    uint32_t param_block, uint32_t ret_ref, const uint32_t* ret_body,
    uint32_t ret_body_len, const uint32_t* body, uint32_t body_len,
    const uint32_t* param_insts, uint32_t param_insts_len,
    uint32_t lbrace_line, uint32_t lbrace_column, bool is_inferred_error) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    // Locate the closing brace and record its line/column relative to the
    // function's declaration line.
    uint32_t rbrace_tok = lastToken(tree, block_node);
    uint32_t rbrace_start = tree->tokens.starts[rbrace_tok];
    advanceSourceCursor(ag, rbrace_start);
    uint32_t rbrace_line = ag->source_line - gz->decl_line;
    uint32_t rbrace_column = ag->source_column;
    // Build Func payload (Zir.Inst.Func: ret_ty, param_block, body_len).
    // (AstGen.zig:12187-12194)
    uint32_t ret_ty_packed_len;
    if (ret_body_len > 0) {
        ret_ty_packed_len = ret_body_len; // body-based return type
    } else if (ret_ref != ZIR_REF_NONE) {
        ret_ty_packed_len = 1; // simple Ref
    } else {
        ret_ty_packed_len = 0; // void return
    }
    // Pack RetTy: body_len:u31 | is_generic:bool(u1) = just body_len.
    uint32_t ret_ty_packed
        = ret_ty_packed_len & 0x7FFFFFFFu; // is_generic=false
    uint32_t fixup_body_len = countBodyLenAfterFixupsExtraRefs(
        ag, body, body_len, param_insts, param_insts_len);
    // Reserve: 3 header words + ret_ty trailing + body + 3 SrcLocs words
    // + 4 proto_hash words.
    ensureExtraCapacity(ag, 3 + ret_ty_packed_len + fixup_body_len + 7);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++] = ret_ty_packed; // Func.ret_ty
    ag->extra[ag->extra_len++] = param_block; // Func.param_block
    ag->extra[ag->extra_len++] = fixup_body_len; // Func.body_len
    // Trailing ret_ty: either body instructions or a single ref.
    if (ret_body_len > 0) {
        for (uint32_t i = 0; i < ret_body_len; i++)
            ag->extra[ag->extra_len++] = ret_body[i];
    } else if (ret_ref != ZIR_REF_NONE) {
        ag->extra[ag->extra_len++] = ret_ref;
    }
    // Body instructions with extra_refs for param_insts
    // (AstGen.zig:12206).
    appendBodyWithFixupsExtraRefs(
        ag, body, body_len, param_insts, param_insts_len);
    // SrcLocs (AstGen.zig:12098-12106).
    // Columns are packed as two u16 halves: lbrace low, rbrace high.
    uint32_t columns = (lbrace_column & 0xFFFFu) | (rbrace_column << 16);
    ag->extra[ag->extra_len++] = lbrace_line;
    ag->extra[ag->extra_len++] = rbrace_line;
    ag->extra[ag->extra_len++] = columns;
    // proto_hash (4 words): zero for now.
    ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = 0;
    // Emit the func instruction (AstGen.zig:12220-12226).
    ZirInstTag tag
        = is_inferred_error ? ZIR_INST_FUNC_INFERRED : ZIR_INST_FUNC;
    ZirInstData data;
    // src_node is stored relative to the enclosing declaration node.
    data.pl_node.src_node = (int32_t)src_node - (int32_t)gz->decl_node_index;
    data.pl_node.payload_index = payload_index;
    return addInstruction(gz, tag, data);
}
// --- testDecl (AstGen.zig:4708) ---
// Lowers a `test "name" { ... }` declaration: creates a declaration
// instruction, lowers the test body as an anonymous function returning
// anyerror!void, and finalizes via setDeclaration. `wip_decl_insts` and
// `decl_idx` collect declaration instruction indices for the enclosing
// container; `node` is the test_decl AST node.
static void testDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
    uint32_t* decl_idx, uint32_t node) {
    const Ast* tree = ag->tree;
    AstData nd = tree->nodes.datas[node];
    uint32_t body_node = nd.rhs;
    // makeDeclaration before advanceSourceCursorToNode (AstGen.zig:4726-4729).
    uint32_t decl_inst = makeDeclaration(ag, node);
    wip_decl_insts[*decl_idx] = decl_inst;
    (*decl_idx)++;
    advanceSourceCursorToNode(ag, node);
    uint32_t decl_line = ag->source_line;
    uint32_t decl_column = ag->source_column;
    // Extract test name (AstGen.zig:4748-4835).
    uint32_t test_token = tree->nodes.main_tokens[node];
    uint32_t test_name_token = test_token + 1;
    uint32_t test_name = 0; // NullTerminatedString.empty
    DeclFlagsId decl_id = DECL_ID_UNNAMED_TEST;
    // Check if the token after 'test' is a string literal.
    // We identify string literals by checking the source character.
    uint32_t name_tok_start = tree->tokens.starts[test_name_token];
    if (name_tok_start < tree->source_len
        && tree->source[name_tok_start] == '"') {
        // String literal name.
        uint32_t name_len;
        strLitAsString(ag, test_name_token, &test_name, &name_len);
        decl_id = DECL_ID_TEST;
    }
    // TODO: handle identifier test names (decltest).
    // Set up decl_block GenZir (AstGen.zig:4735-4743).
    // decl_block is the comptime scope holding the func instruction.
    GenZir decl_block;
    memset(&decl_block, 0, sizeof(decl_block));
    decl_block.base.tag = SCOPE_GEN_ZIR;
    decl_block.parent = NULL;
    decl_block.astgen = ag;
    decl_block.decl_node_index = node;
    decl_block.decl_line = decl_line;
    decl_block.is_comptime = true;
    decl_block.instructions_top = ag->scratch_inst_len;
    decl_block.break_block = UINT32_MAX;
    // Set up fn_block GenZir (AstGen.zig:4837-4845).
    // fn_block is the runtime scope for the test body itself.
    GenZir fn_block;
    memset(&fn_block, 0, sizeof(fn_block));
    fn_block.base.tag = SCOPE_GEN_ZIR;
    fn_block.parent = &decl_block.base;
    fn_block.astgen = ag;
    fn_block.decl_node_index = node;
    fn_block.decl_line = decl_line;
    fn_block.is_comptime = false;
    fn_block.instructions_top = ag->scratch_inst_len;
    fn_block.break_block = UINT32_MAX;
    // Set fn_block and fn_ret_ty for the body (AstGen.zig:4849-4853).
    // Saved and restored around the body so nested decls see their own.
    void* prev_fn_block = ag->fn_block;
    uint32_t prev_fn_ret_ty = ag->fn_ret_ty;
    setFnBlock(ag, &fn_block);
    ag->fn_ret_ty = ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE;
    // Compute lbrace source location (AstGen.zig:4860-4862).
    advanceSourceCursorToNode(ag, body_node);
    uint32_t lbrace_line = ag->source_line - decl_line;
    uint32_t lbrace_column = ag->source_column;
    // Process test body (AstGen.zig:4864).
    uint32_t block_result
        = fullBodyExpr(&fn_block, &fn_block.base, RL_NONE_VAL, body_node);
    ag->fn_block = prev_fn_block;
    ag->fn_ret_ty = prev_fn_ret_ty;
    // If we hit unimplemented features, bail out.
    if (ag->has_compile_errors)
        return;
    // Add restore_err_ret_index + ret_implicit (AstGen.zig:4865-4871).
    // Only when the body does not already end in a noreturn instruction.
    if (gzInstructionsLen(&fn_block) == 0
        || !refIsNoReturn(&fn_block, block_result)) {
        ZirInstData rdata;
        rdata.un_node.operand = ZIR_REF_NONE; // .none for .ret
        rdata.un_node.src_node
            = (int32_t)node - (int32_t)fn_block.decl_node_index;
        addInstruction(
            &fn_block, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
        uint32_t body_last_tok = lastToken(tree, body_node);
        ZirInstData rdata2;
        rdata2.un_tok.operand = ZIR_REF_VOID_VALUE;
        rdata2.un_tok.src_tok = tokenIndexToRelative(&fn_block, body_last_tok);
        addInstruction(&fn_block, ZIR_INST_RET_IMPLICIT, rdata2);
    }
    // Read fn_block body before unstacking (AstGen.zig:4874).
    // Upstream unstacks fn_block inside addFunc before appending the func
    // instruction to decl_block. We must unstack fn_block first so that
    // addFunc's addInstruction goes into decl_block's range.
    const uint32_t* fn_body = gzInstructionsSlice(&fn_block);
    uint32_t fn_body_len = gzInstructionsLen(&fn_block);
    gzUnstack(&fn_block);
    // Create func instruction (AstGen.zig:4874-4897).
    uint32_t func_ref = addFunc(&decl_block, node, body_node, decl_inst,
        ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE, NULL, 0, fn_body, fn_body_len,
        NULL, 0, lbrace_line, lbrace_column, false);
    // break_inline returning func to declaration (AstGen.zig:4899).
    makeBreakInline(&decl_block, decl_inst, func_ref, AST_NODE_OFFSET_NONE);
    // setDeclaration (AstGen.zig:4903-4923).
    setDeclaration(ag, decl_inst,
        (SetDeclArgs) { .src_line = decl_line,
            .src_column = decl_column,
            .id = decl_id,
            .name = test_name,
            .lib_name = UINT32_MAX,
            .value_body = gzInstructionsSlice(&decl_block),
            .value_body_len = gzInstructionsLen(&decl_block) });
    gzUnstack(&decl_block);
    (void)gz;
}
// --- fnDecl (AstGen.zig:4067) / fnDeclInner (AstGen.zig:4228) ---
// Handles non-extern function declarations with bodies, including params.
static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
uint32_t* decl_idx, uint32_t node) {
const Ast* tree = ag->tree;
AstData nd = tree->nodes.datas[node];
// For fn_decl: data.lhs = fn_proto node, data.rhs = body node.
uint32_t proto_node = nd.lhs;
uint32_t body_node = nd.rhs;
// Get function name token (main_token of proto + 1 = fn name).
uint32_t fn_token = tree->nodes.main_tokens[proto_node];
uint32_t fn_name_token = fn_token + 1;
// Check for 'pub' modifier (Ast.zig:2003-2025).
bool is_pub = (fn_token > 0
&& tree->tokens.tags[fn_token - 1] == TOKEN_KEYWORD_PUB);
// makeDeclaration on fn_proto node (AstGen.zig:4090).
uint32_t decl_inst = makeDeclaration(ag, proto_node);
wip_decl_insts[*decl_idx] = decl_inst;
(*decl_idx)++;
advanceSourceCursorToNode(ag, node);
uint32_t decl_line = ag->source_line;
uint32_t decl_column = ag->source_column;
// Save source cursor for restoring after ret_gz (AstGen.zig:4387-4388).
uint32_t saved_source_offset = ag->source_offset;
uint32_t saved_source_line = ag->source_line;
uint32_t saved_source_column = ag->source_column;
AstNodeTag proto_tag = tree->nodes.tags[proto_node];
AstData proto_data = tree->nodes.datas[proto_node];
// Extract return type node (rhs for all fn_proto variants).
uint32_t return_type_node = proto_data.rhs;
// Detect inferred error set: token before return type is '!'
// (AstGen.zig:4249-4251).
bool is_inferred_error = false;
if (return_type_node != 0) {
uint32_t ret_first_tok = firstToken(tree, return_type_node);
if (ret_first_tok > 0) {
uint32_t maybe_bang = ret_first_tok - 1;
uint32_t bang_start = tree->tokens.starts[maybe_bang];
if (tree->source[bang_start] == '!')
is_inferred_error = true;
}
}
// Extract param type nodes from proto variant (AstGen.zig:4253-4254).
uint32_t param_nodes_buf[1]; // buffer for fn_proto_simple/fn_proto_one
const uint32_t* param_nodes = NULL;
uint32_t params_len = 0;
if (proto_tag == AST_NODE_FN_PROTO_SIMPLE) {
// data.lhs = optional param node, data.rhs = return type.
if (proto_data.lhs != 0) {
param_nodes_buf[0] = proto_data.lhs;
param_nodes = param_nodes_buf;
params_len = 1;
}
} else if (proto_tag == AST_NODE_FN_PROTO_ONE) {
// data.lhs = extra_data index → AstFnProtoOne.
uint32_t extra_idx = proto_data.lhs;
uint32_t param
= tree->extra_data.arr[extra_idx]; // AstFnProtoOne.param
if (param != 0) {
param_nodes_buf[0] = param;
param_nodes = param_nodes_buf;
params_len = 1;
}
} else if (proto_tag == AST_NODE_FN_PROTO_MULTI) {
// data.lhs = extra_data index → SubRange{start, end}.
uint32_t extra_idx = proto_data.lhs;
uint32_t range_start = tree->extra_data.arr[extra_idx];
uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
param_nodes = tree->extra_data.arr + range_start;
params_len = range_end - range_start;
} else if (proto_tag == AST_NODE_FN_PROTO) {
// data.lhs = extra_data index → AstFnProto{params_start, params_end,
// ...}.
uint32_t extra_idx = proto_data.lhs;
uint32_t pstart = tree->extra_data.arr[extra_idx]; // params_start
uint32_t pend = tree->extra_data.arr[extra_idx + 1]; // params_end
param_nodes = tree->extra_data.arr + pstart;
params_len = pend - pstart;
}
// decl_gz (called value_gz in caller, decl_gz in fnDeclInner)
// (AstGen.zig:4194-4201).
GenZir decl_gz;
memset(&decl_gz, 0, sizeof(decl_gz));
decl_gz.base.tag = SCOPE_GEN_ZIR;
decl_gz.parent = NULL;
decl_gz.astgen = ag;
decl_gz.decl_node_index = proto_node;
decl_gz.decl_line = decl_line;
decl_gz.is_comptime = true;
decl_gz.instructions_top = ag->scratch_inst_len;
decl_gz.break_block = UINT32_MAX;
// --- Parameter iteration (AstGen.zig:4260-4363) ---
// Walk params, creating param instructions and ScopeLocalVal entries.
// We keep param scopes on the C stack (max 32 params like upstream).
Scope* params_scope = &decl_gz.base;
ScopeLocalVal param_scopes[32];
uint32_t param_scope_count = 0;
// Collect param instruction indices (AstGen.zig:4254, 4360).
uint32_t param_insts[32];
uint32_t param_insts_len = 0;
for (uint32_t param_i = 0; param_i < params_len; param_i++) {
uint32_t param_type_node = param_nodes[param_i];
// Find param name token by scanning backwards from firstToken of
// type expression (mirrors FnProto.Iterator.next, Ast.zig:2687).
// Layout: [comptime] [name] [:] type_expr
// So: type_first_tok - 1 is ':', type_first_tok - 2 is name.
uint32_t type_first_tok = firstToken(tree, param_type_node);
uint32_t name_token = 0; // 0 = no name found
bool is_comptime_param = false;
if (type_first_tok >= 2
&& tree->tokens.tags[type_first_tok - 1] == TOKEN_COLON) {
// Named parameter: name is at type_first_tok - 2.
uint32_t maybe_name = type_first_tok - 2;
uint32_t name_start = tree->tokens.starts[maybe_name];
char ch = tree->source[name_start];
if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')
|| ch == '_' || ch == '@') {
// Could be name or comptime/noalias keyword.
if (name_start + 8 <= tree->source_len
&& memcmp(tree->source + name_start, "comptime", 8) == 0) {
is_comptime_param = true;
} else if (name_start + 7 <= tree->source_len
&& memcmp(tree->source + name_start, "noalias", 7) == 0) {
// noalias keyword, not a name.
} else {
name_token = maybe_name;
// Check for preceding comptime keyword.
if (maybe_name > 0) {
uint32_t prev = maybe_name - 1;
uint32_t prev_start = tree->tokens.starts[prev];
if (prev_start + 8 <= tree->source_len
&& memcmp(tree->source + prev_start, "comptime", 8)
== 0)
is_comptime_param = true;
}
}
}
}
// Determine param name string (AstGen.zig:4283-4321).
// Must be resolved BEFORE type expression to match upstream string
// table ordering.
uint32_t param_name_str = 0; // NullTerminatedString.empty
if (name_token != 0) {
uint32_t name_start = tree->tokens.starts[name_token];
char nch = tree->source[name_start];
// Skip "_" params (AstGen.zig:4285-4286).
if (nch == '_') {
uint32_t next_start = tree->tokens.starts[name_token + 1];
if (next_start == name_start + 1) {
// Single underscore: empty name.
param_name_str = 0;
} else {
param_name_str = identAsString(ag, name_token);
}
} else {
param_name_str = identAsString(ag, name_token);
}
}
// Evaluate param type expression in a sub-block
// (AstGen.zig:4333-4337).
GenZir param_gz = makeSubBlock(&decl_gz, params_scope);
uint32_t param_type_ref
= expr(&param_gz, params_scope, param_type_node);
if (ag->has_compile_errors)
return;
// The break_inline target is the param instruction we're about to
// create (AstGen.zig:4336-4337).
uint32_t param_inst_expected = ag->inst_len + 1;
// +1 because: the break_inline is emitted first (uses inst_len),
// then addParam emits the param instruction at inst_len.
// Actually, addParam emits the param after break_inline. The
// break_inline's block_inst field should point to the param inst.
// We know it will be at ag->inst_len after the break_inline.
makeBreakInline(&param_gz, param_inst_expected, param_type_ref,
(int32_t)param_type_node - (int32_t)param_gz.decl_node_index);
// Create param instruction (AstGen.zig:4341-4343).
ZirInstTag param_tag
= is_comptime_param ? ZIR_INST_PARAM_COMPTIME : ZIR_INST_PARAM;
uint32_t name_tok_for_src = name_token != 0
? name_token
: tree->nodes.main_tokens[param_type_node];
uint32_t param_inst = addParam(
&decl_gz, &param_gz, param_tag, name_tok_for_src, param_name_str);
(void)param_inst_expected;
// Record param instruction index (AstGen.zig:4360).
if (param_insts_len < 32)
param_insts[param_insts_len++] = param_inst;
// Create ScopeLocalVal for this param (AstGen.zig:4349-4359).
if (param_name_str != 0 && param_scope_count < 32) {
ScopeLocalVal* lv = &param_scopes[param_scope_count++];
lv->base.tag = SCOPE_LOCAL_VAL;
lv->parent = params_scope;
lv->gen_zir = &decl_gz;
lv->inst = param_inst + ZIR_REF_START_INDEX; // toRef()
lv->token_src = name_token;
lv->name = param_name_str;
params_scope = &lv->base;
}
}
// --- Return type (AstGen.zig:4369-4383) ---
GenZir ret_gz = makeSubBlock(&decl_gz, params_scope);
uint32_t ret_ref = ZIR_REF_NONE;
if (return_type_node != 0) {
ret_ref = expr(&ret_gz, params_scope, return_type_node);
if (ag->has_compile_errors)
return;
// If ret_gz produced instructions, add break_inline
// (AstGen.zig:4377-4381).
if (gzInstructionsLen(&ret_gz) > 0) {
// break_inline targets the func instruction (which doesn't
// exist yet). We use 0 as placeholder and patch later.
makeBreakInline(&ret_gz, 0, ret_ref, AST_NODE_OFFSET_NONE);
}
}
// Map void_type → .none (AstGen.zig:12054).
if (ret_ref == ZIR_REF_VOID_TYPE)
ret_ref = ZIR_REF_NONE;
uint32_t ret_body_len = gzInstructionsLen(&ret_gz);
// Copy ret_body before unstacking: body_gz reuses the same scratch area.
uint32_t* ret_body = NULL;
if (ret_body_len > 0) {
ret_body = malloc(ret_body_len * sizeof(uint32_t));
if (!ret_body)
abort();
memcpy(ret_body, gzInstructionsSlice(&ret_gz),
ret_body_len * sizeof(uint32_t));
}
gzUnstack(&ret_gz);
// Restore source cursor (AstGen.zig:4387-4388).
ag->source_offset = saved_source_offset;
ag->source_line = saved_source_line;
ag->source_column = saved_source_column;
// --- Body (AstGen.zig:4415-4424) ---
GenZir body_gz;
memset(&body_gz, 0, sizeof(body_gz));
body_gz.base.tag = SCOPE_GEN_ZIR;
body_gz.parent = params_scope;
body_gz.astgen = ag;
body_gz.decl_node_index = proto_node;
body_gz.decl_line = decl_line;
body_gz.is_comptime = false;
body_gz.instructions_top = ag->scratch_inst_len;
// Set fn_block and fn_ret_ty for the body (AstGen.zig:4442-4455).
void* prev_fn_block = ag->fn_block;
setFnBlock(ag, &body_gz);
uint32_t prev_fn_ret_ty = ag->fn_ret_ty;
if (is_inferred_error || ret_ref == ZIR_REF_NONE) {
// Non-void non-trivial return type: emit ret_type instruction.
if (ret_body_len > 0 || is_inferred_error) {
ZirInstData rtdata;
memset(&rtdata, 0, sizeof(rtdata));
rtdata.node = (int32_t)node - (int32_t)body_gz.decl_node_index;
ag->fn_ret_ty
= addInstruction(&body_gz, ZIR_INST_RET_TYPE, rtdata);
} else {
ag->fn_ret_ty = ret_ref; // void
}
} else {
// ret_ref is a simple ref (not void, not inferred error).
// Still need ret_type instruction if it resolved to an inst.
if (ret_ref >= ZIR_REF_START_INDEX) {
ZirInstData rtdata;
memset(&rtdata, 0, sizeof(rtdata));
rtdata.node = (int32_t)node - (int32_t)body_gz.decl_node_index;
ag->fn_ret_ty
= addInstruction(&body_gz, ZIR_INST_RET_TYPE, rtdata);
} else {
ag->fn_ret_ty = ret_ref;
}
}
// Process function body (AstGen.zig:4461-4465).
advanceSourceCursorToNode(ag, body_node);
uint32_t lbrace_line = ag->source_line - decl_line;
uint32_t lbrace_column = ag->source_column;
fullBodyExpr(&body_gz, &body_gz.base, RL_NONE_VAL, body_node);
ag->fn_block = prev_fn_block;
ag->fn_ret_ty = prev_fn_ret_ty;
if (ag->has_compile_errors) {
free(ret_body);
return;
}
// Add implicit return at end of function body
// (AstGen.zig:4465-4871).
if (!endsWithNoReturn(&body_gz)) {
ZirInstData rdata;
rdata.un_node.operand = ZIR_REF_NONE;
rdata.un_node.src_node
= (int32_t)node - (int32_t)body_gz.decl_node_index;
addInstruction(
&body_gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
uint32_t body_last_tok = lastToken(tree, body_node);
ZirInstData rdata2;
rdata2.un_tok.operand = ZIR_REF_VOID_VALUE;
rdata2.un_tok.src_tok = tokenIndexToRelative(&body_gz, body_last_tok);
addInstruction(&body_gz, ZIR_INST_RET_IMPLICIT, rdata2);
}
// Read body before unstacking (AstGen.zig:12215-12218).
const uint32_t* fn_body = gzInstructionsSlice(&body_gz);
uint32_t fn_body_len = gzInstructionsLen(&body_gz);
gzUnstack(&body_gz);
// Create func instruction (AstGen.zig:4476-4494).
uint32_t func_ref = addFunc(&decl_gz, node, body_node, decl_inst, ret_ref,
ret_body, ret_body_len, fn_body, fn_body_len, param_insts,
param_insts_len, lbrace_line, lbrace_column, is_inferred_error);
// Patch ret_body break_inline to point to func instruction
// (AstGen.zig:12199-12202).
if (ret_body_len > 0) {
uint32_t break_inst = ret_body[ret_body_len - 1];
// The break_inline payload is at payload_index; block_inst is at
// offset 1 in the Break struct.
uint32_t break_payload
= ag->inst_datas[break_inst].break_data.payload_index;
ag->extra[break_payload + 1] = func_ref - ZIR_REF_START_INDEX;
}
free(ret_body);
// break_inline returning func to declaration (AstGen.zig:4495).
// nodeIndexToRelative(decl_node) = node - decl_gz.decl_node_index.
makeBreakInline(
&decl_gz, decl_inst, func_ref, (int32_t)node - (int32_t)proto_node);
// setDeclaration (AstGen.zig:4208-4225).
DeclFlagsId decl_id
= is_pub ? DECL_ID_PUB_CONST_SIMPLE : DECL_ID_CONST_SIMPLE;
uint32_t name_str = identAsString(ag, fn_name_token);
setDeclaration(ag, decl_inst,
(SetDeclArgs) { .src_line = decl_line,
.src_column = decl_column,
.id = decl_id,
.name = name_str,
.lib_name = UINT32_MAX,
.value_body = gzInstructionsSlice(&decl_gz),
.value_body_len = gzInstructionsLen(&decl_gz) });
gzUnstack(&decl_gz);
(void)gz;
}
// --- comptimeDecl (AstGen.zig:4645) ---
// Lower a container-level `comptime { ... }` declaration.
// NOTE(port): only the empty-body case is handled here; a non-empty body is
// not lowered — the break_inline with void_value is emitted unconditionally.
static void comptimeDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
    uint32_t* decl_idx, uint32_t node) {
    // Reserve the declaration instruction before moving the source cursor
    // (AstGen.zig:4663-4665).
    uint32_t decl_inst = makeDeclaration(ag, node);
    wip_decl_insts[(*decl_idx)++] = decl_inst;
    advanceSourceCursorToNode(ag, node);
    uint32_t decl_line = ag->source_line;
    uint32_t decl_column = ag->source_column;
    // Sub-block holding the comptime body (AstGen.zig:4675-4686).
    GenZir comptime_gz;
    memset(&comptime_gz, 0, sizeof(comptime_gz));
    comptime_gz.base.tag = SCOPE_GEN_ZIR;
    comptime_gz.parent = NULL;
    comptime_gz.astgen = ag;
    comptime_gz.decl_node_index = node;
    comptime_gz.decl_line = decl_line;
    comptime_gz.is_comptime = true;
    comptime_gz.instructions_top = ag->scratch_inst_len;
    // Empty body produced no instructions, so terminate the declaration with
    // break_inline(.void_value) (AstGen.zig:4685-4686).
    makeBreakInline(
        &comptime_gz, decl_inst, ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
    setDeclaration(ag, decl_inst,
        (SetDeclArgs) { .src_line = decl_line,
            .src_column = decl_column,
            .id = DECL_ID_COMPTIME,
            .name = 0,
            .lib_name = UINT32_MAX,
            .value_body = gzInstructionsSlice(&comptime_gz),
            .value_body_len = gzInstructionsLen(&comptime_gz) });
    gzUnstack(&comptime_gz);
    (void)gz;
}
// --- globalVarDecl (AstGen.zig:4498) ---
// Extract VarDecl fields from an AST node (Ast.zig:1326-1380).
// Flattened view of the four var-decl node shapes (simple/aligned/local/
// global) plus the modifier tokens scanned backwards from the keyword.
typedef struct {
    uint32_t mut_token; // token index of the `var`/`const` keyword
    uint32_t type_node; // 0 = none
    uint32_t align_node; // 0 = none
    uint32_t addrspace_node; // 0 = none
    uint32_t section_node; // 0 = none
    uint32_t init_node; // UINT32_MAX = none (0 also treated as none by users)
    bool is_pub;
    bool is_extern;
    bool is_export;
    bool is_mutable; // true for `var`, false for `const`
    bool is_threadlocal;
    uint32_t lib_name_token; // string-literal token of extern lib name; UINT32_MAX = none
} VarDeclInfo;
// Flatten one of the four var-decl AST node shapes into a VarDeclInfo.
// Node payload layouts follow Ast.zig:1326-1380; the modifier scan mirrors
// Ast.zig:2003-2025.
static VarDeclInfo extractVarDecl(const Ast* tree, uint32_t node) {
    AstData data = tree->nodes.datas[node];
    VarDeclInfo info;
    memset(&info, 0, sizeof(info));
    info.mut_token = tree->nodes.main_tokens[node];
    info.init_node = UINT32_MAX;
    info.lib_name_token = UINT32_MAX;
    switch (tree->nodes.tags[node]) {
    case AST_NODE_SIMPLE_VAR_DECL:
        // lhs = optional type node, rhs = optional init node.
        info.type_node = data.lhs;
        info.init_node = data.rhs;
        break;
    case AST_NODE_ALIGNED_VAR_DECL:
        // lhs = align node, rhs = optional init node.
        info.align_node = data.lhs;
        info.init_node = data.rhs;
        break;
    case AST_NODE_GLOBAL_VAR_DECL: {
        // lhs = extra_data index of {type, align, addrspace, section}.
        const uint32_t* extra = tree->extra_data.arr + data.lhs;
        info.type_node = extra[0];
        info.align_node = extra[1];
        info.addrspace_node = extra[2];
        info.section_node = extra[3];
        info.init_node = data.rhs;
        break;
    }
    case AST_NODE_LOCAL_VAR_DECL: {
        // lhs = extra_data index of {type, align}.
        const uint32_t* extra = tree->extra_data.arr + data.lhs;
        info.type_node = extra[0];
        info.align_node = extra[1];
        info.init_node = data.rhs;
        break;
    }
    default:
        break;
    }
    info.is_mutable = (tree->tokens.tags[info.mut_token] == TOKEN_KEYWORD_VAR);
    // Walk the tokens immediately preceding `var`/`const`; stop at the first
    // token that is not a declaration modifier.
    uint32_t tok = info.mut_token;
    while (tok > 0) {
        tok--;
        switch (tree->tokens.tags[tok]) {
        case TOKEN_KEYWORD_EXTERN:
            info.is_extern = true;
            break;
        case TOKEN_KEYWORD_EXPORT:
            info.is_export = true;
            break;
        case TOKEN_KEYWORD_PUB:
            info.is_pub = true;
            break;
        case TOKEN_KEYWORD_THREADLOCAL:
            info.is_threadlocal = true;
            break;
        case TOKEN_STRING_LITERAL:
            // lib name in `extern "name"`.
            info.lib_name_token = tok;
            break;
        default:
            return info;
        }
    }
    return info;
}
// Compute DeclFlagsId from VarDecl properties (AstGen.zig:13916-13972).
// Selects the most compact encoding: *_SIMPLE when no extra bodies/lib name
// are present, *_TYPED when only a type body exists, the full form otherwise.
static DeclFlagsId computeVarDeclId(bool is_mutable, bool is_pub,
    bool is_extern, bool is_export, bool is_threadlocal, bool has_type_body,
    bool has_special_body, bool has_lib_name) {
    if (!is_mutable) {
        // `const` declarations.
        if (is_extern) {
            // Extern consts carry an optional lib name; any extra body or a
            // lib name forces the full encoding.
            bool full = has_lib_name || has_special_body;
            if (is_pub)
                return full ? DECL_ID_PUB_EXTERN_CONST
                            : DECL_ID_PUB_EXTERN_CONST_SIMPLE;
            return full ? DECL_ID_EXTERN_CONST : DECL_ID_EXTERN_CONST_SIMPLE;
        }
        if (is_export)
            return is_pub ? DECL_ID_PUB_EXPORT_CONST : DECL_ID_EXPORT_CONST;
        if (has_special_body)
            return is_pub ? DECL_ID_PUB_CONST : DECL_ID_CONST;
        if (has_type_body)
            return is_pub ? DECL_ID_PUB_CONST_TYPED : DECL_ID_CONST_TYPED;
        return is_pub ? DECL_ID_PUB_CONST_SIMPLE : DECL_ID_CONST_SIMPLE;
    }
    // `var` declarations.
    if (is_extern) {
        if (is_threadlocal)
            return is_pub ? DECL_ID_PUB_EXTERN_VAR_THREADLOCAL
                          : DECL_ID_EXTERN_VAR_THREADLOCAL;
        return is_pub ? DECL_ID_PUB_EXTERN_VAR : DECL_ID_EXTERN_VAR;
    }
    if (is_export) {
        if (is_threadlocal)
            return is_pub ? DECL_ID_PUB_EXPORT_VAR_THREADLOCAL
                          : DECL_ID_EXPORT_VAR_THREADLOCAL;
        return is_pub ? DECL_ID_PUB_EXPORT_VAR : DECL_ID_EXPORT_VAR;
    }
    if (is_threadlocal)
        return is_pub ? DECL_ID_PUB_VAR_THREADLOCAL : DECL_ID_VAR_THREADLOCAL;
    if (has_special_body || has_type_body)
        return is_pub ? DECL_ID_PUB_VAR : DECL_ID_VAR;
    return is_pub ? DECL_ID_PUB_VAR_SIMPLE : DECL_ID_VAR_SIMPLE;
}
// Lower a container-level `var`/`const` declaration (AstGen.zig:4498).
// Emits up to five inline sub-bodies (type/align/linksection/addrspace/value)
// that each terminate with break_inline back to the declaration instruction,
// then finalizes the declaration via setDeclaration.
//
// Fix: the declaration's source line is captured ONCE right after
// advanceSourceCursorToNode. Previously each sub-gz's decl_line and the final
// .src_line re-read ag->source_line, which the expr()/typeExpr() calls below
// advance — so a multi-line initializer reported the wrong line. This also
// makes the function consistent with fnDecl/comptimeDecl, which capture
// decl_line up front.
static void globalVarDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
    uint32_t* decl_idx, uint32_t node) {
    const Ast* tree = ag->tree;
    VarDeclInfo vd = extractVarDecl(tree, node);
    // Name token directly follows the `var`/`const` keyword.
    uint32_t name_token = vd.mut_token + 1;
    // advanceSourceCursorToNode before makeDeclaration (AstGen.zig:4542-4546).
    advanceSourceCursorToNode(ag, node);
    uint32_t decl_line = ag->source_line;
    uint32_t decl_column = ag->source_column;
    uint32_t decl_inst = makeDeclaration(ag, node);
    wip_decl_insts[*decl_idx] = decl_inst;
    (*decl_idx)++;
    // Set up type sub-block (AstGen.zig:4574-4582).
    GenZir type_gz;
    memset(&type_gz, 0, sizeof(type_gz));
    type_gz.base.tag = SCOPE_GEN_ZIR;
    type_gz.astgen = ag;
    type_gz.decl_node_index = node;
    type_gz.instructions_top = ag->scratch_inst_len;
    type_gz.decl_line = decl_line;
    type_gz.is_comptime = true;
    if (vd.type_node != 0) {
        uint32_t type_inst = typeExpr(&type_gz, &type_gz.base, vd.type_node);
        makeBreakInline(&type_gz, decl_inst, type_inst, 0);
    }
    // Record type_gz boundary for slicing.
    uint32_t type_top = ag->scratch_inst_len;
    // Align sub-block (AstGen.zig:4592-4596).
    GenZir align_gz;
    memset(&align_gz, 0, sizeof(align_gz));
    align_gz.base.tag = SCOPE_GEN_ZIR;
    align_gz.astgen = ag;
    align_gz.decl_node_index = node;
    align_gz.instructions_top = type_top;
    align_gz.decl_line = decl_line;
    align_gz.is_comptime = true;
    if (vd.align_node != 0) {
        uint32_t align_inst = expr(&align_gz, &align_gz.base, vd.align_node);
        makeBreakInline(&align_gz, decl_inst, align_inst, 0);
    }
    uint32_t align_top = ag->scratch_inst_len;
    // Linksection sub-block (AstGen.zig:4598-4602).
    GenZir linksection_gz;
    memset(&linksection_gz, 0, sizeof(linksection_gz));
    linksection_gz.base.tag = SCOPE_GEN_ZIR;
    linksection_gz.astgen = ag;
    linksection_gz.decl_node_index = node;
    linksection_gz.instructions_top = align_top;
    linksection_gz.decl_line = decl_line;
    linksection_gz.is_comptime = true;
    if (vd.section_node != 0) {
        uint32_t ls_inst
            = expr(&linksection_gz, &linksection_gz.base, vd.section_node);
        makeBreakInline(&linksection_gz, decl_inst, ls_inst, 0);
    }
    uint32_t linksection_top = ag->scratch_inst_len;
    // Addrspace sub-block (AstGen.zig:4604-4608).
    GenZir addrspace_gz;
    memset(&addrspace_gz, 0, sizeof(addrspace_gz));
    addrspace_gz.base.tag = SCOPE_GEN_ZIR;
    addrspace_gz.astgen = ag;
    addrspace_gz.decl_node_index = node;
    addrspace_gz.instructions_top = linksection_top;
    addrspace_gz.decl_line = decl_line;
    addrspace_gz.is_comptime = true;
    if (vd.addrspace_node != 0) {
        uint32_t as_inst
            = expr(&addrspace_gz, &addrspace_gz.base, vd.addrspace_node);
        makeBreakInline(&addrspace_gz, decl_inst, as_inst, 0);
    }
    uint32_t addrspace_top = ag->scratch_inst_len;
    // Value sub-block (AstGen.zig:4610-4620).
    GenZir value_gz;
    memset(&value_gz, 0, sizeof(value_gz));
    value_gz.base.tag = SCOPE_GEN_ZIR;
    value_gz.astgen = ag;
    value_gz.decl_node_index = node;
    value_gz.instructions_top = addrspace_top;
    value_gz.decl_line = decl_line;
    value_gz.is_comptime = true;
    if (vd.init_node != UINT32_MAX && vd.init_node != 0) {
        uint32_t init_ref = expr(&value_gz, &value_gz.base, vd.init_node);
        makeBreakInline(&value_gz, decl_inst, init_ref, 0);
    }
    // Compute body slices (instructionsSliceUpto): each sub-gz's body runs
    // from its instructions_top up to where the next sub-gz started.
    const uint32_t* type_body
        = ag->scratch_instructions + type_gz.instructions_top;
    uint32_t type_body_len = type_top - type_gz.instructions_top;
    const uint32_t* align_body
        = ag->scratch_instructions + align_gz.instructions_top;
    uint32_t align_body_len = align_top - align_gz.instructions_top;
    const uint32_t* ls_body
        = ag->scratch_instructions + linksection_gz.instructions_top;
    uint32_t ls_body_len = linksection_top - linksection_gz.instructions_top;
    const uint32_t* as_body
        = ag->scratch_instructions + addrspace_gz.instructions_top;
    uint32_t as_body_len = addrspace_top - addrspace_gz.instructions_top;
    const uint32_t* val_body = gzInstructionsSlice(&value_gz);
    uint32_t val_body_len = gzInstructionsLen(&value_gz);
    bool has_type_body = (type_body_len > 0);
    bool has_special_body
        = (align_body_len > 0 || ls_body_len > 0 || as_body_len > 0);
    bool has_lib_name = (vd.lib_name_token != UINT32_MAX);
    uint32_t name_str = identAsString(ag, name_token);
    DeclFlagsId decl_id = computeVarDeclId(vd.is_mutable, vd.is_pub,
        vd.is_extern, vd.is_export, vd.is_threadlocal, has_type_body,
        has_special_body, has_lib_name);
    // Compute lib_name string index for `extern "name"` declarations.
    uint32_t lib_name = UINT32_MAX;
    if (has_lib_name) {
        uint32_t li, ll;
        strLitAsString(ag, vd.lib_name_token, &li, &ll);
        lib_name = li;
    }
    setDeclaration(ag, decl_inst,
        (SetDeclArgs) { .src_line = decl_line,
            .src_column = decl_column,
            .id = decl_id,
            .name = name_str,
            .lib_name = lib_name,
            .type_body = type_body,
            .type_body_len = type_body_len,
            .align_body = align_body,
            .align_body_len = align_body_len,
            .linksection_body = ls_body,
            .linksection_body_len = ls_body_len,
            .addrspace_body = as_body,
            .addrspace_body_len = as_body_len,
            .value_body = val_body,
            .value_body_len = val_body_len });
    gzUnstack(&value_gz);
    (void)gz;
}
// --- nodeImpliesMoreThanOnePossibleValue (AstGen.zig:10548) ---
// Check if an identifier is a primitive type with more than one value
// (AstGen.zig:10729-10766).
//
// Fix: the previous implementation matched prefixes only, so ordinary user
// identifiers such as `u8x`, `c_foo`, `anything` or `comptime_thing` were
// misclassified as primitives. The token's full length is now measured and
// names are compared exactly, matching upstream's exact-name lookup.
static bool identImpliesMoreThanOnePossibleValue(
    const Ast* tree, uint32_t main_token) {
    uint32_t start = tree->tokens.starts[main_token];
    const char* src = tree->source + start;
    // Measure the identifier token: [A-Za-z_][A-Za-z0-9_]*. The source
    // buffer extends past the token, so scanning stops at the first
    // non-identifier character.
    size_t len = 0;
    while ((src[len] >= 'a' && src[len] <= 'z')
        || (src[len] >= 'A' && src[len] <= 'Z')
        || (src[len] >= '0' && src[len] <= '9') || src[len] == '_')
        len++;
    // Integer types: usize/isize, and arbitrary-width u<digits>/i<digits>.
    if (len >= 2 && (src[0] == 'u' || src[0] == 'i')) {
        if (len == 5 && memcmp(src + 1, "size", 4) == 0)
            return true;
        bool all_digits = true;
        for (size_t k = 1; k < len; k++)
            if (src[k] < '0' || src[k] > '9')
                all_digits = false;
        if (all_digits)
            return true;
    }
    // Float types (f16, f32, f64, f80, f128), matched as f<digits>.
    if (len >= 2 && src[0] == 'f') {
        bool all_digits = true;
        for (size_t k = 1; k < len; k++)
            if (src[k] < '0' || src[k] > '9')
                all_digits = false;
        if (all_digits)
            return true;
    }
    // Remaining multi-valued primitives, compared exactly.
    static const char* const multi_value_primitives[] = {
        "bool", "type", "anyerror", "anyframe", "anyopaque",
        "comptime_int", "comptime_float",
        "c_char", "c_short", "c_ushort", "c_int", "c_uint",
        "c_long", "c_ulong", "c_longlong", "c_ulonglong", "c_longdouble",
    };
    for (size_t k = 0;
        k < sizeof(multi_value_primitives) / sizeof(multi_value_primitives[0]);
        k++) {
        const char* name = multi_value_primitives[k];
        if (len == strlen(name) && memcmp(src, name, len) == 0)
            return true;
    }
    return false;
}
// Walk wrapper nodes to decide whether a type expression necessarily has
// more than one possible value (AstGen.zig:10548).
static bool nodeImpliesMoreThanOnePossibleValue(
    const Ast* tree, uint32_t node) {
    for (uint32_t cur = node;;) {
        switch (tree->nodes.tags[cur]) {
        // Pointer/optional/array/anyframe types always have multiple values
        // (AstGen.zig:10718-10725).
        case AST_NODE_PTR_TYPE_ALIGNED:
        case AST_NODE_PTR_TYPE_SENTINEL:
        case AST_NODE_PTR_TYPE:
        case AST_NODE_PTR_TYPE_BIT_RANGE:
        case AST_NODE_OPTIONAL_TYPE:
        case AST_NODE_ANYFRAME_TYPE:
        case AST_NODE_ARRAY_TYPE_SENTINEL:
            return true;
        // Wrappers that preserve the operand's type: descend into lhs
        // (AstGen.zig:10710-10716).
        case AST_NODE_TRY:
        case AST_NODE_COMPTIME:
        case AST_NODE_NOSUSPEND:
        case AST_NODE_GROUPED_EXPRESSION:
        case AST_NODE_UNWRAP_OPTIONAL:
            cur = tree->nodes.datas[cur].lhs;
            break;
        // Identifier: defer to the primitive-type check
        // (AstGen.zig:10727-10780).
        case AST_NODE_IDENTIFIER:
            return identImpliesMoreThanOnePossibleValue(
                tree, tree->nodes.main_tokens[cur]);
        default:
            return false;
        }
    }
}
// --- nodeImpliesComptimeOnly (AstGen.zig:10787) ---
// Check if an identifier names a comptime-only primitive: only
// comptime_int, comptime_float and type qualify (AstGen.zig:11010-11013).
//
// Fix: the previous implementation matched the "comptime" prefix only, so a
// user identifier such as `comptime_thing` was misclassified as
// comptime-only. The token length is now measured and names compared exactly.
static bool identImpliesComptimeOnly(const Ast* tree, uint32_t main_token) {
    uint32_t start = tree->tokens.starts[main_token];
    const char* src = tree->source + start;
    // Measure the identifier token: [A-Za-z_][A-Za-z0-9_]*.
    size_t len = 0;
    while ((src[len] >= 'a' && src[len] <= 'z')
        || (src[len] >= 'A' && src[len] <= 'Z')
        || (src[len] >= '0' && src[len] <= '9') || src[len] == '_')
        len++;
    static const char* const comptime_only_primitives[]
        = { "type", "comptime_int", "comptime_float" };
    for (size_t k = 0; k < sizeof(comptime_only_primitives)
             / sizeof(comptime_only_primitives[0]);
        k++) {
        const char* name = comptime_only_primitives[k];
        if (len == strlen(name) && memcmp(src, name, len) == 0)
            return true;
    }
    return false;
}
// Walk wrapper nodes to decide whether a type expression is necessarily
// comptime-only (AstGen.zig:10787).
static bool nodeImpliesComptimeOnly(const Ast* tree, uint32_t node) {
    for (uint32_t cur = node;;) {
        switch (tree->nodes.tags[cur]) {
        // Function types are always comptime-only (AstGen.zig:10950-10955).
        case AST_NODE_FN_PROTO_SIMPLE:
        case AST_NODE_FN_PROTO_MULTI:
        case AST_NODE_FN_PROTO_ONE:
        case AST_NODE_FN_PROTO:
            return true;
        // Wrappers that preserve the operand's type: descend into lhs.
        case AST_NODE_TRY:
        case AST_NODE_COMPTIME:
        case AST_NODE_NOSUSPEND:
        case AST_NODE_GROUPED_EXPRESSION:
        case AST_NODE_UNWRAP_OPTIONAL:
            cur = tree->nodes.datas[cur].lhs;
            break;
        // Identifier: defer to the comptime-only primitive check.
        case AST_NODE_IDENTIFIER:
            return identImpliesComptimeOnly(
                tree, tree->nodes.main_tokens[cur]);
        default:
            return false;
        }
    }
}
// --- WipMembers (AstGen.zig:3989) ---
// Tracks decl indices, field bit-flags, and per-field data during container
// processing. All data lives in a single malloc'd array laid out as:
// [decls (decl_count)] [field_bits (ceil)] [fields (up to field_count*max)]
// Bodies are tracked separately in a dynamic array.
// Sized here for struct containers: bits_per_field = 4 and
// max_field_size = 5 (see wipMembersInit).
typedef struct {
    uint32_t* payload; // malloc'd array holding all three regions
    uint32_t payload_top; // always 0 (start of decls region)
    uint32_t field_bits_start; // word index where the per-field bit bags begin
    uint32_t fields_start; // word index where per-field data words begin
    uint32_t fields_end; // one past the last appended field data word
    uint32_t decl_index; // number of decls recorded so far
    uint32_t field_index; // number of fields recorded so far
    // Bodies scratch: dynamically grown array for field type/align/init
    // bodies.
    uint32_t* bodies;
    uint32_t bodies_len;
    uint32_t bodies_cap; // capacity of `bodies` in u32 words
} WipMembers;
// Allocate a zero-filled WipMembers payload sized for decl_count decls and
// field_count fields (bits_per_field = 4, max_field_size = 5, as upstream).
static WipMembers wipMembersInit(uint32_t decl_count, uint32_t field_count) {
    enum { FIELDS_PER_U32 = 8 /* 32 / 4 */, MAX_FIELD_SIZE = 5 };
    // Number of u32 bit-bag words needed for field_count 4-bit flag sets.
    uint32_t bit_words = field_count > 0
        ? (field_count + FIELDS_PER_U32 - 1) / FIELDS_PER_U32
        : 0;
    WipMembers wm;
    memset(&wm, 0, sizeof(wm));
    wm.payload_top = 0;
    wm.field_bits_start = decl_count;
    wm.fields_start = decl_count + bit_words;
    wm.fields_end = wm.fields_start;
    // calloc zero-fills so partially packed bit bags start clean; allocate
    // at least one word so payload is never NULL.
    uint32_t payload_words = wm.fields_start + field_count * MAX_FIELD_SIZE;
    wm.payload
        = calloc(payload_words > 0 ? payload_words : 1, sizeof(uint32_t));
    if (!wm.payload)
        exit(1);
    return wm;
}
// Release the payload and bodies buffers (free(NULL) is a safe no-op).
static void wipMembersDeinit(WipMembers* wm) {
    free(wm->bodies);
    free(wm->payload);
}
// Record a declaration instruction in the decls region (front of payload).
static void wipMembersNextDecl(WipMembers* wm, uint32_t decl_inst) {
    uint32_t slot = wm->payload_top + wm->decl_index++;
    wm->payload[slot] = decl_inst;
}
// bits_per_field = 4: bits[0]=have_align, bits[1]=have_value,
// bits[2]=is_comptime, bits[3]=have_type_body
// Packs one field's 4 flag bits into the current bit-bag word (mirrors
// WipMembers.nextField). Protocol: new bits land in the TOP 4 bits of the
// bag and existing bits shift right 4 each call, so after the final
// right-shift in wipMembersFinishBits the first field's flags sit in
// bits 0-3.
static void wipMembersNextField(WipMembers* wm, bool bits[4]) {
    uint32_t fields_per_u32 = 8; // 32 / 4
    uint32_t index = wm->field_bits_start + wm->field_index / fields_per_u32;
    // First field of a new bag starts from an empty bag; otherwise continue
    // packing into the partially filled word.
    uint32_t bit_bag
        = (wm->field_index % fields_per_u32 == 0) ? 0 : wm->payload[index];
    bit_bag >>= 4;
    // Place bit i of this field at position 28+i (top nibble of the word).
    for (int i = 0; i < 4; i++) {
        bit_bag |= ((uint32_t)(bits[i] ? 1 : 0)) << (32 - 4 + i);
    }
    wm->payload[index] = bit_bag;
    wm->field_index++;
}
// Append one data word for the current field (name/type/align/init refs).
static void wipMembersAppendToField(WipMembers* wm, uint32_t data) {
    wm->payload[wm->fields_end++] = data;
}
// Normalize the final (possibly partial) bit-bag word (mirrors
// WipMembers.finishBits). wipMembersNextField accumulates flags in the high
// bits, shifting right 4 per field; shifting by the unused slots here moves
// the first field's flags of the last bag down to bits 0-3.
static void wipMembersFinishBits(WipMembers* wm) {
    uint32_t fields_per_u32 = 8; // 32 / 4
    uint32_t empty_field_slots
        = fields_per_u32 - (wm->field_index % fields_per_u32);
    // A completely filled final bag (field_index % 8 == 0 makes
    // empty_field_slots == fields_per_u32) needs no adjustment; nor does an
    // empty container.
    if (wm->field_index > 0 && empty_field_slots < fields_per_u32) {
        uint32_t index
            = wm->field_bits_start + wm->field_index / fields_per_u32;
        wm->payload[index] >>= (empty_field_slots * 4);
    }
}
// Returns pointer to decls region and its length.
static const uint32_t* wipMembersDeclsSlice(
    const WipMembers* wm, uint32_t* out_len) {
    const uint32_t* decls = &wm->payload[wm->payload_top];
    *out_len = wm->decl_index;
    return decls;
}
// Returns pointer to fields region (field_bits + field_data) and its length.
static const uint32_t* wipMembersFieldsSlice(
    const WipMembers* wm, uint32_t* out_len) {
    const uint32_t* fields = &wm->payload[wm->field_bits_start];
    *out_len = wm->fields_end - wm->field_bits_start;
    return fields;
}
// Append body instructions to the WipMembers bodies scratch.
// Capacity grows geometrically (minimum 64 words) until the data fits.
static void wipMembersBodiesAppend(
    WipMembers* wm, const uint32_t* data, uint32_t len) {
    uint32_t needed = wm->bodies_len + len;
    if (needed > wm->bodies_cap) {
        uint32_t cap = wm->bodies_cap == 0 ? 64 : wm->bodies_cap * 2;
        while (cap < needed)
            cap *= 2;
        uint32_t* grown = realloc(wm->bodies, cap * sizeof(uint32_t));
        if (!grown)
            exit(1);
        wm->bodies = grown;
        wm->bodies_cap = cap;
    }
    memcpy(wm->bodies + wm->bodies_len, data, len * sizeof(uint32_t));
    wm->bodies_len += len;
}
// Ensure room for one more word in wm->bodies, growing geometrically
// (minimum 64 words). Extracted so the main loop's two append sites share
// one growth path; the previous inline copy inside the fixup loop assumed
// bodies_cap was already non-zero and doubled it unconditionally.
static void wipMembersBodiesReserveOne(WipMembers* wm) {
    if (wm->bodies_len + 1 > wm->bodies_cap) {
        uint32_t new_cap = wm->bodies_cap == 0 ? 64 : wm->bodies_cap * 2;
        uint32_t* grown = realloc(wm->bodies, new_cap * sizeof(uint32_t));
        if (!grown)
            exit(1);
        wm->bodies = grown;
        wm->bodies_cap = new_cap;
    }
}
// Append body instructions with ref_table fixups to wm->bodies.
// After each instruction, any chained ref instructions registered in the
// AstGen ref_table for it are spliced in immediately after (mirroring
// appendBodyWithFixups in upstream AstGen).
static void wipMembersBodiesAppendWithFixups(
    WipMembers* wm, AstGenCtx* ag, const uint32_t* body, uint32_t body_len) {
    for (uint32_t i = 0; i < body_len; i++) {
        uint32_t inst = body[i];
        wipMembersBodiesReserveOne(wm);
        wm->bodies[wm->bodies_len++] = inst;
        // Follow the ref chain: each fetched ref may itself have a ref.
        uint32_t ref_inst;
        while (refTableFetchRemove(ag, inst, &ref_inst)) {
            wipMembersBodiesReserveOne(wm);
            wm->bodies[wm->bodies_len++] = ref_inst;
            inst = ref_inst;
        }
    }
}
// --- containerDecl (AstGen.zig:5468) ---
// Handles container declarations as expressions (struct{}, enum{}, etc.).
// Returns the container's declaration instruction converted to a Zir ref.
// NOTE(port): union/opaque containers currently fall back to the struct
// lowering path (see the default case below).
static uint32_t containerDecl(GenZir* gz, Scope* scope, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Extract members based on node type (Ast.zig:2459-2470). The three
    // shapes differ in how the member list is encoded in node data.
    uint32_t members_buf[2];
    const uint32_t* members;
    uint32_t members_len;
    switch (tag) {
    case AST_NODE_CONTAINER_DECL_TWO:
    case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
    case AST_NODE_TAGGED_UNION_TWO:
    case AST_NODE_TAGGED_UNION_TWO_TRAILING: {
        // lhs and rhs are optional member nodes (0 = none).
        members_len = 0;
        if (nd.lhs != 0)
            members_buf[members_len++] = nd.lhs;
        if (nd.rhs != 0)
            members_buf[members_len++] = nd.rhs;
        members = members_buf;
        break;
    }
    case AST_NODE_CONTAINER_DECL:
    case AST_NODE_CONTAINER_DECL_TRAILING:
    case AST_NODE_TAGGED_UNION:
    case AST_NODE_TAGGED_UNION_TRAILING: {
        // extra_data[lhs..rhs] contains members.
        members = tree->extra_data.arr + nd.lhs;
        members_len = nd.rhs - nd.lhs;
        break;
    }
    case AST_NODE_CONTAINER_DECL_ARG:
    case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
    case AST_NODE_TAGGED_UNION_ENUM_TAG:
    case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: {
        // lhs is arg node, rhs is extra index → SubRange(start, end).
        uint32_t start = tree->extra_data.arr[nd.rhs];
        uint32_t end = tree->extra_data.arr[nd.rhs + 1];
        members = tree->extra_data.arr + start;
        members_len = end - start;
        break;
    }
    default:
        // Not a container node: record a compile error and bail with a
        // harmless ref so callers can continue.
        SET_ERROR(ag);
        return ZIR_REF_VOID_VALUE;
    }
    // Save/clear fn_block for nested containers (AstGen.zig:5480-5482);
    // decls inside the container must not see the enclosing function body.
    void* prev_fn_block = ag->fn_block;
    ag->fn_block = NULL;
    // Dispatch based on container keyword (AstGen.zig:5485-5536).
    uint32_t main_token = tree->nodes.main_tokens[node];
    TokenizerTag kw_tag = tree->tokens.tags[main_token];
    uint32_t decl_inst;
    switch (kw_tag) {
    case TOKEN_KEYWORD_STRUCT:
        decl_inst = structDeclInner(ag, gz, node, members, members_len);
        break;
    case TOKEN_KEYWORD_ENUM:
        decl_inst = enumDeclInner(ag, gz, node, members, members_len);
        break;
    default:
        // union/opaque: fall back to struct for now.
        decl_inst = structDeclInner(ag, gz, node, members, members_len);
        break;
    }
    (void)scope;
    ag->fn_block = prev_fn_block;
    return decl_inst + ZIR_REF_START_INDEX;
}
// --- EnumDecl.Small packing (Zir.zig EnumDecl.Small) ---
// Mirrors the packed struct layout: five single-bit flags in bits 0-4, the
// 2-bit name_strategy in bits 5-6, and nonexhaustive in bit 7.
typedef struct {
    bool has_tag_type; // bit 0
    bool has_captures_len; // bit 1
    bool has_body_len; // bit 2
    bool has_fields_len; // bit 3
    bool has_decls_len; // bit 4
    uint8_t name_strategy; // bits 5-6 (2 bits)
    bool nonexhaustive; // bit 7
} EnumDeclSmall;
// Pack an EnumDeclSmall into the 16-bit `small` operand of an extended
// instruction.
static uint16_t packEnumDeclSmall(EnumDeclSmall s) {
    const bool flags[5] = { s.has_tag_type, s.has_captures_len,
        s.has_body_len, s.has_fields_len, s.has_decls_len };
    uint16_t result = 0;
    for (unsigned bit = 0; bit < 5; bit++) {
        if (flags[bit])
            result |= (uint16_t)(1u << bit);
    }
    result |= (uint16_t)((s.name_strategy & 0x3u) << 5);
    if (s.nonexhaustive)
        result |= (uint16_t)(1u << 7);
    return result;
}
// Mirrors GenZir.setEnum (AstGen.zig:13080).
// Finalizes a previously reserved instruction as an extended enum_decl and
// appends its trailing payload to the extra array.
static void setEnum(AstGenCtx* ag, uint32_t inst, uint32_t src_node,
    EnumDeclSmall small, uint32_t fields_len, uint32_t decls_len) {
    // Worst case payload: 4 hash words + line + node + fields_len + decls_len.
    ensureExtraCapacity(ag, 6 + 3);
    uint32_t payload_index = ag->extra_len;
    // fields_hash occupies 4 words; this port leaves them zero-filled.
    for (int i = 0; i < 4; i++)
        ag->extra[ag->extra_len++] = 0;
    ag->extra[ag->extra_len++] = ag->source_line;
    ag->extra[ag->extra_len++] = src_node;
    // Optional trailing lengths, gated by the `small` flags.
    if (small.has_fields_len)
        ag->extra[ag->extra_len++] = fields_len;
    if (small.has_decls_len)
        ag->extra[ag->extra_len++] = decls_len;
    // Rewrite the reserved instruction slot as extended/enum_decl.
    ZirInstData inst_data;
    memset(&inst_data, 0, sizeof(inst_data));
    inst_data.extended.opcode = (uint16_t)ZIR_EXT_ENUM_DECL;
    inst_data.extended.small = packEnumDeclSmall(small);
    inst_data.extended.operand = payload_index;
    ag->inst_tags[inst] = ZIR_INST_EXTENDED;
    ag->inst_datas[inst] = inst_data;
}
// --- enumDeclInner (AstGen.zig:5508) ---
// Lowers an enum container declaration to an extended enum_decl
// instruction. Decls (comptime blocks, vars, fns, tests) are recorded via
// WipMembers; enum field names are collected into a locally grown array.
// Fields with explicit values are not supported by this port yet: the
// per-field has_value bit words are always emitted as zero.
// Returns the reserved instruction index of the enum_decl.
static uint32_t enumDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
    const uint32_t* members, uint32_t members_len) {
    const Ast* tree = ag->tree;
    // Reserve the instruction slot up front so member bodies can target it.
    uint32_t decl_inst = reserveInstructionIndex(ag);
    gzAppendInstruction(gz, decl_inst);
    if (members_len == 0) {
        // Empty enum: no optional trailing data at all.
        EnumDeclSmall small;
        memset(&small, 0, sizeof(small));
        setEnum(ag, decl_inst, node, small, 0, 0);
        return decl_inst;
    }
    advanceSourceCursorToNode(ag, node);
    // scanContainer returns the number of non-field members; fields are
    // the remainder.
    uint32_t decl_count = scanContainer(ag, members, members_len);
    uint32_t field_count = members_len - decl_count;
    // Use WipMembers for decls and field data.
    // Enum fields: 1 bit per field (has_value), max 2 words per field
    // (name + value).
    WipMembers wm = wipMembersInit(decl_count, field_count);
    // Enum fields use 1 bit per field: has_value.
    // We use the same WipMembers but with 1-bit fields.
    // Actually, upstream uses bits_per_field=1, max_field_size=2.
    // Re-init with correct params would be better but let's reuse.
    // For simplicity: track field data manually.
    uint32_t* field_names = NULL;
    uint32_t field_names_len = 0;
    uint32_t field_names_cap = 0;
    for (uint32_t i = 0; i < members_len; i++) {
        uint32_t member_node = members[i];
        AstNodeTag mtag = tree->nodes.tags[member_node];
        switch (mtag) {
        case AST_NODE_COMPTIME:
            comptimeDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_SIMPLE_VAR_DECL:
        case AST_NODE_GLOBAL_VAR_DECL:
        case AST_NODE_LOCAL_VAR_DECL:
        case AST_NODE_ALIGNED_VAR_DECL:
            globalVarDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_FN_DECL:
            fnDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_TEST_DECL:
            testDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_CONTAINER_FIELD_INIT:
        case AST_NODE_CONTAINER_FIELD_ALIGN:
        case AST_NODE_CONTAINER_FIELD: {
            // Enum field: just a name (AstGen.zig:5617-5670).
            uint32_t main_token = tree->nodes.main_tokens[member_node];
            uint32_t field_name = identAsString(ag, main_token);
            // Grow field_names array (geometric growth, abort on OOM).
            if (field_names_len >= field_names_cap) {
                uint32_t new_cap
                    = field_names_cap == 0 ? 8 : field_names_cap * 2;
                field_names = realloc(field_names, new_cap * sizeof(uint32_t));
                if (!field_names)
                    exit(1);
                field_names_cap = new_cap;
            }
            field_names[field_names_len++] = field_name;
            break;
        }
        default:
            // Unexpected member kind for an enum container.
            SET_ERROR(ag);
            break;
        }
    }
    EnumDeclSmall small;
    memset(&small, 0, sizeof(small));
    small.has_fields_len = (field_count > 0);
    small.has_decls_len = (decl_count > 0);
    setEnum(ag, decl_inst, node, small, field_count, decl_count);
    // Append: decls, field_bits, field_names (AstGen.zig:5724-5729).
    // NOTE(review): unlike structDeclInner, wipMembersFinishBits is not
    // called before taking the decls slice here; fields are tracked
    // manually, but confirm decl bookkeeping needs no flush.
    uint32_t decls_len_out;
    const uint32_t* decls_slice = wipMembersDeclsSlice(&wm, &decls_len_out);
    // Field bits: 1 bit per field (has_value = false for simple enums).
    uint32_t fields_per_u32 = 32;
    uint32_t bit_words = field_count > 0
        ? (field_count + fields_per_u32 - 1) / fields_per_u32
        : 0;
    ensureExtraCapacity(ag, decls_len_out + bit_words + field_names_len);
    for (uint32_t i = 0; i < decls_len_out; i++)
        ag->extra[ag->extra_len++] = decls_slice[i];
    // Field bits: all zero (no values).
    for (uint32_t i = 0; i < bit_words; i++)
        ag->extra[ag->extra_len++] = 0;
    // Field names.
    for (uint32_t i = 0; i < field_names_len; i++)
        ag->extra[ag->extra_len++] = field_names[i];
    free(field_names);
    wipMembersDeinit(&wm);
    return decl_inst;
}
// --- structDeclInner (AstGen.zig:4926) ---
// Lowers a struct container declaration to an extended struct_decl
// instruction. Decls go through WipMembers; each container field emits
// its name, flag bits (align/value/comptime/type-body), and either an
// inline type ref or a break_inline-terminated body evaluated in a
// shared comptime block_scope that is reset between expressions.
// Returns the reserved instruction index of the struct_decl.
static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
    const uint32_t* members, uint32_t members_len) {
    const Ast* tree = ag->tree;
    uint32_t decl_inst = reserveInstructionIndex(ag);
    gzAppendInstruction(gz, decl_inst);
    // Fast path: no members, no backing int (AstGen.zig:4954-4970).
    if (members_len == 0) {
        StructDeclSmall small;
        memset(&small, 0, sizeof(small));
        setStruct(ag, decl_inst, node, small, 0, 0, 0);
        return decl_inst;
    }
    // Non-empty container (AstGen.zig:4973-5189).
    advanceSourceCursorToNode(ag, node);
    uint32_t decl_count = scanContainer(ag, members, members_len);
    uint32_t field_count = members_len - decl_count;
    WipMembers wm = wipMembersInit(decl_count, field_count);
    // Set up block_scope for field type/align/init expressions.
    // (AstGen.zig:4983-4992)
    GenZir block_scope;
    memset(&block_scope, 0, sizeof(block_scope));
    block_scope.base.tag = SCOPE_GEN_ZIR;
    block_scope.parent = NULL;
    block_scope.astgen = ag;
    block_scope.decl_node_index = node;
    block_scope.decl_line = ag->source_line;
    block_scope.is_comptime = true;
    block_scope.instructions_top = ag->scratch_inst_len;
    // Accumulated across all non-comptime fields; packed into Small below.
    bool known_non_opv = false;
    bool known_comptime_only = false;
    bool any_comptime_fields = false;
    bool any_aligned_fields = false;
    bool any_default_inits = false;
    // Process each member (AstGen.zig:5060-5147).
    for (uint32_t i = 0; i < members_len; i++) {
        uint32_t member_node = members[i];
        AstNodeTag mtag = tree->nodes.tags[member_node];
        switch (mtag) {
        case AST_NODE_COMPTIME:
            comptimeDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_SIMPLE_VAR_DECL:
            globalVarDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_TEST_DECL:
            testDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_FN_DECL:
            fnDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        // NOTE(review): usingnamespace is routed through globalVarDecl
        // together with the var-decl variants — confirm this is the
        // intended fallback for this port.
        case AST_NODE_USINGNAMESPACE:
        case AST_NODE_GLOBAL_VAR_DECL:
        case AST_NODE_LOCAL_VAR_DECL:
        case AST_NODE_ALIGNED_VAR_DECL:
            globalVarDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
            break;
        case AST_NODE_CONTAINER_FIELD_INIT:
        case AST_NODE_CONTAINER_FIELD_ALIGN:
        case AST_NODE_CONTAINER_FIELD: {
            // Extract field info from AST node (Ast.zig:1413-1454).
            uint32_t main_token = tree->nodes.main_tokens[member_node];
            AstData nd = tree->nodes.datas[member_node];
            uint32_t type_node = nd.lhs;
            uint32_t align_node = 0;
            uint32_t value_node = 0;
            bool has_comptime_token = false;
            switch (mtag) {
            case AST_NODE_CONTAINER_FIELD_INIT:
                // lhs = type_expr, rhs = value_expr (optional, 0=none)
                value_node = nd.rhs;
                break;
            case AST_NODE_CONTAINER_FIELD_ALIGN:
                // lhs = type_expr, rhs = align_expr
                align_node = nd.rhs;
                break;
            case AST_NODE_CONTAINER_FIELD:
                // lhs = type_expr, rhs = extra index to {align, value}
                if (nd.rhs != 0) {
                    align_node = tree->extra_data.arr[nd.rhs];
                    value_node = tree->extra_data.arr[nd.rhs + 1];
                }
                break;
            default:
                break;
            }
            // Check for comptime token preceding main_token
            // (Ast.zig:2071-2082).
            if (main_token > 0
                && tree->tokens.tags[main_token - 1]
                    == TOKEN_KEYWORD_COMPTIME) {
                has_comptime_token = true;
            }
            // Field name (AstGen.zig:5080).
            uint32_t field_name = identAsString(ag, main_token);
            wipMembersAppendToField(&wm, field_name);
            // Type expression (AstGen.zig:5089-5109).
            // If evaluating the type emitted instructions, the field
            // carries a body instead of a direct ref.
            bool have_type_body = false;
            uint32_t field_type = 0;
            if (type_node != 0) {
                field_type
                    = typeExpr(&block_scope, &block_scope.base, type_node);
                have_type_body = (gzInstructionsLen(&block_scope) > 0);
            }
            bool have_align = (align_node != 0);
            bool have_value = (value_node != 0);
            bool is_comptime = has_comptime_token;
            if (is_comptime) {
                any_comptime_fields = true;
            } else {
                // (AstGen.zig:5106-5109)
                if (type_node != 0) {
                    known_non_opv = known_non_opv
                        || nodeImpliesMoreThanOnePossibleValue(
                            tree, type_node);
                    known_comptime_only = known_comptime_only
                        || nodeImpliesComptimeOnly(tree, type_node);
                }
            }
            // 4 flag bits per field, in this fixed order.
            bool field_bits[4]
                = { have_align, have_value, is_comptime, have_type_body };
            wipMembersNextField(&wm, field_bits);
            if (have_type_body) {
                // Emit break_inline to carry the type value
                // (AstGen.zig:5097-5099).
                if (!endsWithNoReturn(&block_scope)) {
                    makeBreakInline(&block_scope, decl_inst, field_type,
                        AST_NODE_OFFSET_NONE);
                }
                uint32_t raw_len = gzInstructionsLen(&block_scope);
                const uint32_t* body = gzInstructionsSlice(&block_scope);
                uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_len);
                uint32_t bodies_before = wm.bodies_len;
                wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
                (void)bodies_before;
                wipMembersAppendToField(&wm, body_len);
                // Reset block_scope for the next sub-expression.
                ag->scratch_inst_len = block_scope.instructions_top;
            } else {
                wipMembersAppendToField(&wm, field_type);
            }
            if (have_align) {
                any_aligned_fields = true;
                // Align expressions always get a body (never inline).
                uint32_t align_ref
                    = expr(&block_scope, &block_scope.base, align_node);
                if (!endsWithNoReturn(&block_scope)) {
                    makeBreakInline(&block_scope, decl_inst, align_ref,
                        AST_NODE_OFFSET_NONE);
                }
                uint32_t raw_len = gzInstructionsLen(&block_scope);
                const uint32_t* body = gzInstructionsSlice(&block_scope);
                uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_len);
                wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
                wipMembersAppendToField(&wm, body_len);
                ag->scratch_inst_len = block_scope.instructions_top;
            }
            if (have_value) {
                any_default_inits = true;
                // Default-init expressions likewise get a body.
                uint32_t default_ref
                    = expr(&block_scope, &block_scope.base, value_node);
                if (!endsWithNoReturn(&block_scope)) {
                    makeBreakInline(&block_scope, decl_inst, default_ref,
                        AST_NODE_OFFSET_NONE);
                }
                uint32_t raw_len = gzInstructionsLen(&block_scope);
                const uint32_t* body = gzInstructionsSlice(&block_scope);
                uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_len);
                wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
                wipMembersAppendToField(&wm, body_len);
                ag->scratch_inst_len = block_scope.instructions_top;
            }
            break;
        }
        default:
            // Unexpected member kind for a struct container.
            SET_ERROR(ag);
            break;
        }
    }
    // Flush any partially filled flag-bit word.
    wipMembersFinishBits(&wm);
    // setStruct (AstGen.zig:5152-5166).
    StructDeclSmall small;
    memset(&small, 0, sizeof(small));
    small.has_decls_len = (decl_count > 0);
    small.has_fields_len = (field_count > 0);
    small.known_non_opv = known_non_opv;
    small.known_comptime_only = known_comptime_only;
    small.any_comptime_fields = any_comptime_fields;
    small.any_default_inits = any_default_inits;
    small.any_aligned_fields = any_aligned_fields;
    setStruct(ag, decl_inst, node, small, 0, field_count, decl_count);
    // Append: captures (none), backing_int (none), decls, fields, bodies
    // (AstGen.zig:5176-5189).
    uint32_t decls_len;
    const uint32_t* decls_slice = wipMembersDeclsSlice(&wm, &decls_len);
    uint32_t fields_len;
    const uint32_t* fields_slice = wipMembersFieldsSlice(&wm, &fields_len);
    ensureExtraCapacity(ag, decls_len + fields_len + wm.bodies_len);
    for (uint32_t i = 0; i < decls_len; i++)
        ag->extra[ag->extra_len++] = decls_slice[i];
    for (uint32_t i = 0; i < fields_len; i++)
        ag->extra[ag->extra_len++] = fields_slice[i];
    for (uint32_t i = 0; i < wm.bodies_len; i++)
        ag->extra[ag->extra_len++] = wm.bodies[i];
    gzUnstack(&block_scope);
    wipMembersDeinit(&wm);
    return decl_inst;
}
// --- AstRlAnnotate (AstRlAnnotate.zig) ---
// Pre-pass to determine which AST nodes need result locations.
//
// Compressed form of the annotator's ResultInfo: for this pre-pass only
// "does the result location carry a type?" and "does it carry a pointer
// to store into?" matter.
typedef struct {
    bool have_type; // result location supplies a type
    bool have_ptr; // result location supplies a pointer destination
} RlResultInfo;
// The four ResultInfo shapes passed down by the annotator.
#define RL_RI_NONE ((RlResultInfo) { false, false })
#define RL_RI_TYPED_PTR ((RlResultInfo) { true, true })
#define RL_RI_INFERRED_PTR ((RlResultInfo) { false, true })
#define RL_RI_TYPE_ONLY ((RlResultInfo) { true, false })
// Block for label tracking (AstRlAnnotate.zig:56-62).
// One entry per enclosing labeled block or loop; `break` walks the
// parent chain to find its target.
typedef struct RlBlock {
    struct RlBlock* parent; // enclosing block, or NULL at the root
    uint32_t label_token; // UINT32_MAX = no label
    bool is_loop; // target of an unlabeled break
    RlResultInfo ri; // result info a break operand inherits
    bool consumes_res_ptr; // set when a break consumed the result pointer
} RlBlock;
// Records `node` in the nodes_need_rl set (the AstRlAnnotate result),
// growing the backing array geometrically as needed.
// Fix: the original assigned realloc's result directly to
// ag->nodes_need_rl without checking for NULL, leaking the old buffer
// and dereferencing NULL on OOM. Abort on OOM instead, matching the
// allocation policy used elsewhere in this file (e.g. the field_names
// growth in enumDeclInner).
static void nodesNeedRlAdd(AstGenCtx* ag, uint32_t node) {
    if (ag->nodes_need_rl_len >= ag->nodes_need_rl_cap) {
        uint32_t new_cap
            = ag->nodes_need_rl_cap == 0 ? 16 : ag->nodes_need_rl_cap * 2;
        uint32_t* grown
            = realloc(ag->nodes_need_rl, new_cap * sizeof(uint32_t));
        if (!grown)
            exit(1);
        ag->nodes_need_rl = grown;
        ag->nodes_need_rl_cap = new_cap;
    }
    ag->nodes_need_rl[ag->nodes_need_rl_len++] = node;
}
// Returns true if `node` was previously recorded via nodesNeedRlAdd.
static bool nodesNeedRlContains(const AstGenCtx* ag, uint32_t node) {
    const uint32_t* entries = ag->nodes_need_rl;
    uint32_t count = ag->nodes_need_rl_len;
    for (uint32_t idx = 0; idx < count; idx++) {
        if (entries[idx] == node)
            return true;
    }
    return false;
}
// Compare two identifier tokens by their source text.
// True if `c` can appear inside an identifier ([A-Za-z0-9_]).
static bool rlIsIdentChar(char c) {
    return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
        || (c >= '0' && c <= '9') || c == '_';
}
// Compare two identifier tokens by their source text.
// Walks both spellings in lockstep; equal iff both identifiers end at
// the same offset with identical characters up to that point.
// NOTE(review): assumes bare identifiers — quoted @"..." identifiers
// would need token-length information; confirm callers only pass labels.
static bool rlTokenIdentEqual(
    const Ast* tree, uint32_t tok_a, uint32_t tok_b) {
    const char* src = tree->source;
    uint32_t a_start = tree->tokens.starts[tok_a];
    uint32_t b_start = tree->tokens.starts[tok_b];
    for (uint32_t i = 0;; i++) {
        char ca = src[a_start + i];
        char cb = src[b_start + i];
        bool a_id = rlIsIdentChar(ca);
        bool b_id = rlIsIdentChar(cb);
        if (!a_id && !b_id)
            return true; // both ended simultaneously: equal
        if (!a_id || !b_id)
            return false; // different lengths
        if (ca != cb)
            return false;
    }
}
// Forward declarations.
static bool rlExpr(
AstGenCtx* ag, uint32_t node, RlBlock* block, RlResultInfo ri);
static void rlContainerDecl(AstGenCtx* ag, RlBlock* block, uint32_t node);
static bool rlBlockExpr(AstGenCtx* ag, RlBlock* parent_block, RlResultInfo ri,
uint32_t node, const uint32_t* stmts, uint32_t count);
static bool rlBuiltinCall(AstGenCtx* ag, RlBlock* block, uint32_t node,
const uint32_t* args, uint32_t nargs);
// containerDecl (AstRlAnnotate.zig:89-127).
// Annotation pre-pass over a container declaration: recurses into the
// optional arg expression (enum tag type / backing int) with type_only
// result info, then into every member with no result info. The container
// kind (struct/union/enum/opaque) is irrelevant to result locations.
static void rlContainerDecl(AstGenCtx* ag, RlBlock* block, uint32_t node) {
    const Ast* tree = ag->tree;
    AstNodeTag tag = tree->nodes.tags[node];
    AstData nd = tree->nodes.datas[node];
    // Extract arg and members depending on variant.
    // All container decls: recurse arg with type_only, members with none.
    // (The keyword type — struct/union/enum/opaque — doesn't matter for RL.)
    uint32_t member_buf[2];
    const uint32_t* members = NULL;
    uint32_t members_len = 0;
    uint32_t arg_node = 0; // 0 = no arg
    switch (tag) {
    case AST_NODE_CONTAINER_DECL_TWO:
    case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
    case AST_NODE_TAGGED_UNION_TWO:
    case AST_NODE_TAGGED_UNION_TWO_TRAILING: {
        // Up to two inline members stored directly in lhs/rhs (0 = absent).
        uint32_t idx = 0;
        if (nd.lhs != 0)
            member_buf[idx++] = nd.lhs;
        if (nd.rhs != 0)
            member_buf[idx++] = nd.rhs;
        members = member_buf;
        members_len = idx;
        break;
    }
    case AST_NODE_CONTAINER_DECL:
    case AST_NODE_CONTAINER_DECL_TRAILING:
    case AST_NODE_TAGGED_UNION:
    case AST_NODE_TAGGED_UNION_TRAILING:
        // Members are the extra_data range [lhs, rhs).
        members = tree->extra_data.arr + nd.lhs;
        members_len = nd.rhs - nd.lhs;
        break;
    case AST_NODE_CONTAINER_DECL_ARG:
    case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
    case AST_NODE_TAGGED_UNION_ENUM_TAG:
    case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: {
        // lhs = arg expression; rhs = extra index to a {start, end}
        // sub-range describing the member list.
        arg_node = nd.lhs;
        uint32_t extra_idx = nd.rhs;
        uint32_t start = tree->extra_data.arr[extra_idx];
        uint32_t end = tree->extra_data.arr[extra_idx + 1];
        members = tree->extra_data.arr + start;
        members_len = end - start;
        break;
    }
    default:
        // Not a container-decl node; nothing to annotate.
        return;
    }
    if (arg_node != 0)
        (void)rlExpr(ag, arg_node, block, RL_RI_TYPE_ONLY);
    for (uint32_t i = 0; i < members_len; i++)
        (void)rlExpr(ag, members[i], block, RL_RI_NONE);
}
// blockExpr (AstRlAnnotate.zig:787-814).
// Annotates a block expression. Only a labeled block can yield a value
// via `break :label`, so only then do we track whether some break
// consumed the result pointer; in that case the block node is recorded
// as needing a result location.
static bool rlBlockExpr(AstGenCtx* ag, RlBlock* parent_block, RlResultInfo ri,
    uint32_t node, const uint32_t* stmts, uint32_t count) {
    const Ast* tree = ag->tree;
    uint32_t lbrace = tree->nodes.main_tokens[node];
    // A labeled block is preceded by `<identifier> :`.
    bool labeled = lbrace >= 2
        && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
        && tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER;
    if (!labeled) {
        // Unlabeled: statements cannot target this block with a value.
        for (uint32_t i = 0; i < count; i++)
            (void)rlExpr(ag, stmts[i], parent_block, RL_RI_NONE);
        return false;
    }
    RlBlock labeled_block;
    labeled_block.parent = parent_block;
    labeled_block.label_token = lbrace - 2;
    labeled_block.is_loop = false;
    labeled_block.ri = ri;
    labeled_block.consumes_res_ptr = false;
    for (uint32_t i = 0; i < count; i++)
        (void)rlExpr(ag, stmts[i], &labeled_block, RL_RI_NONE);
    if (labeled_block.consumes_res_ptr)
        nodesNeedRlAdd(ag, node);
    return labeled_block.consumes_res_ptr;
}
// builtinCall (AstRlAnnotate.zig:816-1100).
// Simplified: no builtin currently consumes its result location,
// so we just recurse into all args with RL_RI_NONE.
static bool rlBuiltinCall(AstGenCtx* ag, RlBlock* block, uint32_t node,
    const uint32_t* args, uint32_t nargs) {
    (void)node;
    uint32_t i = 0;
    while (i < nargs) {
        (void)rlExpr(ag, args[i], block, RL_RI_NONE);
        i++;
    }
    return false;
}
// expr (AstRlAnnotate.zig:130-771).
static bool rlExpr(
AstGenCtx* ag, uint32_t node, RlBlock* block, RlResultInfo ri) {
const Ast* tree = ag->tree;
AstNodeTag tag = tree->nodes.tags[node];
AstData nd = tree->nodes.datas[node];
switch (tag) {
// Unreachable nodes (AstRlAnnotate.zig:133-142).
case AST_NODE_ROOT:
case AST_NODE_SWITCH_CASE_ONE:
case AST_NODE_SWITCH_CASE_INLINE_ONE:
case AST_NODE_SWITCH_CASE:
case AST_NODE_SWITCH_CASE_INLINE:
case AST_NODE_SWITCH_RANGE:
case AST_NODE_FOR_RANGE:
case AST_NODE_ASM_OUTPUT:
case AST_NODE_ASM_INPUT:
return false; // unreachable in upstream
// errdefer (AstRlAnnotate.zig:144-147).
case AST_NODE_ERRDEFER:
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
return false;
// defer (AstRlAnnotate.zig:148-151).
case AST_NODE_DEFER:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// container_field (AstRlAnnotate.zig:153-167).
case AST_NODE_CONTAINER_FIELD_INIT: {
// lhs = type_expr, rhs = value_expr
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
if (nd.rhs != 0)
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
}
case AST_NODE_CONTAINER_FIELD_ALIGN: {
// lhs = type_expr, rhs = align_expr
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
if (nd.rhs != 0)
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
}
case AST_NODE_CONTAINER_FIELD: {
// lhs = type_expr, rhs = extra index to {align_expr, value_expr}
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
if (nd.rhs != 0) {
uint32_t align_node = tree->extra_data.arr[nd.rhs];
uint32_t value_node = tree->extra_data.arr[nd.rhs + 1];
if (align_node != 0)
(void)rlExpr(ag, align_node, block, RL_RI_TYPE_ONLY);
if (value_node != 0)
(void)rlExpr(ag, value_node, block, RL_RI_TYPE_ONLY);
}
return false;
}
// test_decl (AstRlAnnotate.zig:168-171).
case AST_NODE_TEST_DECL:
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
return false;
// var_decl (AstRlAnnotate.zig:172-202).
case AST_NODE_GLOBAL_VAR_DECL:
case AST_NODE_LOCAL_VAR_DECL:
case AST_NODE_SIMPLE_VAR_DECL:
case AST_NODE_ALIGNED_VAR_DECL: {
uint32_t type_node = 0;
uint32_t init_node = 0;
uint32_t mut_token = tree->nodes.main_tokens[node];
if (tag == AST_NODE_SIMPLE_VAR_DECL) {
type_node = nd.lhs;
init_node = nd.rhs;
} else if (tag == AST_NODE_LOCAL_VAR_DECL
|| tag == AST_NODE_GLOBAL_VAR_DECL) {
type_node = tree->extra_data.arr[nd.lhs];
init_node = nd.rhs;
} else { // ALIGNED_VAR_DECL
init_node = nd.rhs;
}
RlResultInfo init_ri;
if (type_node != 0) {
(void)rlExpr(ag, type_node, block, RL_RI_TYPE_ONLY);
init_ri = RL_RI_TYPED_PTR;
} else {
init_ri = RL_RI_INFERRED_PTR;
}
if (init_node == 0)
return false;
bool is_const = (tree->source[tree->tokens.starts[mut_token]] == 'c');
if (is_const) {
bool init_consumes_rl = rlExpr(ag, init_node, block, init_ri);
if (init_consumes_rl)
nodesNeedRlAdd(ag, node);
return false;
} else {
(void)rlExpr(ag, init_node, block, init_ri);
return false;
}
}
// assign (AstRlAnnotate.zig:212-217).
case AST_NODE_ASSIGN:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPED_PTR);
return false;
// compound assign (AstRlAnnotate.zig:218-240).
case AST_NODE_ASSIGN_SHL:
case AST_NODE_ASSIGN_SHL_SAT:
case AST_NODE_ASSIGN_SHR:
case AST_NODE_ASSIGN_BIT_AND:
case AST_NODE_ASSIGN_BIT_OR:
case AST_NODE_ASSIGN_BIT_XOR:
case AST_NODE_ASSIGN_DIV:
case AST_NODE_ASSIGN_SUB:
case AST_NODE_ASSIGN_SUB_WRAP:
case AST_NODE_ASSIGN_SUB_SAT:
case AST_NODE_ASSIGN_MOD:
case AST_NODE_ASSIGN_ADD:
case AST_NODE_ASSIGN_ADD_WRAP:
case AST_NODE_ASSIGN_ADD_SAT:
case AST_NODE_ASSIGN_MUL:
case AST_NODE_ASSIGN_MUL_WRAP:
case AST_NODE_ASSIGN_MUL_SAT:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
return false;
// shl/shr (AstRlAnnotate.zig:241-246).
case AST_NODE_SHL:
case AST_NODE_SHR:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
// binary arithmetic/comparison (AstRlAnnotate.zig:247-274).
case AST_NODE_ADD:
case AST_NODE_ADD_WRAP:
case AST_NODE_ADD_SAT:
case AST_NODE_SUB:
case AST_NODE_SUB_WRAP:
case AST_NODE_SUB_SAT:
case AST_NODE_MUL:
case AST_NODE_MUL_WRAP:
case AST_NODE_MUL_SAT:
case AST_NODE_DIV:
case AST_NODE_MOD:
case AST_NODE_SHL_SAT:
case AST_NODE_BIT_AND:
case AST_NODE_BIT_OR:
case AST_NODE_BIT_XOR:
case AST_NODE_BANG_EQUAL:
case AST_NODE_EQUAL_EQUAL:
case AST_NODE_GREATER_THAN:
case AST_NODE_GREATER_OR_EQUAL:
case AST_NODE_LESS_THAN:
case AST_NODE_LESS_OR_EQUAL:
case AST_NODE_ARRAY_CAT:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
return false;
// array_mult (AstRlAnnotate.zig:276-281).
case AST_NODE_ARRAY_MULT:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
// error_union, merge_error_sets (AstRlAnnotate.zig:282-287).
case AST_NODE_ERROR_UNION:
case AST_NODE_MERGE_ERROR_SETS:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
return false;
// bool_and, bool_or (AstRlAnnotate.zig:288-295).
case AST_NODE_BOOL_AND:
case AST_NODE_BOOL_OR:
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
// bool_not (AstRlAnnotate.zig:296-299).
case AST_NODE_BOOL_NOT:
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
return false;
// bit_not, negation, negation_wrap (AstRlAnnotate.zig:300-303).
case AST_NODE_BIT_NOT:
case AST_NODE_NEGATION:
case AST_NODE_NEGATION_WRAP:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// Leaves (AstRlAnnotate.zig:305-320).
case AST_NODE_IDENTIFIER:
case AST_NODE_STRING_LITERAL:
case AST_NODE_MULTILINE_STRING_LITERAL:
case AST_NODE_NUMBER_LITERAL:
case AST_NODE_UNREACHABLE_LITERAL:
case AST_NODE_ASM_SIMPLE:
case AST_NODE_ASM:
case AST_NODE_ASM_LEGACY:
case AST_NODE_ENUM_LITERAL:
case AST_NODE_ERROR_VALUE:
case AST_NODE_ANYFRAME_LITERAL:
case AST_NODE_CONTINUE:
case AST_NODE_CHAR_LITERAL:
case AST_NODE_ERROR_SET_DECL:
return false;
// builtin_call (AstRlAnnotate.zig:322-330).
case AST_NODE_BUILTIN_CALL_TWO:
case AST_NODE_BUILTIN_CALL_TWO_COMMA: {
uint32_t args[2];
uint32_t nargs = 0;
if (nd.lhs != 0)
args[nargs++] = nd.lhs;
if (nd.rhs != 0)
args[nargs++] = nd.rhs;
return rlBuiltinCall(ag, block, node, args, nargs);
}
case AST_NODE_BUILTIN_CALL:
case AST_NODE_BUILTIN_CALL_COMMA: {
uint32_t start = nd.lhs;
uint32_t end = nd.rhs;
return rlBuiltinCall(
ag, block, node, tree->extra_data.arr + start, end - start);
}
// call (AstRlAnnotate.zig:332-351).
case AST_NODE_CALL_ONE:
case AST_NODE_CALL_ONE_COMMA: {
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
if (nd.rhs != 0)
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
}
case AST_NODE_CALL:
case AST_NODE_CALL_COMMA: {
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
uint32_t start = tree->extra_data.arr[nd.rhs];
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
for (uint32_t i = start; i < end; i++)
(void)rlExpr(ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
return false;
}
// return (AstRlAnnotate.zig:353-361).
case AST_NODE_RETURN:
if (nd.lhs != 0) {
bool ret_consumes_rl = rlExpr(ag, nd.lhs, block, RL_RI_TYPED_PTR);
if (ret_consumes_rl)
nodesNeedRlAdd(ag, node);
}
return false;
// field_access (AstRlAnnotate.zig:363-367).
case AST_NODE_FIELD_ACCESS:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// if_simple, if (AstRlAnnotate.zig:369-387).
case AST_NODE_IF_SIMPLE:
case AST_NODE_IF: {
uint32_t cond_node = nd.lhs;
uint32_t then_node, else_node = 0;
if (tag == AST_NODE_IF_SIMPLE) {
then_node = nd.rhs;
} else {
then_node = tree->extra_data.arr[nd.rhs];
else_node = tree->extra_data.arr[nd.rhs + 1];
}
// Detect payload/error token.
uint32_t last_cond_tok = lastToken(tree, cond_node);
uint32_t pipe_tok = last_cond_tok + 2;
bool has_payload = (pipe_tok < tree->tokens.len
&& tree->tokens.tags[pipe_tok] == TOKEN_PIPE);
bool has_error = false;
if (else_node != 0) {
uint32_t else_tok = lastToken(tree, then_node) + 1;
has_error = (else_tok + 1 < tree->tokens.len
&& tree->tokens.tags[else_tok + 1] == TOKEN_PIPE);
}
if (has_error || has_payload)
(void)rlExpr(ag, cond_node, block, RL_RI_NONE);
else
(void)rlExpr(ag, cond_node, block, RL_RI_TYPE_ONLY);
if (else_node != 0) {
bool then_uses = rlExpr(ag, then_node, block, ri);
bool else_uses = rlExpr(ag, else_node, block, ri);
bool uses_rl = then_uses || else_uses;
if (uses_rl)
nodesNeedRlAdd(ag, node);
return uses_rl;
} else {
(void)rlExpr(ag, then_node, block, RL_RI_NONE);
return false;
}
}
// while (AstRlAnnotate.zig:389-419).
case AST_NODE_WHILE_SIMPLE:
case AST_NODE_WHILE_CONT:
case AST_NODE_WHILE: {
uint32_t cond_node = nd.lhs;
uint32_t body_node, cont_node = 0, else_node = 0;
if (tag == AST_NODE_WHILE_SIMPLE) {
body_node = nd.rhs;
} else if (tag == AST_NODE_WHILE_CONT) {
cont_node = tree->extra_data.arr[nd.rhs];
body_node = tree->extra_data.arr[nd.rhs + 1];
} else {
cont_node = tree->extra_data.arr[nd.rhs];
body_node = tree->extra_data.arr[nd.rhs + 1];
else_node = tree->extra_data.arr[nd.rhs + 2];
}
uint32_t main_tok = tree->nodes.main_tokens[node];
bool is_labeled
= (main_tok >= 2 && tree->tokens.tags[main_tok - 1] == TOKEN_COLON
&& tree->tokens.tags[main_tok - 2] == TOKEN_IDENTIFIER);
uint32_t label_token = is_labeled ? main_tok - 2 : UINT32_MAX;
// Detect payload/error.
uint32_t last_cond_tok = lastToken(tree, cond_node);
uint32_t pipe_tok = last_cond_tok + 2;
bool has_payload = (pipe_tok < tree->tokens.len
&& tree->tokens.tags[pipe_tok] == TOKEN_PIPE);
// Error token detection for while: check for else |err|.
bool has_error = false;
if (else_node != 0) {
uint32_t else_tok = lastToken(tree, body_node) + 1;
has_error = (else_tok + 1 < tree->tokens.len
&& tree->tokens.tags[else_tok + 1] == TOKEN_PIPE);
}
if (has_error || has_payload)
(void)rlExpr(ag, cond_node, block, RL_RI_NONE);
else
(void)rlExpr(ag, cond_node, block, RL_RI_TYPE_ONLY);
RlBlock new_block;
new_block.parent = block;
new_block.label_token = label_token;
new_block.is_loop = true;
new_block.ri = ri;
new_block.consumes_res_ptr = false;
if (cont_node != 0)
(void)rlExpr(ag, cont_node, &new_block, RL_RI_NONE);
(void)rlExpr(ag, body_node, &new_block, RL_RI_NONE);
bool else_consumes = false;
if (else_node != 0)
else_consumes = rlExpr(ag, else_node, block, ri);
if (new_block.consumes_res_ptr || else_consumes) {
nodesNeedRlAdd(ag, node);
return true;
}
return false;
}
// for (AstRlAnnotate.zig:421-454).
case AST_NODE_FOR_SIMPLE:
case AST_NODE_FOR: {
uint32_t input_buf[16];
const uint32_t* inputs = NULL;
uint32_t num_inputs = 0;
uint32_t body_node = 0;
uint32_t else_node = 0;
if (tag == AST_NODE_FOR_SIMPLE) {
input_buf[0] = nd.lhs;
inputs = input_buf;
num_inputs = 1;
body_node = nd.rhs;
} else {
AstFor for_data;
memcpy(&for_data, &nd.rhs, sizeof(AstFor));
num_inputs = for_data.inputs;
if (num_inputs > 16)
num_inputs = 16;
for (uint32_t i = 0; i < num_inputs; i++)
input_buf[i] = tree->extra_data.arr[nd.lhs + i];
inputs = input_buf;
body_node = tree->extra_data.arr[nd.lhs + num_inputs];
if (for_data.has_else)
else_node = tree->extra_data.arr[nd.lhs + num_inputs + 1];
}
uint32_t main_tok = tree->nodes.main_tokens[node];
bool is_labeled
= (main_tok >= 2 && tree->tokens.tags[main_tok - 1] == TOKEN_COLON
&& tree->tokens.tags[main_tok - 2] == TOKEN_IDENTIFIER);
uint32_t label_token = is_labeled ? main_tok - 2 : UINT32_MAX;
for (uint32_t i = 0; i < num_inputs; i++) {
uint32_t input = inputs[i];
if (tree->nodes.tags[input] == AST_NODE_FOR_RANGE) {
AstData range_nd = tree->nodes.datas[input];
(void)rlExpr(ag, range_nd.lhs, block, RL_RI_TYPE_ONLY);
if (range_nd.rhs != 0)
(void)rlExpr(ag, range_nd.rhs, block, RL_RI_TYPE_ONLY);
} else {
(void)rlExpr(ag, input, block, RL_RI_NONE);
}
}
RlBlock new_block;
new_block.parent = block;
new_block.label_token = label_token;
new_block.is_loop = true;
new_block.ri = ri;
new_block.consumes_res_ptr = false;
(void)rlExpr(ag, body_node, &new_block, RL_RI_NONE);
bool else_consumes = false;
if (else_node != 0)
else_consumes = rlExpr(ag, else_node, block, ri);
if (new_block.consumes_res_ptr || else_consumes) {
nodesNeedRlAdd(ag, node);
return true;
}
return false;
}
// slice (AstRlAnnotate.zig:456-480).
case AST_NODE_SLICE_OPEN:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
case AST_NODE_SLICE: {
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
uint32_t start = tree->extra_data.arr[nd.rhs];
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
(void)rlExpr(ag, start, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, end, block, RL_RI_TYPE_ONLY);
return false;
}
case AST_NODE_SLICE_SENTINEL: {
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
AstSliceSentinel ss;
ss.start = tree->extra_data.arr[nd.rhs];
ss.end = tree->extra_data.arr[nd.rhs + 1];
ss.sentinel = tree->extra_data.arr[nd.rhs + 2];
(void)rlExpr(ag, ss.start, block, RL_RI_TYPE_ONLY);
if (ss.end != 0)
(void)rlExpr(ag, ss.end, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, ss.sentinel, block, RL_RI_NONE);
return false;
}
// deref (AstRlAnnotate.zig:481-484).
case AST_NODE_DEREF:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// address_of (AstRlAnnotate.zig:485-488).
case AST_NODE_ADDRESS_OF:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// optional_type (AstRlAnnotate.zig:489-492).
case AST_NODE_OPTIONAL_TYPE:
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
return false;
// try, nosuspend (AstRlAnnotate.zig:493-495).
case AST_NODE_TRY:
case AST_NODE_NOSUSPEND:
return rlExpr(ag, nd.lhs, block, ri);
// grouped_expression, unwrap_optional (AstRlAnnotate.zig:496-498).
case AST_NODE_GROUPED_EXPRESSION:
case AST_NODE_UNWRAP_OPTIONAL:
return rlExpr(ag, nd.lhs, block, ri);
// block (AstRlAnnotate.zig:500-508).
case AST_NODE_BLOCK_TWO:
case AST_NODE_BLOCK_TWO_SEMICOLON: {
uint32_t stmts[2];
uint32_t count = 0;
if (nd.lhs != 0)
stmts[count++] = nd.lhs;
if (nd.rhs != 0)
stmts[count++] = nd.rhs;
return rlBlockExpr(ag, block, ri, node, stmts, count);
}
case AST_NODE_BLOCK:
case AST_NODE_BLOCK_SEMICOLON:
return rlBlockExpr(ag, block, ri, node, tree->extra_data.arr + nd.lhs,
nd.rhs - nd.lhs);
// anyframe_type (AstRlAnnotate.zig:509-513).
case AST_NODE_ANYFRAME_TYPE:
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
// catch/orelse (AstRlAnnotate.zig:514-522).
case AST_NODE_CATCH:
case AST_NODE_ORELSE: {
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
bool rhs_consumes = rlExpr(ag, nd.rhs, block, ri);
if (rhs_consumes)
nodesNeedRlAdd(ag, node);
return rhs_consumes;
}
// ptr_type (AstRlAnnotate.zig:524-546).
case AST_NODE_PTR_TYPE_ALIGNED:
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
case AST_NODE_PTR_TYPE_SENTINEL:
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
case AST_NODE_PTR_TYPE: {
AstPtrType pt;
pt.sentinel = tree->extra_data.arr[nd.lhs];
pt.align_node = tree->extra_data.arr[nd.lhs + 1];
pt.addrspace_node = tree->extra_data.arr[nd.lhs + 2];
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
if (pt.sentinel != 0)
(void)rlExpr(ag, pt.sentinel, block, RL_RI_TYPE_ONLY);
if (pt.align_node != 0)
(void)rlExpr(ag, pt.align_node, block, RL_RI_TYPE_ONLY);
if (pt.addrspace_node != 0)
(void)rlExpr(ag, pt.addrspace_node, block, RL_RI_TYPE_ONLY);
return false;
}
case AST_NODE_PTR_TYPE_BIT_RANGE: {
AstPtrTypeBitRange pt;
pt.sentinel = tree->extra_data.arr[nd.lhs];
pt.align_node = tree->extra_data.arr[nd.lhs + 1];
pt.addrspace_node = tree->extra_data.arr[nd.lhs + 2];
pt.bit_range_start = tree->extra_data.arr[nd.lhs + 3];
pt.bit_range_end = tree->extra_data.arr[nd.lhs + 4];
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
if (pt.sentinel != 0)
(void)rlExpr(ag, pt.sentinel, block, RL_RI_TYPE_ONLY);
if (pt.align_node != 0)
(void)rlExpr(ag, pt.align_node, block, RL_RI_TYPE_ONLY);
if (pt.addrspace_node != 0)
(void)rlExpr(ag, pt.addrspace_node, block, RL_RI_TYPE_ONLY);
if (pt.bit_range_start != 0) {
(void)rlExpr(ag, pt.bit_range_start, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, pt.bit_range_end, block, RL_RI_TYPE_ONLY);
}
return false;
}
// container_decl (AstRlAnnotate.zig:548-564).
case AST_NODE_CONTAINER_DECL:
case AST_NODE_CONTAINER_DECL_TRAILING:
case AST_NODE_CONTAINER_DECL_ARG:
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
case AST_NODE_CONTAINER_DECL_TWO:
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
case AST_NODE_TAGGED_UNION:
case AST_NODE_TAGGED_UNION_TRAILING:
case AST_NODE_TAGGED_UNION_ENUM_TAG:
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
case AST_NODE_TAGGED_UNION_TWO:
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
rlContainerDecl(ag, block, node);
return false;
// break (AstRlAnnotate.zig:566-596).
case AST_NODE_BREAK: {
uint32_t opt_label_tok = nd.lhs; // 0 = no label
uint32_t rhs_node = nd.rhs; // 0 = void break
if (rhs_node == 0)
return false;
RlBlock* opt_cur_block = block;
if (opt_label_tok != 0) {
// Labeled break: find matching block.
while (opt_cur_block != NULL) {
if (opt_cur_block->label_token != UINT32_MAX
&& rlTokenIdentEqual(
tree, opt_cur_block->label_token, opt_label_tok))
break;
opt_cur_block = opt_cur_block->parent;
}
} else {
// No label: breaking from innermost loop.
while (opt_cur_block != NULL) {
if (opt_cur_block->is_loop)
break;
opt_cur_block = opt_cur_block->parent;
}
}
if (opt_cur_block != NULL) {
bool consumes = rlExpr(ag, rhs_node, block, opt_cur_block->ri);
if (consumes)
opt_cur_block->consumes_res_ptr = true;
} else {
(void)rlExpr(ag, rhs_node, block, RL_RI_NONE);
}
return false;
}
// array_type (AstRlAnnotate.zig:598-611).
case AST_NODE_ARRAY_TYPE:
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
case AST_NODE_ARRAY_TYPE_SENTINEL: {
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
uint32_t elem_type = tree->extra_data.arr[nd.rhs + 1];
uint32_t sentinel = tree->extra_data.arr[nd.rhs];
(void)rlExpr(ag, elem_type, block, RL_RI_TYPE_ONLY);
(void)rlExpr(ag, sentinel, block, RL_RI_TYPE_ONLY);
return false;
}
// array_access (AstRlAnnotate.zig:612-617).
case AST_NODE_ARRAY_ACCESS:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
return false;
// comptime (AstRlAnnotate.zig:618-623).
case AST_NODE_COMPTIME:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// switch (AstRlAnnotate.zig:624-650).
case AST_NODE_SWITCH:
case AST_NODE_SWITCH_COMMA: {
uint32_t cond_node = nd.lhs;
uint32_t extra_idx = nd.rhs;
uint32_t cases_start = tree->extra_data.arr[extra_idx];
uint32_t cases_end = tree->extra_data.arr[extra_idx + 1];
(void)rlExpr(ag, cond_node, block, RL_RI_NONE);
bool any_consumed = false;
for (uint32_t ci = cases_start; ci < cases_end; ci++) {
uint32_t case_node = tree->extra_data.arr[ci];
AstNodeTag ct = tree->nodes.tags[case_node];
AstData cd = tree->nodes.datas[case_node];
// Process case values.
if (ct == AST_NODE_SWITCH_CASE_ONE
|| ct == AST_NODE_SWITCH_CASE_INLINE_ONE) {
if (cd.lhs != 0) {
if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE) {
AstData rd = tree->nodes.datas[cd.lhs];
(void)rlExpr(ag, rd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, rd.rhs, block, RL_RI_NONE);
} else {
(void)rlExpr(ag, cd.lhs, block, RL_RI_NONE);
}
}
} else {
// SWITCH_CASE / SWITCH_CASE_INLINE: SubRange[lhs]
uint32_t items_start = tree->extra_data.arr[cd.lhs];
uint32_t items_end = tree->extra_data.arr[cd.lhs + 1];
for (uint32_t ii = items_start; ii < items_end; ii++) {
uint32_t item = tree->extra_data.arr[ii];
if (tree->nodes.tags[item] == AST_NODE_SWITCH_RANGE) {
AstData rd = tree->nodes.datas[item];
(void)rlExpr(ag, rd.lhs, block, RL_RI_NONE);
(void)rlExpr(ag, rd.rhs, block, RL_RI_NONE);
} else {
(void)rlExpr(ag, item, block, RL_RI_NONE);
}
}
}
// Process case target expr.
if (rlExpr(ag, cd.rhs, block, ri))
any_consumed = true;
}
if (any_consumed)
nodesNeedRlAdd(ag, node);
return any_consumed;
}
// suspend (AstRlAnnotate.zig:651-654).
case AST_NODE_SUSPEND:
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// resume (AstRlAnnotate.zig:655-658).
case AST_NODE_RESUME:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
// array_init (AstRlAnnotate.zig:660-695).
case AST_NODE_ARRAY_INIT_ONE:
case AST_NODE_ARRAY_INIT_ONE_COMMA:
case AST_NODE_ARRAY_INIT_DOT_TWO:
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
case AST_NODE_ARRAY_INIT_DOT:
case AST_NODE_ARRAY_INIT_DOT_COMMA:
case AST_NODE_ARRAY_INIT:
case AST_NODE_ARRAY_INIT_COMMA: {
// Extract type_expr and elements.
uint32_t type_expr = 0;
uint32_t elem_buf[2];
const uint32_t* elems = NULL;
uint32_t nelem = 0;
switch (tag) {
case AST_NODE_ARRAY_INIT_ONE:
case AST_NODE_ARRAY_INIT_ONE_COMMA:
type_expr = nd.lhs;
if (nd.rhs != 0) {
elem_buf[0] = nd.rhs;
elems = elem_buf;
nelem = 1;
}
break;
case AST_NODE_ARRAY_INIT_DOT_TWO:
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: {
uint32_t idx = 0;
if (nd.lhs != 0)
elem_buf[idx++] = nd.lhs;
if (nd.rhs != 0)
elem_buf[idx++] = nd.rhs;
elems = elem_buf;
nelem = idx;
break;
}
case AST_NODE_ARRAY_INIT_DOT:
case AST_NODE_ARRAY_INIT_DOT_COMMA:
elems = tree->extra_data.arr + nd.lhs;
nelem = nd.rhs - nd.lhs;
break;
case AST_NODE_ARRAY_INIT:
case AST_NODE_ARRAY_INIT_COMMA: {
type_expr = nd.lhs;
uint32_t start = tree->extra_data.arr[nd.rhs];
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
elems = tree->extra_data.arr + start;
nelem = end - start;
break;
}
default:
break;
}
if (type_expr != 0) {
(void)rlExpr(ag, type_expr, block, RL_RI_NONE);
for (uint32_t i = 0; i < nelem; i++)
(void)rlExpr(ag, elems[i], block, RL_RI_TYPE_ONLY);
return false;
}
if (ri.have_type) {
for (uint32_t i = 0; i < nelem; i++)
(void)rlExpr(ag, elems[i], block, ri);
return ri.have_ptr;
} else {
for (uint32_t i = 0; i < nelem; i++)
(void)rlExpr(ag, elems[i], block, RL_RI_NONE);
return false;
}
}
// struct_init (AstRlAnnotate.zig:697-732).
case AST_NODE_STRUCT_INIT_ONE:
case AST_NODE_STRUCT_INIT_ONE_COMMA:
case AST_NODE_STRUCT_INIT_DOT_TWO:
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
case AST_NODE_STRUCT_INIT_DOT:
case AST_NODE_STRUCT_INIT_DOT_COMMA:
case AST_NODE_STRUCT_INIT:
case AST_NODE_STRUCT_INIT_COMMA: {
uint32_t type_expr = 0;
uint32_t field_buf[2];
const uint32_t* fields = NULL;
uint32_t nfields = 0;
switch (tag) {
case AST_NODE_STRUCT_INIT_ONE:
case AST_NODE_STRUCT_INIT_ONE_COMMA:
type_expr = nd.lhs;
if (nd.rhs != 0) {
field_buf[0] = nd.rhs;
fields = field_buf;
nfields = 1;
}
break;
case AST_NODE_STRUCT_INIT_DOT_TWO:
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: {
uint32_t idx = 0;
if (nd.lhs != 0)
field_buf[idx++] = nd.lhs;
if (nd.rhs != 0)
field_buf[idx++] = nd.rhs;
fields = field_buf;
nfields = idx;
break;
}
case AST_NODE_STRUCT_INIT_DOT:
case AST_NODE_STRUCT_INIT_DOT_COMMA:
fields = tree->extra_data.arr + nd.lhs;
nfields = nd.rhs - nd.lhs;
break;
case AST_NODE_STRUCT_INIT:
case AST_NODE_STRUCT_INIT_COMMA: {
type_expr = nd.lhs;
uint32_t start = tree->extra_data.arr[nd.rhs];
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
fields = tree->extra_data.arr + start;
nfields = end - start;
break;
}
default:
break;
}
if (type_expr != 0) {
(void)rlExpr(ag, type_expr, block, RL_RI_NONE);
for (uint32_t i = 0; i < nfields; i++)
(void)rlExpr(ag, fields[i], block, RL_RI_TYPE_ONLY);
return false;
}
if (ri.have_type) {
for (uint32_t i = 0; i < nfields; i++)
(void)rlExpr(ag, fields[i], block, ri);
return ri.have_ptr;
} else {
for (uint32_t i = 0; i < nfields; i++)
(void)rlExpr(ag, fields[i], block, RL_RI_NONE);
return false;
}
}
// fn_proto, fn_decl (AstRlAnnotate.zig:734-770).
case AST_NODE_FN_PROTO_SIMPLE:
case AST_NODE_FN_PROTO_MULTI:
case AST_NODE_FN_PROTO_ONE:
case AST_NODE_FN_PROTO:
case AST_NODE_FN_DECL: {
// Extract return type and body.
uint32_t return_type = 0;
uint32_t body_node = 0;
if (tag == AST_NODE_FN_DECL) {
body_node = nd.rhs;
// fn_proto is nd.lhs
uint32_t proto = nd.lhs;
AstNodeTag ptag = tree->nodes.tags[proto];
AstData pnd = tree->nodes.datas[proto];
if (ptag == AST_NODE_FN_PROTO_SIMPLE) {
return_type = pnd.rhs;
if (pnd.lhs != 0)
(void)rlExpr(ag, pnd.lhs, block, RL_RI_TYPE_ONLY);
} else if (ptag == AST_NODE_FN_PROTO_MULTI) {
return_type = pnd.rhs;
uint32_t ps = tree->extra_data.arr[pnd.lhs];
uint32_t pe = tree->extra_data.arr[pnd.lhs + 1];
for (uint32_t i = ps; i < pe; i++)
(void)rlExpr(
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
} else if (ptag == AST_NODE_FN_PROTO_ONE) {
return_type = pnd.rhs;
AstFnProtoOne fp;
fp.param = tree->extra_data.arr[pnd.lhs];
fp.align_expr = tree->extra_data.arr[pnd.lhs + 1];
fp.addrspace_expr = tree->extra_data.arr[pnd.lhs + 2];
fp.section_expr = tree->extra_data.arr[pnd.lhs + 3];
fp.callconv_expr = tree->extra_data.arr[pnd.lhs + 4];
if (fp.param != 0)
(void)rlExpr(ag, fp.param, block, RL_RI_TYPE_ONLY);
if (fp.align_expr != 0)
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
if (fp.addrspace_expr != 0)
(void)rlExpr(
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
if (fp.section_expr != 0)
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
if (fp.callconv_expr != 0)
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
} else if (ptag == AST_NODE_FN_PROTO) {
return_type = pnd.rhs;
AstFnProto fp;
fp.params_start = tree->extra_data.arr[pnd.lhs];
fp.params_end = tree->extra_data.arr[pnd.lhs + 1];
fp.align_expr = tree->extra_data.arr[pnd.lhs + 2];
fp.addrspace_expr = tree->extra_data.arr[pnd.lhs + 3];
fp.section_expr = tree->extra_data.arr[pnd.lhs + 4];
fp.callconv_expr = tree->extra_data.arr[pnd.lhs + 5];
for (uint32_t i = fp.params_start; i < fp.params_end; i++)
(void)rlExpr(
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
if (fp.align_expr != 0)
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
if (fp.addrspace_expr != 0)
(void)rlExpr(
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
if (fp.section_expr != 0)
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
if (fp.callconv_expr != 0)
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
}
} else {
// Standalone fn_proto (no body).
if (tag == AST_NODE_FN_PROTO_SIMPLE) {
return_type = nd.rhs;
if (nd.lhs != 0)
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
} else if (tag == AST_NODE_FN_PROTO_MULTI) {
return_type = nd.rhs;
uint32_t ps = tree->extra_data.arr[nd.lhs];
uint32_t pe = tree->extra_data.arr[nd.lhs + 1];
for (uint32_t i = ps; i < pe; i++)
(void)rlExpr(
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
} else if (tag == AST_NODE_FN_PROTO_ONE) {
return_type = nd.rhs;
AstFnProtoOne fp;
fp.param = tree->extra_data.arr[nd.lhs];
fp.align_expr = tree->extra_data.arr[nd.lhs + 1];
fp.addrspace_expr = tree->extra_data.arr[nd.lhs + 2];
fp.section_expr = tree->extra_data.arr[nd.lhs + 3];
fp.callconv_expr = tree->extra_data.arr[nd.lhs + 4];
if (fp.param != 0)
(void)rlExpr(ag, fp.param, block, RL_RI_TYPE_ONLY);
if (fp.align_expr != 0)
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
if (fp.addrspace_expr != 0)
(void)rlExpr(
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
if (fp.section_expr != 0)
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
if (fp.callconv_expr != 0)
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
} else if (tag == AST_NODE_FN_PROTO) {
return_type = nd.rhs;
AstFnProto fp;
fp.params_start = tree->extra_data.arr[nd.lhs];
fp.params_end = tree->extra_data.arr[nd.lhs + 1];
fp.align_expr = tree->extra_data.arr[nd.lhs + 2];
fp.addrspace_expr = tree->extra_data.arr[nd.lhs + 3];
fp.section_expr = tree->extra_data.arr[nd.lhs + 4];
fp.callconv_expr = tree->extra_data.arr[nd.lhs + 5];
for (uint32_t i = fp.params_start; i < fp.params_end; i++)
(void)rlExpr(
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
if (fp.align_expr != 0)
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
if (fp.addrspace_expr != 0)
(void)rlExpr(
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
if (fp.section_expr != 0)
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
if (fp.callconv_expr != 0)
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
}
}
if (return_type != 0)
(void)rlExpr(ag, return_type, block, RL_RI_TYPE_ONLY);
if (body_node != 0)
(void)rlExpr(ag, body_node, block, RL_RI_NONE);
return false;
}
// Remaining: usingnamespace, await, assign_destructure, async calls.
case AST_NODE_USINGNAMESPACE:
return false;
case AST_NODE_AWAIT:
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
return false;
case AST_NODE_ASSIGN_DESTRUCTURE:
return false; // TODO if needed
case AST_NODE_ASYNC_CALL_ONE:
case AST_NODE_ASYNC_CALL_ONE_COMMA:
case AST_NODE_ASYNC_CALL:
case AST_NODE_ASYNC_CALL_COMMA:
return false; // async not relevant
default:
return false;
}
}
// astRlAnnotate (AstRlAnnotate.zig:64-83).
// Entry point: run the result-location annotation pre-pass over the
// whole tree. Does nothing when the parse produced errors, since ZIR
// generation will bail out anyway.
static void astRlAnnotate(AstGenCtx* ag) {
    const Ast* tree = ag->tree;
    if (tree->has_error)
        return;
    // Root container members live in extra_data[lhs..rhs] of node 0
    // (same extraction as astGen performs for structDeclInner).
    AstData root = tree->nodes.datas[0];
    const uint32_t* member = tree->extra_data.arr + root.lhs;
    const uint32_t* member_end = tree->extra_data.arr + root.rhs;
    // Annotate each top-level declaration; the boolean result is
    // irrelevant at the root, which has no enclosing block or result
    // location.
    while (member != member_end)
        (void)rlExpr(ag, *member++, NULL, RL_RI_NONE);
}
// --- Public API: astGen (AstGen.zig:144) ---
// Convert a parsed Ast into ZIR. The returned Zir takes ownership of
// the instruction, extra, and string buffers built up in the context;
// all other scratch arrays owned by the context are released here.
Zir astGen(const Ast* ast) {
    AstGenCtx ag;
    memset(&ag, 0, sizeof(ag));
    ag.tree = ast;
    // Initial allocations (AstGen.zig:162-172): size instruction and
    // extra buffers proportionally to the node count, with a small floor.
    uint32_t node_count = ast->nodes.len;
    uint32_t initial_cap = (node_count > 8) ? node_count : 8;
    ag.inst_cap = initial_cap;
    ag.inst_tags = ARR_INIT(ZirInstTag, ag.inst_cap);
    ag.inst_datas = ARR_INIT(ZirInstData, ag.inst_cap);
    ag.extra_cap = initial_cap + ZIR_EXTRA_RESERVED_COUNT;
    ag.extra = ARR_INIT(uint32_t, ag.extra_cap);
    ag.string_bytes_cap = 16;
    ag.string_bytes = ARR_INIT(uint8_t, ag.string_bytes_cap);
    // String table index 0 is reserved for NullTerminatedString.empty
    // (AstGen.zig:163).
    ag.string_bytes[0] = 0;
    ag.string_bytes_len = 1;
    // Reserve extra[0..1] for the compile-errors and imports payload
    // indices (AstGen.zig:170-172); both start out empty.
    ag.extra[ZIR_EXTRA_COMPILE_ERRORS] = 0;
    ag.extra[ZIR_EXTRA_IMPORTS] = 0;
    ag.extra_len = ZIR_EXTRA_RESERVED_COUNT;
    // Run the AstRlAnnotate pre-pass before any ZIR is emitted
    // (AstGen.zig:150-151).
    astRlAnnotate(&ag);
    // Set up the root GenZir scope (AstGen.zig:176-185). The root is
    // always comptime and anchored at node 0 / line 0.
    GenZir root_scope;
    memset(&root_scope, 0, sizeof(root_scope));
    root_scope.base.tag = SCOPE_GEN_ZIR;
    root_scope.parent = NULL;
    root_scope.astgen = &ag;
    root_scope.is_comptime = true;
    root_scope.decl_node_index = 0; // root node
    root_scope.decl_line = 0;
    root_scope.break_block = UINT32_MAX; // no enclosing breakable block
    // Root container members: containerDeclRoot (AstGen.zig:191-195).
    // They are the slice extra_data[lhs..rhs] of node 0.
    AstData root = ast->nodes.datas[0];
    const uint32_t* root_members = ast->extra_data.arr + root.lhs;
    uint32_t root_members_len = root.rhs - root.lhs;
    structDeclInner(&ag, &root_scope, 0, root_members, root_members_len);
    // Write the imports list into extra (AstGen.zig:227-244).
    writeImports(&ag);
    // Build output Zir (AstGen.zig:211-239); it takes ownership of the
    // instruction, extra, and string buffers.
    Zir zir = {
        .inst_len = ag.inst_len,
        .inst_cap = ag.inst_cap,
        .inst_tags = ag.inst_tags,
        .inst_datas = ag.inst_datas,
        .extra_len = ag.extra_len,
        .extra_cap = ag.extra_cap,
        .extra = ag.extra,
        .string_bytes_len = ag.string_bytes_len,
        .string_bytes_cap = ag.string_bytes_cap,
        .string_bytes = ag.string_bytes,
        .has_compile_errors = ag.has_compile_errors,
    };
    // Release context-owned scratch storage not transferred to zir.
    free(ag.imports);
    free(ag.decl_names);
    free(ag.decl_nodes);
    free(ag.scratch_instructions);
    free(ag.scratch_extra);
    free(ag.ref_table_keys);
    free(ag.ref_table_vals);
    free(ag.nodes_need_rl);
    free(ag.string_table);
    return zir;
}