// astgen.c — AST to ZIR conversion, ported from lib/std/zig/AstGen.zig.
//
// Structural translation of AstGen.zig into C.
// Each function corresponds to a Zig function with the same name,
// with line references to Zig 0.15.1 AstGen.zig.

#include "astgen.h"
#include "common.h"
#include <assert.h>
#include <stdlib.h>
#include <string.h>

// --- Declaration.Flags.Id enum (Zir.zig:2724) ---
|
|
|
|
typedef enum {
|
|
DECL_ID_UNNAMED_TEST,
|
|
DECL_ID_TEST,
|
|
DECL_ID_DECLTEST,
|
|
DECL_ID_COMPTIME,
|
|
DECL_ID_CONST_SIMPLE,
|
|
DECL_ID_CONST_TYPED,
|
|
DECL_ID_CONST,
|
|
DECL_ID_PUB_CONST_SIMPLE,
|
|
DECL_ID_PUB_CONST_TYPED,
|
|
DECL_ID_PUB_CONST,
|
|
DECL_ID_EXTERN_CONST_SIMPLE,
|
|
DECL_ID_EXTERN_CONST,
|
|
DECL_ID_PUB_EXTERN_CONST_SIMPLE,
|
|
DECL_ID_PUB_EXTERN_CONST,
|
|
DECL_ID_EXPORT_CONST,
|
|
DECL_ID_PUB_EXPORT_CONST,
|
|
DECL_ID_VAR_SIMPLE,
|
|
DECL_ID_VAR,
|
|
DECL_ID_VAR_THREADLOCAL,
|
|
DECL_ID_PUB_VAR_SIMPLE,
|
|
DECL_ID_PUB_VAR,
|
|
DECL_ID_PUB_VAR_THREADLOCAL,
|
|
DECL_ID_EXTERN_VAR,
|
|
DECL_ID_EXTERN_VAR_THREADLOCAL,
|
|
DECL_ID_PUB_EXTERN_VAR,
|
|
DECL_ID_PUB_EXTERN_VAR_THREADLOCAL,
|
|
DECL_ID_EXPORT_VAR,
|
|
DECL_ID_EXPORT_VAR_THREADLOCAL,
|
|
DECL_ID_PUB_EXPORT_VAR,
|
|
DECL_ID_PUB_EXPORT_VAR_THREADLOCAL,
|
|
} DeclFlagsId;
|
|
|
|
// --- Import tracking (AstGen.zig:265) ---
|
|
|
|
typedef struct {
|
|
uint32_t name; // NullTerminatedString index
|
|
uint32_t token; // Ast.TokenIndex
|
|
} ImportEntry;
|
|
|
|
// --- AstGen internal context (mirrors AstGen struct, AstGen.zig:153) ---
|
|
|
|
typedef struct {
|
|
const Ast* tree;
|
|
ZirInstTag* inst_tags;
|
|
ZirInstData* inst_datas;
|
|
uint32_t inst_len;
|
|
uint32_t inst_cap;
|
|
uint32_t* extra;
|
|
uint32_t extra_len;
|
|
uint32_t extra_cap;
|
|
uint8_t* string_bytes;
|
|
uint32_t string_bytes_len;
|
|
uint32_t string_bytes_cap;
|
|
// String dedup table: stores positions in string_bytes that are
|
|
// registered for deduplication (mirrors AstGen.string_table).
|
|
// Only strings added via identAsString/strLitAsString (non-embedded-null)
|
|
// are registered. Multiline strings are NOT registered.
|
|
uint32_t* string_table;
|
|
uint32_t string_table_len;
|
|
uint32_t string_table_cap;
|
|
uint32_t source_offset;
|
|
uint32_t source_line;
|
|
uint32_t source_column;
|
|
ImportEntry* imports;
|
|
uint32_t imports_len;
|
|
uint32_t imports_cap;
|
|
// Shared dynamic array for GenZir instructions (AstGen.zig:11796).
|
|
// Sub-blocks share this array and track their slice via
|
|
// instructions_top.
|
|
uint32_t* scratch_instructions;
|
|
uint32_t scratch_inst_len;
|
|
uint32_t scratch_inst_cap;
|
|
// Scratch extra array for call arguments (mirrors AstGen.scratch in Zig).
|
|
// Used to collect body lengths + body instructions before copying to
|
|
// extra.
|
|
uint32_t* scratch_extra;
|
|
uint32_t scratch_extra_len;
|
|
uint32_t scratch_extra_cap;
|
|
// Return type ref for the current function (set during fnDecl/testDecl).
|
|
uint32_t fn_ret_ty; // ZirInstRef
|
|
// Pointer to the fn_block GenZir for the current function (AstGen.zig:45).
|
|
void* fn_block; // GenZir*
|
|
// ref_table: deferred REF instructions (AstGen.zig:58-68).
|
|
// Key = operand inst index, Value = ref inst index.
|
|
uint32_t* ref_table_keys;
|
|
uint32_t* ref_table_vals;
|
|
uint32_t ref_table_len;
|
|
uint32_t ref_table_cap;
|
|
// nodes_need_rl: set of AST node indices that need result locations.
|
|
// Populated by astRlAnnotate() pre-pass (AstRlAnnotate.zig).
|
|
uint32_t* nodes_need_rl;
|
|
uint32_t nodes_need_rl_len;
|
|
uint32_t nodes_need_rl_cap;
|
|
bool has_compile_errors;
|
|
bool within_fn; // AstGen.zig:49
|
|
bool fn_var_args; // AstGen.zig:46
|
|
} AstGenCtx;
|
|
|
|
static void setCompileError(AstGenCtx* ag) { ag->has_compile_errors = true; }
|
|
#define SET_ERROR(ag) setCompileError(ag)
|
|
|
|
// Set fn_block pointer on AstGenCtx. The caller is responsible for saving
|
|
// and restoring the previous value before the pointed-to GenZir goes out
|
|
// of scope (AstGen.zig:45).
|
|
static void setFnBlock(AstGenCtx* ag, void* block) { ag->fn_block = block; }
|
|
|
|
// --- ref_table operations (AstGen.zig:58-68) ---
|
|
// Simple linear-scan hash table for deferred REF instructions.
|
|
|
|
// Returns pointer to existing value if key found, NULL if not found.
|
|
static uint32_t* refTableGet(AstGenCtx* ag, uint32_t key) {
|
|
for (uint32_t i = 0; i < ag->ref_table_len; i++) {
|
|
if (ag->ref_table_keys[i] == key)
|
|
return &ag->ref_table_vals[i];
|
|
}
|
|
return NULL;
|
|
}
|
|
|
|
// getOrPut: returns pointer to value slot; sets *found to true if existed.
|
|
static uint32_t* refTableGetOrPut(AstGenCtx* ag, uint32_t key, bool* found) {
|
|
for (uint32_t i = 0; i < ag->ref_table_len; i++) {
|
|
if (ag->ref_table_keys[i] == key) {
|
|
*found = true;
|
|
return &ag->ref_table_vals[i];
|
|
}
|
|
}
|
|
*found = false;
|
|
if (ag->ref_table_len >= ag->ref_table_cap) {
|
|
uint32_t new_cap = ag->ref_table_cap == 0 ? 16 : ag->ref_table_cap * 2;
|
|
ag->ref_table_keys
|
|
= realloc(ag->ref_table_keys, new_cap * sizeof(uint32_t));
|
|
ag->ref_table_vals
|
|
= realloc(ag->ref_table_vals, new_cap * sizeof(uint32_t));
|
|
ag->ref_table_cap = new_cap;
|
|
}
|
|
uint32_t idx = ag->ref_table_len++;
|
|
ag->ref_table_keys[idx] = key;
|
|
return &ag->ref_table_vals[idx];
|
|
}
|
|
|
|
// fetchRemove: if key exists, remove it and return true with *val set.
|
|
static bool refTableFetchRemove(AstGenCtx* ag, uint32_t key, uint32_t* val) {
|
|
for (uint32_t i = 0; i < ag->ref_table_len; i++) {
|
|
if (ag->ref_table_keys[i] == key) {
|
|
*val = ag->ref_table_vals[i];
|
|
// Swap with last element.
|
|
ag->ref_table_len--;
|
|
if (i < ag->ref_table_len) {
|
|
ag->ref_table_keys[i] = ag->ref_table_keys[ag->ref_table_len];
|
|
ag->ref_table_vals[i] = ag->ref_table_vals[ag->ref_table_len];
|
|
}
|
|
return true;
|
|
}
|
|
}
|
|
return false;
|
|
}
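
// Illustrative sketch of how the two halves of the ref_table are used
// (the creation side shown here is an assumption about callers elsewhere
// in this file; only the table helpers above are taken as given):
//
//   bool found;
//   uint32_t* slot = refTableGetOrPut(ag, operand_inst, &found);
//   if (!found)
//       *slot = deferred_ref_inst; // a "ref" of operand_inst, emitted later
//
// When a body containing operand_inst is flushed to extra,
// appendPossiblyRefdBodyInst() consumes the entry via refTableFetchRemove()
// and writes the deferred instruction immediately after its operand.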
|
|
|
|
// --- Result location (AstGen.zig:11808) ---
|
|
// Simplified version of ResultInfo.Loc.
|
|
// Defined here (before GenZir) because GenZir.break_result_info uses it.
|
|
|
|
// ResultInfo.Context (AstGen.zig:371-386).
|
|
typedef enum {
|
|
RI_CTX_NONE,
|
|
RI_CTX_RETURN,
|
|
RI_CTX_ERROR_HANDLING_EXPR,
|
|
RI_CTX_SHIFT_OP,
|
|
RI_CTX_FN_ARG,
|
|
RI_CTX_CONST_INIT,
|
|
RI_CTX_ASSIGNMENT,
|
|
} ResultCtx;
|
|
|
|
// DestructureComponent: mirrors Zig's ResultInfo.Loc.DestructureComponent.
|
|
typedef enum {
|
|
DC_DISCARD,
|
|
DC_TYPED_PTR,
|
|
DC_INFERRED_PTR,
|
|
} DestructureComponentTag;
|
|
|
|
typedef struct {
|
|
DestructureComponentTag tag;
|
|
uint32_t inst; // ZIR inst ref (for DC_TYPED_PTR and DC_INFERRED_PTR).
|
|
uint32_t src_node; // Only for DC_TYPED_PTR.
|
|
} DestructureComponent;
|
|
|
|
typedef enum {
|
|
RL_NONE, // Just compute the value.
|
|
RL_REF, // Compute a pointer to the value.
|
|
RL_DISCARD, // Compute but discard (emit ensure_result_non_error).
|
|
RL_TY, // Coerce to specific type.
|
|
RL_COERCED_TY, // Coerce to specific type, result is the coercion.
|
|
RL_PTR, // Store result to typed pointer. data=alloc inst, src_node=node.
|
|
RL_INFERRED_PTR, // Store result to inferred pointer. data=alloc inst.
|
|
RL_REF_COERCED_TY, // Ref with pointer type. data=ptr_ty_inst.
|
|
RL_DESTRUCTURE, // Destructure into multiple pointers.
|
|
} ResultLocTag;
|
|
|
|
typedef struct {
|
|
ResultLocTag tag;
|
|
uint32_t data; // ZirInstRef: ty_inst for TY/COERCED_TY, alloc inst for
|
|
// PTR/INFERRED_PTR.
|
|
uint32_t src_node; // Used for RL_PTR and RL_DESTRUCTURE.
|
|
ResultCtx ctx; // ResultInfo.Context (AstGen.zig:371).
|
|
DestructureComponent* components; // Only for RL_DESTRUCTURE.
|
|
uint32_t components_len; // Only for RL_DESTRUCTURE.
|
|
} ResultLoc;
|
|
|
|
#define RL_NONE_VAL \
|
|
((ResultLoc) { .tag = RL_NONE, \
|
|
.data = 0, \
|
|
.src_node = 0, \
|
|
.ctx = RI_CTX_NONE, \
|
|
.components = NULL, \
|
|
.components_len = 0 })
|
|
#define RL_REF_VAL \
|
|
((ResultLoc) { .tag = RL_REF, \
|
|
.data = 0, \
|
|
.src_node = 0, \
|
|
.ctx = RI_CTX_NONE, \
|
|
.components = NULL, \
|
|
.components_len = 0 })
|
|
#define RL_DISCARD_VAL \
|
|
((ResultLoc) { .tag = RL_DISCARD, \
|
|
.data = 0, \
|
|
.src_node = 0, \
|
|
.ctx = RI_CTX_NONE, \
|
|
.components = NULL, \
|
|
.components_len = 0 })
|
|
#define RL_IS_REF(rl) ((rl).tag == RL_REF || (rl).tag == RL_REF_COERCED_TY)
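
// Example of building a result location that requests coercion to a known
// type (a minimal sketch; `ty_ref` stands for any ZIR type ref in scope):
//
//   ResultLoc ri = RL_NONE_VAL;
//   ri.tag = RL_TY;
//   ri.data = ty_ref; // type to coerce the produced value to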
|
|
|
|
// --- Scope types (AstGen.zig:11621-11768) ---
|
|
|
|
typedef enum {
|
|
SCOPE_GEN_ZIR,
|
|
SCOPE_LOCAL_VAL,
|
|
SCOPE_LOCAL_PTR,
|
|
SCOPE_DEFER_NORMAL,
|
|
SCOPE_DEFER_ERROR,
|
|
SCOPE_NAMESPACE,
|
|
SCOPE_TOP,
|
|
SCOPE_LABEL,
|
|
} ScopeTag;
|
|
|
|
typedef struct Scope {
|
|
ScopeTag tag;
|
|
} Scope;
|
|
|
|
// --- GenZir scope (mirrors GenZir struct, AstGen.zig:11772) ---
|
|
//
|
|
// Sub-blocks share the parent AstGenCtx's scratch_instructions array and
|
|
// record their starting offset (instructions_top). This mirrors the upstream
|
|
// GenZir.instructions / instructions_top design (AstGen.zig:11796-11850).
|
|
|
|
typedef struct {
|
|
Scope base; // tag = SCOPE_GEN_ZIR
|
|
Scope* parent;
|
|
AstGenCtx* astgen;
|
|
uint32_t decl_node_index;
|
|
uint32_t decl_line;
|
|
bool is_comptime;
|
|
bool is_inline; // true for inline for/while, labeled blocks in comptime
|
|
bool c_import; // true inside @cImport block
|
|
bool is_typeof; // true inside @TypeOf scope
|
|
uint32_t instructions_top; // start index in shared array
|
|
uint32_t break_block; // UINT32_MAX = none (AstGen.zig:11780)
|
|
uint32_t continue_block; // UINT32_MAX = none (AstGen.zig:11784)
|
|
// Label for labeled blocks (AstGen.zig:11800, 11869-11874).
|
|
uint32_t label_token; // UINT32_MAX = no label
|
|
uint32_t label_block_inst; // the BLOCK instruction index
|
|
ResultLoc break_result_info; // RL for break values
|
|
uint32_t any_defer_node; // UINT32_MAX = none (AstGen.zig:11812)
|
|
} GenZir;
|
|
|
|
// Scope.LocalVal (AstGen.zig:11682).
|
|
// This is always a `const` local and the `inst` is a value type, not a
|
|
// pointer.
|
|
typedef struct {
|
|
Scope base; // tag = SCOPE_LOCAL_VAL
|
|
Scope* parent;
|
|
GenZir* gen_zir;
|
|
uint32_t inst; // ZirInstRef
|
|
uint32_t token_src; // Ast.TokenIndex
|
|
uint32_t name; // NullTerminatedString (string table index)
|
|
bool* is_used_or_discarded; // NULL if not tracking (AstGen.zig:11691)
|
|
} ScopeLocalVal;
|
|
|
|
// Scope.LocalPtr (AstGen.zig:11704).
|
|
// This could be a `const` or `var` local. It has a pointer instead of a value.
|
|
typedef struct {
|
|
Scope base; // tag = SCOPE_LOCAL_PTR
|
|
Scope* parent;
|
|
GenZir* gen_zir;
|
|
uint32_t ptr; // ZirInstRef
|
|
uint32_t token_src; // Ast.TokenIndex
|
|
uint32_t name; // NullTerminatedString (string table index)
|
|
bool maybe_comptime;
|
|
} ScopeLocalPtr;
|
|
|
|
// Scope.Defer (AstGen.zig:11741).
|
|
typedef struct {
|
|
Scope base; // tag = SCOPE_DEFER_NORMAL or SCOPE_DEFER_ERROR
|
|
Scope* parent;
|
|
uint32_t index;
|
|
uint32_t len;
|
|
} ScopeDefer;
|
|
|
|
// Scope.Label — for labeled blocks and loops.
|
|
typedef struct {
|
|
Scope base; // tag = SCOPE_LABEL
|
|
Scope* parent;
|
|
uint32_t label_name; // NullTerminatedString
|
|
uint32_t block_inst; // instruction index (not ref)
|
|
} ScopeLabel;
|
|
|
|
// Scope.Namespace (AstGen.zig:11737-11762) — represents a type declaration
|
|
// scope (struct, enum, union, opaque) that has declarations in it.
|
|
typedef struct {
|
|
Scope base; // tag = SCOPE_NAMESPACE
|
|
Scope* parent;
|
|
uint32_t node; // AST node of the container decl
|
|
uint32_t inst; // instruction index of the container decl
|
|
GenZir* declaring_gz; // the GenZir containing this namespace
|
|
bool maybe_generic; // true if inside a function (astgen.within_fn)
|
|
// Per-namespace declaration table (maps name string index → node index).
|
|
uint32_t* decl_names;
|
|
uint32_t* decl_nodes;
|
|
uint32_t decl_len;
|
|
uint32_t decl_cap;
|
|
// Captures (populated by tunnelThroughClosure).
|
|
// Keys are packed Capture u32 values, values are NullTerminatedString.
|
|
uint32_t* capture_keys;
|
|
uint32_t* capture_vals;
|
|
uint32_t captures_len;
|
|
uint32_t captures_cap;
|
|
} ScopeNamespace;
|
|
|
|
// Initialize a ScopeNamespace on the stack.
|
|
static void scopeNamespaceInit(ScopeNamespace* ns, Scope* parent,
|
|
uint32_t node, uint32_t inst, GenZir* declaring_gz, bool maybe_generic) {
|
|
memset(ns, 0, sizeof(*ns));
|
|
ns->base.tag = SCOPE_NAMESPACE;
|
|
ns->parent = parent;
|
|
ns->node = node;
|
|
ns->inst = inst;
|
|
ns->declaring_gz = declaring_gz;
|
|
ns->maybe_generic = maybe_generic;
|
|
}
|
|
|
|
// Free dynamic arrays in a ScopeNamespace.
|
|
static void scopeNamespaceDeinit(ScopeNamespace* ns) {
|
|
free(ns->decl_names);
|
|
free(ns->decl_nodes);
|
|
free(ns->capture_keys);
|
|
free(ns->capture_vals);
|
|
}
|
|
|
|
// Add a declaration to a namespace's decl table.
|
|
static void scopeNamespaceAddDecl(
|
|
ScopeNamespace* ns, uint32_t name_str, uint32_t decl_node) {
|
|
if (ns->decl_len >= ns->decl_cap) {
|
|
uint32_t new_cap = ns->decl_cap > 0 ? ns->decl_cap * 2 : 8;
|
|
uint32_t* n = realloc(ns->decl_names, new_cap * sizeof(uint32_t));
|
|
uint32_t* d = realloc(ns->decl_nodes, new_cap * sizeof(uint32_t));
|
|
if (!n || !d)
|
|
exit(1);
|
|
ns->decl_names = n;
|
|
ns->decl_nodes = d;
|
|
ns->decl_cap = new_cap;
|
|
}
|
|
ns->decl_names[ns->decl_len] = name_str;
|
|
ns->decl_nodes[ns->decl_len] = decl_node;
|
|
ns->decl_len++;
|
|
}
|
|
|
|
// Look up a name in a namespace's decl table. Returns the node index,
|
|
// or UINT32_MAX if not found.
|
|
static uint32_t scopeNamespaceFindDecl(
|
|
const ScopeNamespace* ns, uint32_t name_str) {
|
|
for (uint32_t i = 0; i < ns->decl_len; i++) {
|
|
if (ns->decl_names[i] == name_str)
|
|
return ns->decl_nodes[i];
|
|
}
|
|
return UINT32_MAX;
|
|
}
|
|
|
|
// Add a capture to a namespace. Returns the capture index (position).
|
|
// If the capture already exists, returns the existing index.
|
|
// key is a packed Capture u32 (3-bit tag | 29-bit data).
|
|
static uint32_t scopeNamespaceAddCapture(
|
|
ScopeNamespace* ns, uint32_t key, uint32_t name_str) {
|
|
// Check for existing capture with same key.
|
|
for (uint32_t i = 0; i < ns->captures_len; i++) {
|
|
if (ns->capture_keys[i] == key)
|
|
return i;
|
|
}
|
|
if (ns->captures_len >= ns->captures_cap) {
|
|
uint32_t new_cap = ns->captures_cap > 0 ? ns->captures_cap * 2 : 4;
|
|
uint32_t* k = realloc(ns->capture_keys, new_cap * sizeof(uint32_t));
|
|
uint32_t* v = realloc(ns->capture_vals, new_cap * sizeof(uint32_t));
|
|
if (!k || !v)
|
|
exit(1);
|
|
ns->capture_keys = k;
|
|
ns->capture_vals = v;
|
|
ns->captures_cap = new_cap;
|
|
}
|
|
uint32_t idx = ns->captures_len;
|
|
ns->capture_keys[idx] = key;
|
|
ns->capture_vals[idx] = name_str;
|
|
ns->captures_len++;
|
|
return idx;
|
|
}
|
|
|
|
// Walk to a scope's parent. Returns NULL if there is no parent.
|
|
static Scope* scopeParent(Scope* s) {
|
|
switch (s->tag) {
|
|
case SCOPE_GEN_ZIR:
|
|
return ((GenZir*)s)->parent;
|
|
case SCOPE_LOCAL_VAL:
|
|
return ((ScopeLocalVal*)s)->parent;
|
|
case SCOPE_LOCAL_PTR:
|
|
return ((ScopeLocalPtr*)s)->parent;
|
|
case SCOPE_DEFER_NORMAL:
|
|
case SCOPE_DEFER_ERROR:
|
|
return ((ScopeDefer*)s)->parent;
|
|
case SCOPE_NAMESPACE:
|
|
return ((ScopeNamespace*)s)->parent;
|
|
case SCOPE_LABEL:
|
|
return ((ScopeLabel*)s)->parent;
|
|
case SCOPE_TOP:
|
|
return NULL;
|
|
}
|
|
return NULL;
|
|
}
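
// Example of walking a scope chain with scopeParent() to find the innermost
// enclosing GenZir (a minimal sketch; real lookups in this file also match
// locals by name along the way):
//
//   static GenZir* innermostGenZir(Scope* s) {
//       while (s != NULL && s->tag != SCOPE_GEN_ZIR)
//           s = scopeParent(s);
//       return (GenZir*)s;
//   }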
|
|
|
|
// --- GenZir instruction helpers (AstGen.zig:11830-11850) ---
|
|
|
|
// Returns the number of instructions in this scope.
|
|
static uint32_t gzInstructionsLen(const GenZir* gz) {
|
|
return gz->astgen->scratch_inst_len - gz->instructions_top;
|
|
}
|
|
|
|
// Returns pointer to start of this scope's instructions in the shared array.
|
|
static const uint32_t* gzInstructionsSlice(const GenZir* gz) {
|
|
return gz->astgen->scratch_instructions + gz->instructions_top;
|
|
}
|
|
|
|
// Mirrors GenZir.instructionsSliceUpto (AstGen.zig:11835).
|
|
// Returns instructions from gz up to (but not including) stacked_gz's start.
|
|
static uint32_t gzInstructionsLenUpto(
|
|
const GenZir* gz, const GenZir* stacked_gz) {
|
|
return stacked_gz->instructions_top - gz->instructions_top;
|
|
}
|
|
|
|
static const uint32_t* gzInstructionsSliceUpto(
|
|
const GenZir* gz, const GenZir* stacked_gz) {
|
|
(void)stacked_gz; // the length is computed separately via gzInstructionsLenUpto()
|
|
return gz->astgen->scratch_instructions + gz->instructions_top;
|
|
}
|
|
|
|
// Mirrors GenZir.unstack (AstGen.zig:11822).
|
|
// Restores the shared array length to this scope's start.
|
|
static void gzUnstack(GenZir* gz) {
|
|
gz->astgen->scratch_inst_len = gz->instructions_top;
|
|
}
|
|
|
|
// Append an instruction index to this scope's portion of the shared array.
|
|
static void gzAppendInstruction(GenZir* gz, uint32_t inst_idx) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
if (ag->scratch_inst_len >= ag->scratch_inst_cap) {
|
|
uint32_t new_cap
|
|
= ag->scratch_inst_cap > 0 ? ag->scratch_inst_cap * 2 : 64;
|
|
uint32_t* p
|
|
= realloc(ag->scratch_instructions, new_cap * sizeof(uint32_t));
|
|
if (!p)
|
|
exit(1);
|
|
ag->scratch_instructions = p;
|
|
ag->scratch_inst_cap = new_cap;
|
|
}
|
|
ag->scratch_instructions[ag->scratch_inst_len++] = inst_idx;
|
|
}
|
|
|
|
// Mirrors GenZir.makeSubBlock (AstGen.zig:11852).
|
|
static GenZir makeSubBlock(GenZir* parent, Scope* scope) {
|
|
GenZir sub;
|
|
memset(&sub, 0, sizeof(sub));
|
|
sub.base.tag = SCOPE_GEN_ZIR;
|
|
sub.parent = scope;
|
|
sub.astgen = parent->astgen;
|
|
sub.decl_node_index = parent->decl_node_index;
|
|
sub.decl_line = parent->decl_line;
|
|
sub.is_comptime = parent->is_comptime;
|
|
sub.c_import = parent->c_import;
|
|
sub.is_typeof = parent->is_typeof;
|
|
sub.instructions_top = parent->astgen->scratch_inst_len;
|
|
sub.break_block = UINT32_MAX;
|
|
sub.continue_block = UINT32_MAX;
|
|
sub.label_token = UINT32_MAX;
|
|
sub.any_defer_node = parent->any_defer_node;
|
|
return sub;
|
|
}
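
// Typical lifecycle of a sub-block (illustrative; the ZIR_INST_BLOCK tag
// name is an assumption, and error handling is omitted):
//
//   uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, parent, node);
//   GenZir sub = makeSubBlock(parent, &parent->base);
//   ... emit instructions into &sub ...
//   setBlockBody(ag, &sub, block_inst); // writes body to extra, unstacks sub
//   gzAppendInstruction(parent, block_inst);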
|
|
|
|
// --- Capacity helpers ---
|
|
|
|
static void ensureExtraCapacity(AstGenCtx* ag, uint32_t additional) {
|
|
uint32_t needed = ag->extra_len + additional;
|
|
if (needed > ag->extra_cap) {
|
|
uint32_t new_cap = ag->extra_cap * 2;
|
|
if (new_cap < needed)
|
|
new_cap = needed;
|
|
uint32_t* p = realloc(ag->extra, new_cap * sizeof(uint32_t));
|
|
if (!p)
|
|
exit(1);
|
|
ag->extra = p;
|
|
ag->extra_cap = new_cap;
|
|
}
|
|
}
|
|
|
|
static void ensureInstCapacity(AstGenCtx* ag, uint32_t additional) {
|
|
uint32_t needed = ag->inst_len + additional;
|
|
if (needed > ag->inst_cap) {
|
|
uint32_t new_cap = ag->inst_cap * 2;
|
|
if (new_cap < needed)
|
|
new_cap = needed;
|
|
ZirInstTag* t = realloc(ag->inst_tags, new_cap * sizeof(ZirInstTag));
|
|
ZirInstData* d
|
|
= realloc(ag->inst_datas, new_cap * sizeof(ZirInstData));
|
|
if (!t || !d)
|
|
exit(1);
|
|
ag->inst_tags = t;
|
|
ag->inst_datas = d;
|
|
ag->inst_cap = new_cap;
|
|
}
|
|
}
|
|
|
|
static void ensureStringBytesCapacity(AstGenCtx* ag, uint32_t additional) {
|
|
uint32_t needed = ag->string_bytes_len + additional;
|
|
if (needed > ag->string_bytes_cap) {
|
|
uint32_t new_cap = ag->string_bytes_cap * 2;
|
|
if (new_cap < needed)
|
|
new_cap = needed;
|
|
uint8_t* p = realloc(ag->string_bytes, new_cap * sizeof(uint8_t));
|
|
if (!p)
|
|
exit(1);
|
|
ag->string_bytes = p;
|
|
ag->string_bytes_cap = new_cap;
|
|
}
|
|
}
|
|
|
|
// --- Extra data helpers ---
|
|
|
|
static uint32_t addExtraU32(AstGenCtx* ag, uint32_t value) {
|
|
ensureExtraCapacity(ag, 1);
|
|
uint32_t idx = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = value;
|
|
return idx;
|
|
}
|
|
|
|
// --- Instruction helpers ---
|
|
|
|
// Mirrors AstGen.reserveInstructionIndex (AstGen.zig:12902).
|
|
static uint32_t reserveInstructionIndex(AstGenCtx* ag) {
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
memset(&ag->inst_datas[idx], 0, sizeof(ZirInstData));
|
|
ag->inst_tags[idx] = (ZirInstTag)0;
|
|
ag->inst_len++;
|
|
return idx;
|
|
}
|
|
|
|
// Forward declarations.
|
|
static int32_t tokenIndexToRelative(const GenZir* gz, uint32_t token);
|
|
static uint32_t firstToken(const Ast* tree, uint32_t node);
|
|
static bool nodesNeedRlContains(const AstGenCtx* ag, uint32_t node);
|
|
|
|
// Mirrors GenZir.makeUnTok (AstGen.zig:12520).
|
|
// Allocates an instruction but does NOT add to GenZir body.
|
|
// Returns the raw instruction INDEX (not a Ref).
|
|
static uint32_t makeUnTok(
|
|
GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ZirInstData data;
|
|
data.un_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index);
|
|
data.un_tok.operand = operand;
|
|
ag->inst_tags[idx] = tag;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
return idx; // Raw index, NOT a Ref.
|
|
}
|
|
|
|
// Mirrors GenZir.add (AstGen.zig:13162).
|
|
// Appends an instruction and records it in the GenZir body.
|
|
// Returns the instruction index as a Ref (index + ZIR_REF_START_INDEX).
|
|
static uint32_t addInstruction(GenZir* gz, ZirInstTag tag, ZirInstData data) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = tag;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
// Record in sub-block body.
|
|
gzAppendInstruction(gz, idx);
|
|
return idx + ZIR_REF_START_INDEX; // toRef()
|
|
}
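
// Note: addInstruction() returns a Ref (index + ZIR_REF_START_INDEX), while
// the make* helpers return raw instruction indices. A sketch of the reverse
// conversion (helper name is hypothetical, not part of the port):
//
//   static uint32_t refToIndex(uint32_t ref) {
//       assert(ref >= ZIR_REF_START_INDEX);
//       return ref - ZIR_REF_START_INDEX;
//   }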
|
|
|
|
// Mirrors GenZir.addInt (AstGen.zig:12238).
|
|
static uint32_t addInt(GenZir* gz, uint64_t integer) {
|
|
ZirInstData data;
|
|
data.int_val = integer;
|
|
return addInstruction(gz, ZIR_INST_INT, data);
|
|
}
|
|
|
|
// Mirrors GenZir.add for bin data (Zir.zig:1877).
|
|
// Creates an instruction with bin data (lhs + rhs stored in inst_datas).
|
|
static uint32_t addBin(
|
|
GenZir* gz, ZirInstTag tag, uint32_t lhs, uint32_t rhs) {
|
|
ZirInstData data;
|
|
data.bin.lhs = lhs;
|
|
data.bin.rhs = rhs;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors GenZir.addPlNode (AstGen.zig:12308).
|
|
// Creates an instruction with pl_node data and 2-word payload.
|
|
static uint32_t addPlNodeBin(
|
|
GenZir* gz, ZirInstTag tag, uint32_t node, uint32_t lhs, uint32_t rhs) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = rhs;
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors addPlNode for 3-operand payloads (e.g. ArrayTypeSentinel).
|
|
static uint32_t addPlNodeTriple(GenZir* gz, ZirInstTag tag, uint32_t node,
|
|
uint32_t a, uint32_t b, uint32_t c) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = a;
|
|
ag->extra[ag->extra_len++] = b;
|
|
ag->extra[ag->extra_len++] = c;
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Checks if an AST identifier node is the single underscore `_`.
|
|
// Used for inferred array length detection in [_]T patterns.
|
|
// Intentionally does NOT support @"_" syntax (matches upstream).
|
|
static bool isUnderscoreIdent(const Ast* tree, uint32_t ident_node) {
|
|
uint32_t id_tok = tree->nodes.main_tokens[ident_node];
|
|
uint32_t id_start = tree->tokens.starts[id_tok];
|
|
if (tree->source[id_start] != '_')
|
|
return false;
|
|
if (id_start + 1 >= tree->source_len)
|
|
return true;
|
|
char next = tree->source[id_start + 1];
|
|
return !((next >= 'a' && next <= 'z') || (next >= 'A' && next <= 'Z')
|
|
|| next == '_' || (next >= '0' && next <= '9'));
|
|
}
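
// Example: `_` matches; `_x`, `_1`, and `@"_"` do not (the leading '@' of a
// quoted identifier fails the first-byte check).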
|
|
|
|
// Mirrors GenZir.addUnNode (AstGen.zig:12406).
|
|
static uint32_t addUnNode(
|
|
GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t node) {
|
|
ZirInstData data;
|
|
data.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.un_node.operand = operand;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors GenZir.addNode (AstGen.zig:12414).
|
|
static uint32_t addNode(GenZir* gz, ZirInstTag tag, uint32_t node) {
|
|
ZirInstData data;
|
|
data.node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors GenZir.addUnTok (AstGen.zig:12497).
|
|
static uint32_t addUnTok(
|
|
GenZir* gz, ZirInstTag tag, uint32_t operand, uint32_t abs_tok_index) {
|
|
ZirInstData data;
|
|
data.un_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index);
|
|
data.un_tok.operand = operand;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors GenZir.addStrTok (AstGen.zig:12349).
|
|
static uint32_t addStrTok(
|
|
GenZir* gz, ZirInstTag tag, uint32_t str_index, uint32_t token) {
|
|
ZirInstData data;
|
|
data.str_tok.start = str_index;
|
|
data.str_tok.src_tok = tokenIndexToRelative(gz, token);
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors GenZir.addPlNodePayloadIndex (AstGen.zig:12332).
|
|
static uint32_t addPlNodePayloadIndex(
|
|
GenZir* gz, ZirInstTag tag, uint32_t node, uint32_t payload_index) {
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// Mirrors GenZir.addBuiltinValue (AstGen.zig:12389-12391).
|
|
// Emits extended instruction with builtin_value opcode.
|
|
static uint32_t addBuiltinValue(
|
|
GenZir* gz, uint32_t src_node, uint16_t builtin_val) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
data.extended.opcode = (uint16_t)ZIR_EXT_BUILTIN_VALUE;
|
|
data.extended.small = builtin_val;
|
|
data.extended.operand
|
|
= (uint32_t)((int32_t)src_node - (int32_t)gz->decl_node_index);
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// Mirrors GenZir.addNodeExtended (AstGen.zig:12765-12779).
|
|
// Creates an extended instruction with operand = node offset.
|
|
static uint32_t addNodeExtended(
|
|
GenZir* gz, uint16_t opcode, uint32_t src_node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
data.extended.opcode = opcode;
|
|
data.extended.small = 0;
|
|
data.extended.operand
|
|
= (uint32_t)((int32_t)src_node - (int32_t)gz->decl_node_index);
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// Mirrors GenZir.addExtendedNodeSmall (AstGen.zig:12479-12499).
|
|
// Creates an extended instruction with operand = node offset and custom small.
|
|
static uint32_t addNodeExtendedSmall(
|
|
GenZir* gz, uint16_t opcode, uint32_t src_node, uint16_t small) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
data.extended.opcode = opcode;
|
|
data.extended.small = small;
|
|
data.extended.operand
|
|
= (uint32_t)((int32_t)src_node - (int32_t)gz->decl_node_index);
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// Mirrors GenZir.addExtendedPayload (AstGen.zig:12781).
|
|
// Creates an extended instruction with given payload_index and small=0.
|
|
static uint32_t addExtendedPayload(
|
|
GenZir* gz, uint16_t opcode, uint32_t payload_index) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
data.extended.opcode = opcode;
|
|
data.extended.small = 0;
|
|
data.extended.operand = payload_index;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// Mirrors GenZir.addExtendedPayloadSmall (variant with small field).
|
|
// Creates an extended instruction with given payload_index and small value.
|
|
static uint32_t addExtendedPayloadSmall(
|
|
GenZir* gz, uint16_t opcode, uint16_t small, uint32_t payload_index) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
data.extended.opcode = opcode;
|
|
data.extended.small = small;
|
|
data.extended.operand = payload_index;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// --- Source cursor (AstGen.zig:13335-13359) ---
|
|
|
|
// Mirrors AstGen.advanceSourceCursor (AstGen.zig:13342).
|
|
static void advanceSourceCursor(AstGenCtx* ag, uint32_t end) {
|
|
const char* source = ag->tree->source;
|
|
uint32_t i = ag->source_offset;
|
|
uint32_t line = ag->source_line;
|
|
uint32_t column = ag->source_column;
|
|
assert(i <= end);
|
|
while (i < end) {
|
|
if (source[i] == '\n') {
|
|
line++;
|
|
column = 0;
|
|
} else {
|
|
column++;
|
|
}
|
|
i++;
|
|
}
|
|
ag->source_offset = i;
|
|
ag->source_line = line;
|
|
ag->source_column = column;
|
|
}
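
// Worked example: advancing over "a = 1;\nb" from offset 0 to end = 7
// consumes the newline at index 6, so source_offset becomes 7, source_line
// increases by 1, and source_column resets to 0.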
|
|
|
|
// Mirrors tree.firstToken (Ast.zig:596).
|
|
// Recurse through nodes to find the first token.
|
|
static uint32_t firstToken(const Ast* tree, uint32_t node) {
|
|
uint32_t end_offset = 0;
|
|
uint32_t n = node;
|
|
while (1) {
|
|
AstNodeTag tag = tree->nodes.tags[n];
|
|
switch (tag) {
|
|
case AST_NODE_ROOT:
|
|
return 0;
|
|
|
|
// Return main_token directly (Ast.zig:602-643).
|
|
case AST_NODE_TEST_DECL:
|
|
case AST_NODE_ERRDEFER:
|
|
case AST_NODE_DEFER:
|
|
case AST_NODE_BOOL_NOT:
|
|
case AST_NODE_NEGATION:
|
|
case AST_NODE_BIT_NOT:
|
|
case AST_NODE_NEGATION_WRAP:
|
|
case AST_NODE_ADDRESS_OF:
|
|
case AST_NODE_TRY:
|
|
case AST_NODE_AWAIT:
|
|
case AST_NODE_OPTIONAL_TYPE:
|
|
case AST_NODE_SWITCH:
|
|
case AST_NODE_SWITCH_COMMA:
|
|
case AST_NODE_IF_SIMPLE:
|
|
case AST_NODE_IF:
|
|
case AST_NODE_SUSPEND:
|
|
case AST_NODE_RESUME:
|
|
case AST_NODE_CONTINUE:
|
|
case AST_NODE_BREAK:
|
|
case AST_NODE_RETURN:
|
|
case AST_NODE_ANYFRAME_TYPE:
|
|
case AST_NODE_IDENTIFIER:
|
|
case AST_NODE_ANYFRAME_LITERAL:
|
|
case AST_NODE_CHAR_LITERAL:
|
|
case AST_NODE_NUMBER_LITERAL:
|
|
case AST_NODE_UNREACHABLE_LITERAL:
|
|
case AST_NODE_STRING_LITERAL:
|
|
case AST_NODE_MULTILINE_STRING_LITERAL:
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
case AST_NODE_BUILTIN_CALL_TWO:
|
|
case AST_NODE_BUILTIN_CALL_TWO_COMMA:
|
|
case AST_NODE_BUILTIN_CALL:
|
|
case AST_NODE_BUILTIN_CALL_COMMA:
|
|
case AST_NODE_ERROR_SET_DECL:
|
|
case AST_NODE_COMPTIME:
|
|
case AST_NODE_NOSUSPEND:
|
|
case AST_NODE_ASM_SIMPLE:
|
|
case AST_NODE_ASM:
|
|
case AST_NODE_ARRAY_TYPE:
|
|
case AST_NODE_ARRAY_TYPE_SENTINEL:
|
|
case AST_NODE_ERROR_VALUE:
|
|
case AST_NODE_PTR_TYPE_ALIGNED:
|
|
case AST_NODE_PTR_TYPE_SENTINEL:
|
|
case AST_NODE_PTR_TYPE:
|
|
case AST_NODE_PTR_TYPE_BIT_RANGE:
|
|
return tree->nodes.main_tokens[n] - end_offset;
|
|
|
|
// Return main_token - 1: dot-prefixed inits and enum_literal
|
|
// (Ast.zig:645-654).
|
|
case AST_NODE_ARRAY_INIT_DOT:
|
|
case AST_NODE_ARRAY_INIT_DOT_COMMA:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
|
|
case AST_NODE_STRUCT_INIT_DOT:
|
|
case AST_NODE_STRUCT_INIT_DOT_COMMA:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
|
|
case AST_NODE_ENUM_LITERAL:
|
|
return tree->nodes.main_tokens[n] - 1 - end_offset;
|
|
|
|
// Recurse into LHS: all binary ops and compound expressions
|
|
// (Ast.zig:656-733).
|
|
case AST_NODE_CATCH:
|
|
case AST_NODE_EQUAL_EQUAL:
|
|
case AST_NODE_BANG_EQUAL:
|
|
case AST_NODE_LESS_THAN:
|
|
case AST_NODE_GREATER_THAN:
|
|
case AST_NODE_LESS_OR_EQUAL:
|
|
case AST_NODE_GREATER_OR_EQUAL:
|
|
case AST_NODE_ASSIGN_MUL:
|
|
case AST_NODE_ASSIGN_DIV:
|
|
case AST_NODE_ASSIGN_MOD:
|
|
case AST_NODE_ASSIGN_ADD:
|
|
case AST_NODE_ASSIGN_SUB:
|
|
case AST_NODE_ASSIGN_SHL:
|
|
case AST_NODE_ASSIGN_SHL_SAT:
|
|
case AST_NODE_ASSIGN_SHR:
|
|
case AST_NODE_ASSIGN_BIT_AND:
|
|
case AST_NODE_ASSIGN_BIT_XOR:
|
|
case AST_NODE_ASSIGN_BIT_OR:
|
|
case AST_NODE_ASSIGN_MUL_WRAP:
|
|
case AST_NODE_ASSIGN_ADD_WRAP:
|
|
case AST_NODE_ASSIGN_SUB_WRAP:
|
|
case AST_NODE_ASSIGN_MUL_SAT:
|
|
case AST_NODE_ASSIGN_ADD_SAT:
|
|
case AST_NODE_ASSIGN_SUB_SAT:
|
|
case AST_NODE_ASSIGN:
|
|
case AST_NODE_MERGE_ERROR_SETS:
|
|
case AST_NODE_MUL:
|
|
case AST_NODE_DIV:
|
|
case AST_NODE_MOD:
|
|
case AST_NODE_ARRAY_MULT:
|
|
case AST_NODE_MUL_WRAP:
|
|
case AST_NODE_MUL_SAT:
|
|
case AST_NODE_ADD:
|
|
case AST_NODE_SUB:
|
|
case AST_NODE_ARRAY_CAT:
|
|
case AST_NODE_ADD_WRAP:
|
|
case AST_NODE_SUB_WRAP:
|
|
case AST_NODE_ADD_SAT:
|
|
case AST_NODE_SUB_SAT:
|
|
case AST_NODE_SHL:
|
|
case AST_NODE_SHL_SAT:
|
|
case AST_NODE_SHR:
|
|
case AST_NODE_BIT_AND:
|
|
case AST_NODE_BIT_XOR:
|
|
case AST_NODE_BIT_OR:
|
|
case AST_NODE_ORELSE:
|
|
case AST_NODE_BOOL_AND:
|
|
case AST_NODE_BOOL_OR:
|
|
case AST_NODE_SLICE_OPEN:
|
|
case AST_NODE_ARRAY_ACCESS:
|
|
case AST_NODE_ARRAY_INIT_ONE:
|
|
case AST_NODE_ARRAY_INIT_ONE_COMMA:
|
|
case AST_NODE_SWITCH_RANGE:
|
|
case AST_NODE_ERROR_UNION:
|
|
case AST_NODE_FOR_RANGE:
|
|
case AST_NODE_CALL_ONE:
|
|
case AST_NODE_CALL_ONE_COMMA:
|
|
case AST_NODE_STRUCT_INIT_ONE:
|
|
case AST_NODE_STRUCT_INIT_ONE_COMMA:
|
|
case AST_NODE_CALL:
|
|
case AST_NODE_CALL_COMMA:
|
|
case AST_NODE_STRUCT_INIT:
|
|
case AST_NODE_STRUCT_INIT_COMMA:
|
|
case AST_NODE_SLICE:
|
|
case AST_NODE_SLICE_SENTINEL:
|
|
case AST_NODE_ARRAY_INIT:
|
|
case AST_NODE_ARRAY_INIT_COMMA:
|
|
case AST_NODE_FIELD_ACCESS:
|
|
case AST_NODE_UNWRAP_OPTIONAL:
|
|
case AST_NODE_DEREF:
|
|
case AST_NODE_ASYNC_CALL_ONE:
|
|
case AST_NODE_ASYNC_CALL_ONE_COMMA:
|
|
case AST_NODE_ASYNC_CALL:
|
|
case AST_NODE_ASYNC_CALL_COMMA:
|
|
n = tree->nodes.datas[n].lhs;
|
|
continue;
|
|
|
|
// Var decls: scan backwards for modifiers (Ast.zig:771-792).
|
|
case AST_NODE_GLOBAL_VAR_DECL:
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
case AST_NODE_ALIGNED_VAR_DECL: {
|
|
uint32_t mt = tree->nodes.main_tokens[n];
|
|
uint32_t i = mt;
|
|
while (i > 0) {
|
|
TokenizerTag tt = tree->tokens.tags[i - 1];
|
|
if (tt == TOKEN_KEYWORD_EXTERN || tt == TOKEN_KEYWORD_EXPORT
|
|
|| tt == TOKEN_KEYWORD_PUB
|
|
|| tt == TOKEN_KEYWORD_THREADLOCAL
|
|
|| tt == TOKEN_KEYWORD_COMPTIME
|
|
|| tt == TOKEN_STRING_LITERAL) {
|
|
i--;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
return i - end_offset;
|
|
}
|
|
// Fn decls: scan backwards for modifiers (Ast.zig:737-759).
|
|
case AST_NODE_FN_DECL:
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO: {
|
|
uint32_t mt = tree->nodes.main_tokens[n];
|
|
uint32_t i = mt;
|
|
while (i > 0) {
|
|
TokenizerTag tt = tree->tokens.tags[i - 1];
|
|
if (tt == TOKEN_KEYWORD_EXTERN || tt == TOKEN_KEYWORD_EXPORT
|
|
|| tt == TOKEN_KEYWORD_PUB || tt == TOKEN_KEYWORD_INLINE
|
|
|| tt == TOKEN_KEYWORD_NOINLINE
|
|
|| tt == TOKEN_STRING_LITERAL) {
|
|
i--;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
return i - end_offset;
|
|
}
|
|
// Container fields: check for preceding comptime (Ast.zig:761-769).
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
uint32_t mt = tree->nodes.main_tokens[n];
|
|
if (mt > 0 && tree->tokens.tags[mt - 1] == TOKEN_KEYWORD_COMPTIME)
|
|
end_offset++;
|
|
return mt - end_offset;
|
|
}
|
|
// Blocks: check for label (Ast.zig:794-805).
|
|
case AST_NODE_BLOCK:
|
|
case AST_NODE_BLOCK_SEMICOLON:
|
|
case AST_NODE_BLOCK_TWO:
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON: {
|
|
uint32_t lbrace = tree->nodes.main_tokens[n];
|
|
if (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER)
|
|
end_offset += 2;
|
|
return lbrace - end_offset;
|
|
}
|
|
// Container decls: check for packed/extern (Ast.zig:807-826).
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: {
|
|
uint32_t mt = tree->nodes.main_tokens[n];
|
|
if (mt > 0) {
|
|
TokenizerTag prev = tree->tokens.tags[mt - 1];
|
|
if (prev == TOKEN_KEYWORD_PACKED
|
|
|| prev == TOKEN_KEYWORD_EXTERN)
|
|
end_offset++;
|
|
}
|
|
return mt - end_offset;
|
|
}
|
|
|
|
// Switch cases: check for inline/else/values (Ast.zig:834-847).
|
|
case AST_NODE_SWITCH_CASE_ONE:
|
|
if (tree->nodes.datas[n].lhs == 0)
|
|
return tree->nodes.main_tokens[n] - 1 - end_offset;
|
|
n = tree->nodes.datas[n].lhs;
|
|
continue;
|
|
case AST_NODE_SWITCH_CASE_INLINE_ONE:
|
|
if (tree->nodes.datas[n].lhs == 0)
|
|
return tree->nodes.main_tokens[n] - 2;
|
|
end_offset += 1;
|
|
n = tree->nodes.datas[n].lhs;
|
|
continue;
|
|
case AST_NODE_SWITCH_CASE: {
|
|
uint32_t extra_idx = tree->nodes.datas[n].lhs;
|
|
uint32_t items_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t items_end = tree->extra_data.arr[extra_idx + 1];
|
|
if (items_start == items_end)
|
|
return tree->nodes.main_tokens[n] - 1 - end_offset;
|
|
n = tree->extra_data.arr[items_start];
|
|
continue;
|
|
}
|
|
case AST_NODE_SWITCH_CASE_INLINE: {
|
|
uint32_t extra_idx = tree->nodes.datas[n].lhs;
|
|
uint32_t items_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t items_end = tree->extra_data.arr[extra_idx + 1];
|
|
if (items_start == items_end)
|
|
return tree->nodes.main_tokens[n] - 2;
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[items_start];
|
|
continue;
|
|
}
|
|
|
|
// Asm output/input: first token is '[' before main_token
|
|
// (Ast.zig:849-852).
|
|
case AST_NODE_ASM_OUTPUT:
|
|
case AST_NODE_ASM_INPUT:
|
|
return tree->nodes.main_tokens[n] - 1 - end_offset;
|
|
|
|
// While/for: check for inline and label (Ast.zig:854-870).
|
|
case AST_NODE_WHILE_SIMPLE:
|
|
case AST_NODE_WHILE_CONT:
|
|
case AST_NODE_WHILE:
|
|
case AST_NODE_FOR_SIMPLE:
|
|
case AST_NODE_FOR: {
|
|
uint32_t result = tree->nodes.main_tokens[n];
|
|
if (result > 0
|
|
&& tree->tokens.tags[result - 1] == TOKEN_KEYWORD_INLINE)
|
|
result--;
|
|
if (result >= 2 && tree->tokens.tags[result - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[result - 2] == TOKEN_IDENTIFIER)
|
|
result -= 2;
|
|
return result - end_offset;
|
|
}
|
|
|
|
// Assign destructure: recurse into first variable
|
|
// (Ast.zig:735).
|
|
case AST_NODE_ASSIGN_DESTRUCTURE: {
|
|
uint32_t extra_start = tree->nodes.datas[n].lhs;
|
|
// extra_data[extra_start] = variable_count
|
|
// extra_data[extra_start + 1 .. +1+count] = variables
|
|
n = tree->extra_data.arr[extra_start + 1];
|
|
continue;
|
|
}
|
|
|
|
// Fallback for any remaining node types.
|
|
default:
|
|
return tree->nodes.main_tokens[n] - end_offset;
|
|
}
|
|
}
|
|
}
|
|
|
|
// Mirrors AstGen.advanceSourceCursorToNode (AstGen.zig:13335).
|
|
static void advanceSourceCursorToNode(AstGenCtx* ag, uint32_t node) {
|
|
uint32_t ft = firstToken(ag->tree, node);
|
|
uint32_t token_start = ag->tree->tokens.starts[ft];
|
|
(void)0; // Upstream asserts the cursor never moves backward; that check is intentionally disabled here for now.
|
|
advanceSourceCursor(ag, token_start);
|
|
}
|
|
|
|
// Mirrors maybeAdvanceSourceCursorToMainToken (AstGen.zig:13324).
|
|
// Skips advancing when in comptime scope (matching upstream behavior).
|
|
static void advanceSourceCursorToMainToken(
|
|
AstGenCtx* ag, const GenZir* gz, uint32_t node) {
|
|
if (gz->is_comptime)
|
|
return;
|
|
uint32_t main_tok = ag->tree->nodes.main_tokens[node];
|
|
uint32_t token_start = ag->tree->tokens.starts[main_tok];
|
|
advanceSourceCursor(ag, token_start);
|
|
}
|
|
|
|
// --- Token helpers ---
|
|
|
|
// Mirrors GenZir.tokenIndexToRelative (AstGen.zig:11897).
|
|
// Returns destination - base as i32.
|
|
static int32_t tokenIndexToRelative(const GenZir* gz, uint32_t token) {
|
|
uint32_t base = firstToken(gz->astgen->tree, gz->decl_node_index);
|
|
return (int32_t)token - (int32_t)base;
|
|
}
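
// Example: if the declaration's first token index is 40 and `token` is 43,
// the stored src_tok is 3; the result can be negative for tokens that
// precede the declaration, hence the i32 return type.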
|
|
|
|
// --- String bytes helpers ---
|
|
|
|
// Linear search for an existing null-terminated string in string_bytes,
// consulting only the positions registered in string_table.
// Returns the string index if found, or UINT32_MAX if not found.
// Mirrors the AstGen.string_table hash-table lookup (AstGen.zig:11564).
static uint32_t findExistingString(
|
|
const AstGenCtx* ag, const char* str, uint32_t len) {
|
|
for (uint32_t k = 0; k < ag->string_table_len; k++) {
|
|
uint32_t pos = ag->string_table[k];
|
|
// Compare: string at pos is null-terminated in string_bytes.
|
|
const char* existing = (const char*)ag->string_bytes + pos;
|
|
uint32_t existing_len = (uint32_t)strlen(existing);
|
|
if (existing_len == len && memcmp(existing, str, len) == 0) {
|
|
return pos;
|
|
}
|
|
}
|
|
return UINT32_MAX;
|
|
}
|
|
|
|
// Register a string position in the string table for deduplication.
|
|
static void registerString(AstGenCtx* ag, uint32_t pos) {
|
|
if (ag->string_table_len >= ag->string_table_cap) {
|
|
uint32_t new_cap = ag->string_table_cap * 2;
|
|
if (new_cap < 64)
|
|
new_cap = 64;
|
|
uint32_t* p = realloc(ag->string_table, new_cap * sizeof(uint32_t));
|
|
if (!p)
|
|
exit(1);
|
|
ag->string_table = p;
|
|
ag->string_table_cap = new_cap;
|
|
}
|
|
ag->string_table[ag->string_table_len++] = pos;
|
|
}
|
|
|
|
// Mirrors AstGen.tokenIdentEql (AstGen.zig:6148-6152).
|
|
// Compares two identifier tokens by source text without touching string_bytes.
|
|
static bool tokenIdentEql(const Ast* tree, uint32_t tok1, uint32_t tok2) {
|
|
uint32_t s1 = tree->tokens.starts[tok1];
|
|
uint32_t s2 = tree->tokens.starts[tok2];
|
|
uint32_t e1 = tree->tokens.starts[tok1 + 1];
|
|
uint32_t e2 = tree->tokens.starts[tok2 + 1];
|
|
// Token length includes trailing whitespace in starts delta, but for
|
|
// identifiers the actual content is a contiguous alphanumeric/underscore
|
|
// run. Compute actual identifier lengths.
|
|
uint32_t len1 = 0;
|
|
while (s1 + len1 < e1) {
|
|
char c = tree->source[s1 + len1];
|
|
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
|
|
|| (c >= '0' && c <= '9') || c == '_'))
|
|
break;
|
|
len1++;
|
|
}
|
|
uint32_t len2 = 0;
|
|
while (s2 + len2 < e2) {
|
|
char c = tree->source[s2 + len2];
|
|
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
|
|
|| (c >= '0' && c <= '9') || c == '_'))
|
|
break;
|
|
len2++;
|
|
}
|
|
return len1 == len2
|
|
&& memcmp(tree->source + s1, tree->source + s2, len1) == 0;
|
|
}
|
|
|
|
// Forward declaration for strLitAsString (used by identAsString for @"..."
|
|
// quoted identifiers with escapes).
|
|
static void strLitAsString(AstGenCtx* ag, uint32_t str_lit_token,
|
|
uint32_t* out_index, uint32_t* out_len);
|
|
|
|
// Mirrors AstGen.identAsString (AstGen.zig:11530).
|
|
// Handles both bare identifiers and @"..." quoted identifiers.
|
|
static uint32_t identAsString(AstGenCtx* ag, uint32_t ident_token) {
|
|
uint32_t start = ag->tree->tokens.starts[ident_token];
|
|
const char* source = ag->tree->source;
|
|
|
|
if (source[start] == '@' && start + 1 < ag->tree->source_len
|
|
&& source[start + 1] == '"') {
|
|
// Quoted identifier: @"name" (AstGen.zig:11297-11308).
|
|
// Extract content between quotes, handling escapes.
|
|
uint32_t si, sl;
|
|
// str_lit_token refers to the same token, content starts after @"
|
|
// We reuse strLitAsString but offset by 1 to skip '@'.
|
|
// Actually, strLitAsString expects a token whose source starts
|
|
// with '"'. The @"..." token starts with '@'. We need to handle
|
|
// the offset manually.
|
|
uint32_t content_start = start + 2; // skip @"
|
|
uint32_t content_end = content_start;
|
|
while (
|
|
content_end < ag->tree->source_len && source[content_end] != '"')
|
|
content_end++;
|
|
// Check for escapes.
|
|
bool has_escapes = false;
|
|
for (uint32_t j = content_start; j < content_end; j++) {
|
|
if (source[j] == '\\') {
|
|
has_escapes = true;
|
|
break;
|
|
}
|
|
}
|
|
|
|
if (!has_escapes) {
|
|
uint32_t content_len = content_end - content_start;
|
|
uint32_t existing
|
|
= findExistingString(ag, source + content_start, content_len);
|
|
if (existing != UINT32_MAX)
|
|
return existing;
|
|
uint32_t str_index = ag->string_bytes_len;
|
|
ensureStringBytesCapacity(ag, content_len + 1);
|
|
memcpy(ag->string_bytes + ag->string_bytes_len,
|
|
source + content_start, content_len);
|
|
ag->string_bytes_len += content_len;
|
|
ag->string_bytes[ag->string_bytes_len++] = 0;
|
|
registerString(ag, str_index);
|
|
return str_index;
|
|
}
|
|
|
|
// With escapes: use strLitAsString-like decoding.
|
|
strLitAsString(ag, ident_token, &si, &sl);
|
|
return si;
|
|
}
|
|
|
|
// Bare identifier: scan alphanumeric + underscore.
|
|
uint32_t end = start;
|
|
while (end < ag->tree->source_len) {
|
|
char ch = source[end];
|
|
if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')
|
|
|| (ch >= '0' && ch <= '9') || ch == '_') {
|
|
end++;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
uint32_t ident_len = end - start;
|
|
|
|
// Check for existing string (dedup).
|
|
uint32_t existing = findExistingString(ag, source + start, ident_len);
|
|
if (existing != UINT32_MAX)
|
|
return existing;
|
|
|
|
uint32_t str_index = ag->string_bytes_len;
|
|
ensureStringBytesCapacity(ag, ident_len + 1);
|
|
memcpy(ag->string_bytes + ag->string_bytes_len, source + start, ident_len);
|
|
ag->string_bytes_len += ident_len;
|
|
ag->string_bytes[ag->string_bytes_len++] = 0;
|
|
registerString(ag, str_index);
|
|
return str_index;
|
|
}
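
// Example: every occurrence of the identifier `len` returns the same
// string_bytes index, and `@"len"` (quoted, no escapes) dedups to it as
// well, since both paths consult findExistingString() before appending.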
|
|
|
|
// Mirrors AstGen.strLitAsString (AstGen.zig:11553).
|
|
// Decodes string literal, checks for embedded nulls.
|
|
// If embedded null found: store raw bytes without trailing null, no dedup.
|
|
// Otherwise: dedup via string_table, add trailing null.
|
|
static void strLitAsString(AstGenCtx* ag, uint32_t str_lit_token,
|
|
uint32_t* out_index, uint32_t* out_len) {
|
|
uint32_t tok_start = ag->tree->tokens.starts[str_lit_token];
|
|
const char* source = ag->tree->source;
|
|
|
|
// Skip opening quote.
|
|
uint32_t i = tok_start + 1;
|
|
// Find closing quote, skipping escaped characters.
|
|
uint32_t raw_end = i;
|
|
while (raw_end < ag->tree->source_len) {
|
|
if (source[raw_end] == '\\') {
|
|
raw_end += 2; // skip escape + escaped char
|
|
} else if (source[raw_end] == '"') {
|
|
break;
|
|
} else {
|
|
raw_end++;
|
|
}
|
|
}
|
|
|
|
// Check if there are any escape sequences.
|
|
bool has_escapes = false;
|
|
for (uint32_t j = i; j < raw_end; j++) {
|
|
if (source[j] == '\\') {
|
|
has_escapes = true;
|
|
break;
|
|
}
|
|
}
|
|
|
|
if (!has_escapes) {
|
|
// Fast path: no escapes, no embedded nulls possible.
|
|
uint32_t content_len = raw_end - i;
|
|
uint32_t existing = findExistingString(ag, source + i, content_len);
|
|
if (existing != UINT32_MAX) {
|
|
*out_index = existing;
|
|
*out_len = content_len;
|
|
return;
|
|
}
|
|
uint32_t str_index = ag->string_bytes_len;
|
|
ensureStringBytesCapacity(ag, content_len + 1);
|
|
memcpy(
|
|
ag->string_bytes + ag->string_bytes_len, source + i, content_len);
|
|
ag->string_bytes_len += content_len;
|
|
ag->string_bytes[ag->string_bytes_len++] = 0;
|
|
registerString(ag, str_index);
|
|
*out_index = str_index;
|
|
*out_len = content_len;
|
|
return;
|
|
}
|
|
|
|
// Slow path: process escape sequences (AstGen.zig:11558).
|
|
// Decode directly into string_bytes (like upstream).
|
|
uint32_t str_index = ag->string_bytes_len;
|
|
uint32_t max_len = raw_end - i;
|
|
ensureStringBytesCapacity(ag, max_len + 1);
|
|
while (i < raw_end) {
|
|
if (source[i] == '\\') {
|
|
i++;
|
|
if (i >= raw_end)
|
|
break;
|
|
switch (source[i]) {
|
|
case 'n':
|
|
ag->string_bytes[ag->string_bytes_len++] = '\n';
|
|
break;
|
|
case 'r':
|
|
ag->string_bytes[ag->string_bytes_len++] = '\r';
|
|
break;
|
|
case 't':
|
|
ag->string_bytes[ag->string_bytes_len++] = '\t';
|
|
break;
|
|
case '\\':
|
|
ag->string_bytes[ag->string_bytes_len++] = '\\';
|
|
break;
|
|
case '\'':
|
|
ag->string_bytes[ag->string_bytes_len++] = '\'';
|
|
break;
|
|
case '"':
|
|
ag->string_bytes[ag->string_bytes_len++] = '"';
|
|
break;
|
|
case 'x': {
|
|
// \xNN hex escape.
|
|
uint8_t val = 0;
|
|
for (int k = 0; k < 2 && i + 1 < raw_end; k++) {
|
|
i++;
|
|
char c = source[i];
|
|
if (c >= '0' && c <= '9')
|
|
val = (uint8_t)(val * 16 + (uint8_t)(c - '0'));
|
|
else if (c >= 'a' && c <= 'f')
|
|
val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'a'));
|
|
else if (c >= 'A' && c <= 'F')
|
|
val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'A'));
|
|
}
|
|
ag->string_bytes[ag->string_bytes_len++] = val;
|
|
break;
|
|
}
|
|
case 'u': {
|
|
// \u{NNNNNN} unicode escape (string_literal.zig:194-231).
|
|
// Skip past '{'.
|
|
i++;
|
|
// Parse hex digits until '}'.
|
|
uint32_t codepoint = 0;
|
|
while (i + 1 < raw_end) {
|
|
i++;
|
|
char c = source[i];
|
|
if (c >= '0' && c <= '9') {
|
|
codepoint = codepoint * 16 + (uint32_t)(c - '0');
|
|
} else if (c >= 'a' && c <= 'f') {
|
|
codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'a');
|
|
} else if (c >= 'A' && c <= 'F') {
|
|
codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'A');
|
|
} else {
|
|
// Must be '}', done.
|
|
break;
|
|
}
|
|
}
|
|
// Encode codepoint as UTF-8 (unicode.zig:53-82).
|
|
if (codepoint <= 0x7F) {
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)codepoint;
|
|
} else if (codepoint <= 0x7FF) {
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0xC0 | (codepoint >> 6));
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0x80 | (codepoint & 0x3F));
|
|
} else if (codepoint <= 0xFFFF) {
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0xE0 | (codepoint >> 12));
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0x80 | ((codepoint >> 6) & 0x3F));
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0x80 | (codepoint & 0x3F));
|
|
} else {
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0xF0 | (codepoint >> 18));
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0x80 | ((codepoint >> 12) & 0x3F));
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0x80 | ((codepoint >> 6) & 0x3F));
|
|
ag->string_bytes[ag->string_bytes_len++]
|
|
= (uint8_t)(0x80 | (codepoint & 0x3F));
|
|
}
|
|
break;
|
|
}
|
|
default:
|
|
ag->string_bytes[ag->string_bytes_len++] = (uint8_t)source[i];
|
|
break;
|
|
}
|
|
} else {
|
|
ag->string_bytes[ag->string_bytes_len++] = (uint8_t)source[i];
|
|
}
|
|
i++;
|
|
}
|
|
uint32_t decoded_len = ag->string_bytes_len - str_index;
|
|
uint8_t* key = ag->string_bytes + str_index;
|
|
|
|
// Check for embedded null bytes (AstGen.zig:11560).
|
|
// If found, skip dedup and don't add trailing null.
|
|
bool has_embedded_null = false;
|
|
for (uint32_t j = 0; j < decoded_len; j++) {
|
|
if (key[j] == 0) {
|
|
has_embedded_null = true;
|
|
break;
|
|
}
|
|
}
|
|
if (has_embedded_null) {
|
|
*out_index = str_index;
|
|
*out_len = decoded_len;
|
|
return;
|
|
}
|
|
|
|
// Dedup against string_table (AstGen.zig:11564-11585).
|
|
uint32_t existing = findExistingString(ag, (const char*)key, decoded_len);
|
|
if (existing != UINT32_MAX) {
|
|
// Shrink back (AstGen.zig:11570).
|
|
ag->string_bytes_len = str_index;
|
|
*out_index = existing;
|
|
*out_len = decoded_len;
|
|
return;
|
|
}
|
|
|
|
// New entry: add trailing null and register.
|
|
ensureStringBytesCapacity(ag, 1);
|
|
ag->string_bytes[ag->string_bytes_len++] = 0;
|
|
registerString(ag, str_index);
|
|
*out_index = str_index;
|
|
*out_len = decoded_len;
|
|
}
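
// Example: "a\x00b" decodes to three bytes containing an embedded null, so
// the bytes are stored without a trailing null and are not registered for
// dedup; "a\nb" also decodes to three bytes but gets a trailing null and is
// registered in string_table.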
|
|
|
|
// --- Declaration helpers ---
|
|
|
|
// Mirrors GenZir.makeDeclaration (AstGen.zig:12906).
|
|
static uint32_t makeDeclaration(AstGenCtx* ag, uint32_t node) {
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_DECLARATION;
|
|
ZirInstData data;
|
|
memset(&data, 0, sizeof(data));
|
|
data.declaration.src_node = node;
|
|
// payload_index is set later by setDeclaration.
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
return idx;
|
|
}
|
|
|
|
// Mirrors GenZir.makeBreakCommon (AstGen.zig:12667).
|
|
// Creates a break_inline instruction with a Break payload in extra.
|
|
// Records the instruction in the GenZir body.
|
|
static uint32_t makeBreakInline(GenZir* gz, uint32_t block_inst,
|
|
uint32_t operand, int32_t operand_src_node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
ensureExtraCapacity(ag, 2);
|
|
|
|
// Write Zir.Inst.Break payload to extra (Zir.zig:2489).
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = (uint32_t)operand_src_node;
|
|
ag->extra[ag->extra_len++] = block_inst;
|
|
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_BREAK_INLINE;
|
|
ZirInstData data;
|
|
data.break_data.operand = operand;
|
|
data.break_data.payload_index = payload_index;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
|
|
// Record in sub-block body.
|
|
gzAppendInstruction(gz, idx);
|
|
return idx;
|
|
}
|
|
|
|
// Mirrors GenZir.makeBlockInst (AstGen.zig:12890).
|
|
// Creates a pl_node instruction with payload_index left as 0 (set later).
|
|
// Does NOT append to gz's instruction list.
|
|
// Returns instruction index (not a ref).
|
|
static uint32_t makeBlockInst(
|
|
AstGenCtx* ag, ZirInstTag tag, const GenZir* gz, uint32_t node) {
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = tag;
|
|
ZirInstData data;
|
|
memset(&data, 0, sizeof(data));
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = 0; // set later
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
return idx;
|
|
}
|
|
|
|
// Mirrors appendPossiblyRefdBodyInst (AstGen.zig:13675-13683).
|
|
// Appends body_inst first, then recursively appends ref_table entry.
|
|
static void appendPossiblyRefdBodyInst(AstGenCtx* ag, uint32_t body_inst) {
|
|
ag->extra[ag->extra_len++] = body_inst;
|
|
uint32_t ref_inst;
|
|
if (refTableFetchRemove(ag, body_inst, &ref_inst)) {
|
|
appendPossiblyRefdBodyInst(ag, ref_inst);
|
|
}
|
|
}
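
// Worked example: with body = {5, 9} and a ref_table entry 5 -> 12, the
// emitted body is {5, 12, 9}: the deferred ref (12) is spliced in right
// after its operand (5), and the table entry is consumed by fetchRemove.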
|
|
|
|
// Mirrors appendBodyWithFixupsExtraRefsArrayList (AstGen.zig:13659-13673).
|
|
// First processes extra_refs (e.g. param_insts), prepending their ref_table
|
|
// entries. Then writes body instructions with ref_table fixups.
|
|
static void appendBodyWithFixupsExtraRefs(AstGenCtx* ag, const uint32_t* body,
|
|
uint32_t body_len, const uint32_t* extra_refs, uint32_t extra_refs_len) {
|
|
for (uint32_t i = 0; i < extra_refs_len; i++) {
|
|
uint32_t ref_inst;
|
|
if (refTableFetchRemove(ag, extra_refs[i], &ref_inst)) {
|
|
appendPossiblyRefdBodyInst(ag, ref_inst);
|
|
}
|
|
}
|
|
for (uint32_t i = 0; i < body_len; i++) {
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
}
|
|
}
|
|
|
|
// Scratch extra capacity helper (for call arg bodies).
|
|
static void ensureScratchExtraCapacity(AstGenCtx* ag, uint32_t additional) {
|
|
uint32_t needed = ag->scratch_extra_len + additional;
|
|
if (needed > ag->scratch_extra_cap) {
|
|
uint32_t new_cap = ag->scratch_extra_cap * 2;
|
|
if (new_cap < needed)
|
|
new_cap = needed;
|
|
if (new_cap < 64)
|
|
new_cap = 64;
|
|
uint32_t* p = realloc(ag->scratch_extra, new_cap * sizeof(uint32_t));
|
|
if (!p)
|
|
exit(1);
|
|
ag->scratch_extra = p;
|
|
ag->scratch_extra_cap = new_cap;
|
|
}
|
|
}
|
|
|
|
// Like appendPossiblyRefdBodyInst but appends to scratch_extra instead of
|
|
// extra.
|
|
static void appendPossiblyRefdBodyInstScratch(
|
|
AstGenCtx* ag, uint32_t body_inst) {
|
|
ag->scratch_extra[ag->scratch_extra_len++] = body_inst;
|
|
uint32_t ref_inst;
|
|
if (refTableFetchRemove(ag, body_inst, &ref_inst)) {
|
|
ensureScratchExtraCapacity(ag, 1);
|
|
appendPossiblyRefdBodyInstScratch(ag, ref_inst);
|
|
}
|
|
}
|
|
|
|
// Mirrors countBodyLenAfterFixupsExtraRefs (AstGen.zig:13694-13711).
|
|
static uint32_t countBodyLenAfterFixupsExtraRefs(AstGenCtx* ag,
|
|
const uint32_t* body, uint32_t body_len, const uint32_t* extra_refs,
|
|
uint32_t extra_refs_len) {
|
|
uint32_t count = body_len;
|
|
for (uint32_t i = 0; i < body_len; i++) {
|
|
uint32_t check_inst = body[i];
|
|
const uint32_t* ref;
|
|
while ((ref = refTableGet(ag, check_inst)) != NULL) {
|
|
count++;
|
|
check_inst = *ref;
|
|
}
|
|
}
|
|
for (uint32_t i = 0; i < extra_refs_len; i++) {
|
|
uint32_t check_inst = extra_refs[i];
|
|
const uint32_t* ref;
|
|
while ((ref = refTableGet(ag, check_inst)) != NULL) {
|
|
count++;
|
|
check_inst = *ref;
|
|
}
|
|
}
|
|
return count;
|
|
}
|
|
|
|
// Mirrors countBodyLenAfterFixups (AstGen.zig:13686-13688).
|
|
static uint32_t countBodyLenAfterFixups(
|
|
AstGenCtx* ag, const uint32_t* body, uint32_t body_len) {
|
|
return countBodyLenAfterFixupsExtraRefs(ag, body, body_len, NULL, 0);
|
|
}
|
|
|
|
// Mirrors fetchRemoveRefEntries (AstGen.zig:14066-14074).
|
|
// For each param_inst, removes its ref_table entry (if present) and saves
|
|
// the value. Returns the number of entries removed.
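// Each param contributes at most one entry, so callers that size `result`
// with param_insts_len elements never hit the result_cap guard.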
|
|
static uint32_t fetchRemoveRefEntries(AstGenCtx* ag,
|
|
const uint32_t* param_insts, uint32_t param_insts_len, uint32_t* result,
|
|
uint32_t result_cap) {
|
|
uint32_t count = 0;
|
|
for (uint32_t i = 0; i < param_insts_len; i++) {
|
|
uint32_t ref_inst;
|
|
if (refTableFetchRemove(ag, param_insts[i], &ref_inst)) {
|
|
if (count < result_cap)
|
|
result[count] = ref_inst;
|
|
count++;
|
|
}
|
|
}
|
|
return count;
|
|
}
|
|
|
|
// Mirrors appendBodyWithFixups (AstGen.zig:13649-13657).
|
|
static void appendBodyWithFixups(
|
|
AstGenCtx* ag, const uint32_t* body, uint32_t body_len) {
|
|
for (uint32_t i = 0; i < body_len; i++) {
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
}
|
|
}
|
|
|
|
// Mirrors GenZir.setBlockBody (AstGen.zig:11949).
|
|
// Writes Block payload (body_len + instruction indices) to extra.
|
|
// Sets the instruction's payload_index. Unstacks gz.
|
|
static void setBlockBody(AstGenCtx* ag, GenZir* gz, uint32_t inst) {
|
|
uint32_t raw_body_len = gzInstructionsLen(gz);
|
|
const uint32_t* body = gzInstructionsSlice(gz);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
ensureExtraCapacity(ag, 1 + body_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = body_len;
|
|
for (uint32_t i = 0; i < raw_body_len; i++) {
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
}
|
|
ag->inst_datas[inst].pl_node.payload_index = payload_index;
|
|
gzUnstack(gz);
|
|
}
|
|
|
|
// Mirrors GenZir.setTryBody (AstGen.zig:11997).
|
|
// Writes Try payload (operand + body_len + instruction indices) to extra.
|
|
// Sets the instruction's payload_index. Unstacks gz.
|
|
static void setTryBody(
|
|
AstGenCtx* ag, GenZir* gz, uint32_t inst, uint32_t operand) {
|
|
uint32_t raw_body_len = gzInstructionsLen(gz);
|
|
const uint32_t* body = gzInstructionsSlice(gz);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
ensureExtraCapacity(ag, 2 + body_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = operand; // Try.operand
|
|
ag->extra[ag->extra_len++] = body_len; // Try.body_len
|
|
for (uint32_t i = 0; i < raw_body_len; i++) {
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
}
|
|
ag->inst_datas[inst].pl_node.payload_index = payload_index;
|
|
gzUnstack(gz);
|
|
}
|
|
|
|
// Mirrors GenZir.setBlockComptimeBody (AstGen.zig:11972).
|
|
// Like setBlockBody but prepends comptime_reason before body_len.
|
|
// Asserts inst is a BLOCK_COMPTIME.
|
|
static void setBlockComptimeBody(
|
|
AstGenCtx* ag, GenZir* gz, uint32_t inst, uint32_t comptime_reason) {
|
|
uint32_t raw_body_len = gzInstructionsLen(gz);
|
|
const uint32_t* body = gzInstructionsSlice(gz);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
ensureExtraCapacity(ag, 2 + body_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = comptime_reason;
|
|
ag->extra[ag->extra_len++] = body_len;
|
|
for (uint32_t i = 0; i < raw_body_len; i++) {
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
}
|
|
ag->inst_datas[inst].pl_node.payload_index = payload_index;
|
|
gzUnstack(gz);
|
|
}
|
|
|
|
// Mirrors GenZir.addBreak (AstGen.zig:12623).
|
|
// Creates a break instruction with the given tag (break or break_inline).
|
|
static uint32_t addBreak(GenZir* gz, ZirInstTag tag, uint32_t block_inst,
|
|
uint32_t operand, int32_t operand_src_node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
ensureExtraCapacity(ag, 2);
|
|
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = (uint32_t)operand_src_node;
|
|
ag->extra[ag->extra_len++] = block_inst;
|
|
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = tag;
|
|
ZirInstData data;
|
|
data.break_data.operand = operand;
|
|
data.break_data.payload_index = payload_index;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx;
|
|
}
|
|
|
|
// Mirrors GenZir.addCondBr (AstGen.zig:12834).
|
|
// Creates condbr instruction placeholder with src_node set.
|
|
// Payload is filled later by setCondBrPayload.
|
|
static uint32_t addCondBr(GenZir* gz, ZirInstTag tag, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = tag;
|
|
ZirInstData data;
|
|
memset(&data, 0, sizeof(data));
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = 0; // set later
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx;
|
|
}
|
|
|
|
// Mirrors setCondBrPayload (AstGen.zig:6501).
|
|
// Writes CondBr payload: {condition, then_body_len, else_body_len} then
|
|
// then_body instructions, then else_body instructions. Unstacks both scopes.
|
|
// IMPORTANT: then_gz and else_gz are stacked (else on top of then), so
|
|
// then's instructions must use instructionsSliceUpto(else_gz) to avoid
|
|
// including else_gz's instructions in then's body.
|
|
static void setCondBrPayload(AstGenCtx* ag, uint32_t condbr_inst,
|
|
uint32_t condition, GenZir* then_gz, GenZir* else_gz) {
|
|
uint32_t raw_then_len = gzInstructionsLenUpto(then_gz, else_gz);
|
|
const uint32_t* then_body = gzInstructionsSliceUpto(then_gz, else_gz);
|
|
uint32_t raw_else_len = gzInstructionsLen(else_gz);
|
|
const uint32_t* else_body = gzInstructionsSlice(else_gz);
|
|
|
|
uint32_t then_len = countBodyLenAfterFixups(ag, then_body, raw_then_len);
|
|
uint32_t else_len = countBodyLenAfterFixups(ag, else_body, raw_else_len);
|
|
|
|
ensureExtraCapacity(ag, 3 + then_len + else_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = condition; // CondBr.condition
|
|
ag->extra[ag->extra_len++] = then_len; // CondBr.then_body_len
|
|
ag->extra[ag->extra_len++] = else_len; // CondBr.else_body_len
|
|
for (uint32_t i = 0; i < raw_then_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, then_body[i]);
|
|
for (uint32_t i = 0; i < raw_else_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, else_body[i]);
|
|
|
|
ag->inst_datas[condbr_inst].pl_node.payload_index = payload_index;
|
|
gzUnstack(else_gz);
|
|
gzUnstack(then_gz);
|
|
}
|
|
|
|
// Does this Declaration.Flags.Id have a name? (Zir.zig:2762)
|
|
static bool declIdHasName(DeclFlagsId id) {
|
|
return id != DECL_ID_UNNAMED_TEST && id != DECL_ID_COMPTIME;
|
|
}
|
|
|
|
// Does this Declaration.Flags.Id have a lib name? (Zir.zig:2771)
|
|
static bool declIdHasLibName(DeclFlagsId id) {
|
|
switch (id) {
|
|
case DECL_ID_EXTERN_CONST:
|
|
case DECL_ID_PUB_EXTERN_CONST:
|
|
case DECL_ID_EXTERN_VAR:
|
|
case DECL_ID_EXTERN_VAR_THREADLOCAL:
|
|
case DECL_ID_PUB_EXTERN_VAR:
|
|
case DECL_ID_PUB_EXTERN_VAR_THREADLOCAL:
|
|
return true;
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// Does this Declaration.Flags.Id have a type body? (Zir.zig:2783)
|
|
static bool declIdHasTypeBody(DeclFlagsId id) {
|
|
switch (id) {
|
|
case DECL_ID_UNNAMED_TEST:
|
|
case DECL_ID_TEST:
|
|
case DECL_ID_DECLTEST:
|
|
case DECL_ID_COMPTIME:
|
|
case DECL_ID_CONST_SIMPLE:
|
|
case DECL_ID_PUB_CONST_SIMPLE:
|
|
case DECL_ID_VAR_SIMPLE:
|
|
case DECL_ID_PUB_VAR_SIMPLE:
|
|
return false;
|
|
default:
|
|
return true;
|
|
}
|
|
}
|
|
|
|
// Does this Declaration.Flags.Id have a value body? (Zir.zig:2800)
|
|
static bool declIdHasValueBody(DeclFlagsId id) {
|
|
switch (id) {
|
|
case DECL_ID_EXTERN_CONST_SIMPLE:
|
|
case DECL_ID_EXTERN_CONST:
|
|
case DECL_ID_PUB_EXTERN_CONST_SIMPLE:
|
|
case DECL_ID_PUB_EXTERN_CONST:
|
|
case DECL_ID_EXTERN_VAR:
|
|
case DECL_ID_EXTERN_VAR_THREADLOCAL:
|
|
case DECL_ID_PUB_EXTERN_VAR:
|
|
case DECL_ID_PUB_EXTERN_VAR_THREADLOCAL:
|
|
return false;
|
|
default:
|
|
return true;
|
|
}
|
|
}
|
|
|
|
// Does this Declaration.Flags.Id have special bodies? (Zir.zig:2815)
|
|
static bool declIdHasSpecialBodies(DeclFlagsId id) {
|
|
switch (id) {
|
|
case DECL_ID_UNNAMED_TEST:
|
|
case DECL_ID_TEST:
|
|
case DECL_ID_DECLTEST:
|
|
case DECL_ID_COMPTIME:
|
|
case DECL_ID_CONST_SIMPLE:
|
|
case DECL_ID_CONST_TYPED:
|
|
case DECL_ID_PUB_CONST_SIMPLE:
|
|
case DECL_ID_PUB_CONST_TYPED:
|
|
case DECL_ID_EXTERN_CONST_SIMPLE:
|
|
case DECL_ID_PUB_EXTERN_CONST_SIMPLE:
|
|
case DECL_ID_VAR_SIMPLE:
|
|
case DECL_ID_PUB_VAR_SIMPLE:
|
|
return false;
|
|
default:
|
|
return true;
|
|
}
|
|
}
|
|
|
|
// Mirrors setDeclaration (AstGen.zig:13883).
|
|
// Full version with type/align/linksection/addrspace/value bodies.
|
|
typedef struct {
|
|
uint32_t src_line;
|
|
uint32_t src_column;
|
|
DeclFlagsId id;
|
|
uint32_t name; // NullTerminatedString index
|
|
uint32_t lib_name; // NullTerminatedString index (UINT32_MAX=none)
|
|
const uint32_t* type_body;
|
|
uint32_t type_body_len;
|
|
const uint32_t* align_body;
|
|
uint32_t align_body_len;
|
|
const uint32_t* linksection_body;
|
|
uint32_t linksection_body_len;
|
|
const uint32_t* addrspace_body;
|
|
uint32_t addrspace_body_len;
|
|
const uint32_t* value_body;
|
|
uint32_t value_body_len;
|
|
} SetDeclArgs;
|
|
|
|
static void setDeclaration(
|
|
AstGenCtx* ag, uint32_t decl_inst, SetDeclArgs args) {
|
|
DeclFlagsId id = args.id;
|
|
bool has_name = declIdHasName(id);
|
|
bool has_lib_name = declIdHasLibName(id);
|
|
bool has_type_body_field = declIdHasTypeBody(id);
|
|
bool has_special_bodies = declIdHasSpecialBodies(id);
|
|
bool has_value_body_field = declIdHasValueBody(id);
|
|
|
|
uint32_t type_len
|
|
= countBodyLenAfterFixups(ag, args.type_body, args.type_body_len);
|
|
uint32_t align_len
|
|
= countBodyLenAfterFixups(ag, args.align_body, args.align_body_len);
|
|
uint32_t linksection_len = countBodyLenAfterFixups(
|
|
ag, args.linksection_body, args.linksection_body_len);
|
|
uint32_t addrspace_len = countBodyLenAfterFixups(
|
|
ag, args.addrspace_body, args.addrspace_body_len);
|
|
uint32_t value_len
|
|
= countBodyLenAfterFixups(ag, args.value_body, args.value_body_len);
|
|
|
|
uint32_t need = 6; // src_hash[4] + flags[2]
|
|
if (has_name)
|
|
need++;
|
|
if (has_lib_name)
|
|
need++;
|
|
if (has_type_body_field)
|
|
need++;
|
|
if (has_special_bodies)
|
|
need += 3;
|
|
if (has_value_body_field)
|
|
need++;
|
|
need += type_len + align_len + linksection_len + addrspace_len + value_len;
|
|
ensureExtraCapacity(ag, need);
|
|
|
|
uint32_t payload_start = ag->extra_len;
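    // Payload layout (in order): src_hash[4], flags[2], then the optional
    // fields selected by the id: name, lib_name, type_body_len,
    // {align,linksection,addrspace}_body_len, value_body_len, followed by
    // the bodies themselves in that same order.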
|
|
|
|
// src_hash (4 words): zero-filled; hash comparison skipped in tests.
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
|
|
// Declaration.Flags: packed struct(u64) { src_line: u30, src_column: u29,
|
|
// id: u5 } (Zir.zig:2719)
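    // Example: src_line=3, src_column=4, id=DECL_ID_CONST_SIMPLE (4) packs
    // to 0x2000000100000003 and is stored as 0x00000003 then 0x20000001.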
|
|
uint64_t flags = 0;
|
|
flags |= (uint64_t)(args.src_line & 0x3FFFFFFFu);
|
|
flags |= (uint64_t)(args.src_column & 0x1FFFFFFFu) << 30;
|
|
flags |= (uint64_t)((uint32_t)id & 0x1Fu) << 59;
|
|
ag->extra[ag->extra_len++] = (uint32_t)(flags & 0xFFFFFFFFu);
|
|
ag->extra[ag->extra_len++] = (uint32_t)(flags >> 32);
|
|
|
|
if (has_name)
|
|
ag->extra[ag->extra_len++] = args.name;
|
|
if (has_lib_name) {
|
|
ag->extra[ag->extra_len++]
|
|
= (args.lib_name != UINT32_MAX) ? args.lib_name : 0;
|
|
}
|
|
if (has_type_body_field)
|
|
ag->extra[ag->extra_len++] = type_len;
|
|
if (has_special_bodies) {
|
|
ag->extra[ag->extra_len++] = align_len;
|
|
ag->extra[ag->extra_len++] = linksection_len;
|
|
ag->extra[ag->extra_len++] = addrspace_len;
|
|
}
|
|
if (has_value_body_field)
|
|
ag->extra[ag->extra_len++] = value_len;
|
|
|
|
for (uint32_t i = 0; i < args.type_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, args.type_body[i]);
|
|
for (uint32_t i = 0; i < args.align_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, args.align_body[i]);
|
|
for (uint32_t i = 0; i < args.linksection_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, args.linksection_body[i]);
|
|
for (uint32_t i = 0; i < args.addrspace_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, args.addrspace_body[i]);
|
|
for (uint32_t i = 0; i < args.value_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, args.value_body[i]);
|
|
|
|
ag->inst_datas[decl_inst].declaration.payload_index = payload_start;
|
|
}
|
|
|
|
// --- StructDecl.Small packing (Zir.zig StructDecl.Small) ---
|
|
|
|
typedef struct {
|
|
bool has_captures_len;
|
|
bool has_fields_len;
|
|
bool has_decls_len;
|
|
bool has_backing_int;
|
|
bool known_non_opv;
|
|
bool known_comptime_only;
|
|
uint8_t name_strategy; // 2 bits
|
|
uint8_t layout; // 2 bits
|
|
bool any_default_inits;
|
|
bool any_comptime_fields;
|
|
bool any_aligned_fields;
|
|
} StructDeclSmall;
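// Bit layout of the packed u16 (mirrors Zir.Inst.StructDecl.Small):
//   bit 0  has_captures_len      bits 6-7   name_strategy
//   bit 1  has_fields_len        bits 8-9   layout
//   bit 2  has_decls_len         bit 10     any_default_inits
//   bit 3  has_backing_int       bit 11     any_comptime_fields
//   bit 4  known_non_opv         bit 12     any_aligned_fields
//   bit 5  known_comptime_only   bits 13-15 unused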
|
|
|
|
static uint16_t packStructDeclSmall(StructDeclSmall s) {
|
|
uint16_t r = 0;
|
|
if (s.has_captures_len)
|
|
r |= (1u << 0);
|
|
if (s.has_fields_len)
|
|
r |= (1u << 1);
|
|
if (s.has_decls_len)
|
|
r |= (1u << 2);
|
|
if (s.has_backing_int)
|
|
r |= (1u << 3);
|
|
if (s.known_non_opv)
|
|
r |= (1u << 4);
|
|
if (s.known_comptime_only)
|
|
r |= (1u << 5);
|
|
r |= (uint16_t)(s.name_strategy & 0x3u) << 6;
|
|
r |= (uint16_t)(s.layout & 0x3u) << 8;
|
|
if (s.any_default_inits)
|
|
r |= (1u << 10);
|
|
if (s.any_comptime_fields)
|
|
r |= (1u << 11);
|
|
if (s.any_aligned_fields)
|
|
r |= (1u << 12);
|
|
return r;
|
|
}
|
|
|
|
// Mirrors GenZir.setStruct (AstGen.zig:12935).
|
|
// Writes StructDecl payload and optional length fields.
|
|
// The caller appends captures, backing_int, decls, fields, bodies after.
|
|
static void setStruct(AstGenCtx* ag, uint32_t inst, uint32_t src_node,
|
|
StructDeclSmall small, uint32_t captures_len, uint32_t fields_len,
|
|
uint32_t decls_len) {
|
|
ensureExtraCapacity(ag, 6 + 3);
|
|
|
|
uint32_t payload_index = ag->extra_len;
|
|
|
|
// fields_hash (4 words): zero-filled; hash comparison skipped in tests.
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
|
|
ag->extra[ag->extra_len++] = ag->source_line;
|
|
ag->extra[ag->extra_len++] = src_node;
|
|
|
|
if (small.has_captures_len)
|
|
ag->extra[ag->extra_len++] = captures_len;
|
|
if (small.has_fields_len)
|
|
ag->extra[ag->extra_len++] = fields_len;
|
|
if (small.has_decls_len)
|
|
ag->extra[ag->extra_len++] = decls_len;
|
|
|
|
ag->inst_tags[inst] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
memset(&data, 0, sizeof(data));
|
|
data.extended.opcode = (uint16_t)ZIR_EXT_STRUCT_DECL;
|
|
data.extended.small = packStructDeclSmall(small);
|
|
data.extended.operand = payload_index;
|
|
ag->inst_datas[inst] = data;
|
|
}
|
|
|
|
// --- scanContainer (AstGen.zig:13384) ---
|
|
|
|
// Mirrors scanContainer (AstGen.zig:13384).
|
|
// Populates the namespace's decl table for identifier resolution.
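// Also counts declarations (var/fn decls, comptime blocks, tests) and
// interns container field and test names so that string_bytes ordering
// matches upstream; fields do not contribute to the returned decl count.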
|
|
static uint32_t scanContainer(AstGenCtx* ag, ScopeNamespace* ns,
|
|
const uint32_t* members, uint32_t member_count) {
|
|
const Ast* tree = ag->tree;
|
|
uint32_t decl_count = 0;
|
|
for (uint32_t i = 0; i < member_count; i++) {
|
|
uint32_t member = members[i];
|
|
AstNodeTag tag = tree->nodes.tags[member];
|
|
switch (tag) {
|
|
case AST_NODE_GLOBAL_VAR_DECL:
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
case AST_NODE_ALIGNED_VAR_DECL: {
|
|
decl_count++;
|
|
uint32_t name_token = tree->nodes.main_tokens[member] + 1;
|
|
uint32_t name_str = identAsString(ag, name_token);
|
|
scopeNamespaceAddDecl(ns, name_str, member);
|
|
break;
|
|
}
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO:
|
|
case AST_NODE_FN_DECL: {
|
|
decl_count++;
|
|
uint32_t name_token = tree->nodes.main_tokens[member] + 1;
|
|
uint32_t name_str = identAsString(ag, name_token);
|
|
scopeNamespaceAddDecl(ns, name_str, member);
|
|
break;
|
|
}
|
|
// Container fields: add field name to string table for ordering
|
|
// (AstGen.zig:13509).
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
uint32_t main_token = tree->nodes.main_tokens[member];
|
|
identAsString(ag, main_token);
|
|
break;
|
|
}
|
|
case AST_NODE_COMPTIME:
|
|
decl_count++;
|
|
break;
|
|
case AST_NODE_TEST_DECL: {
|
|
decl_count++;
|
|
// Process test name string to match upstream string table
|
|
// ordering (AstGen.zig:13465-13500).
|
|
uint32_t test_name_token = tree->nodes.main_tokens[member] + 1;
|
|
TokenizerTag tt = tree->tokens.tags[test_name_token];
|
|
if (tt == TOKEN_STRING_LITERAL) {
|
|
uint32_t si, sl;
|
|
strLitAsString(ag, test_name_token, &si, &sl);
|
|
} else if (tt == TOKEN_IDENTIFIER) {
|
|
identAsString(ag, test_name_token);
|
|
}
|
|
break;
|
|
}
|
|
default:
|
|
break;
|
|
}
|
|
}
|
|
return decl_count;
|
|
}
|
|
|
|
// --- Import tracking ---
|
|
|
|
static void addImport(AstGenCtx* ag, uint32_t name_index, uint32_t token) {
|
|
// Check for duplicates.
|
|
for (uint32_t i = 0; i < ag->imports_len; i++) {
|
|
if (ag->imports[i].name == name_index)
|
|
return;
|
|
}
|
|
if (ag->imports_len >= ag->imports_cap) {
|
|
uint32_t new_cap = ag->imports_cap > 0 ? ag->imports_cap * 2 : 4;
|
|
ImportEntry* p = realloc(ag->imports, new_cap * sizeof(ImportEntry));
|
|
if (!p)
|
|
exit(1);
|
|
ag->imports = p;
|
|
ag->imports_cap = new_cap;
|
|
}
|
|
ag->imports[ag->imports_len].name = name_index;
|
|
ag->imports[ag->imports_len].token = token;
|
|
ag->imports_len++;
|
|
}
|
|
|
|
// Write imports list to extra (AstGen.zig:227-244).
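// Layout: extra[ZIR_EXTRA_IMPORTS] holds the index of a trailer of the form
// { imports_len, (name, token) * imports_len }, or 0 when there are none.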
|
|
static void writeImports(AstGenCtx* ag) {
|
|
if (ag->imports_len == 0) {
|
|
ag->extra[ZIR_EXTRA_IMPORTS] = 0;
|
|
return;
|
|
}
|
|
uint32_t need = 1 + ag->imports_len * 2;
|
|
ensureExtraCapacity(ag, need);
|
|
uint32_t imports_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = ag->imports_len;
|
|
for (uint32_t i = 0; i < ag->imports_len; i++) {
|
|
ag->extra[ag->extra_len++] = ag->imports[i].name;
|
|
ag->extra[ag->extra_len++] = ag->imports[i].token;
|
|
}
|
|
ag->extra[ZIR_EXTRA_IMPORTS] = imports_index;
|
|
}
|
|
|
|
// ri.br() (AstGen.zig:274-282): convert coerced_ty to ty for branching.
|
|
static inline ResultLoc rlBr(ResultLoc rl) {
|
|
if (rl.tag == RL_COERCED_TY) {
|
|
return (ResultLoc) {
|
|
.tag = RL_TY, .data = rl.data, .src_node = 0, .ctx = rl.ctx
|
|
};
|
|
}
|
|
return rl;
|
|
}
|
|
|
|
// setBreakResultInfo (AstGen.zig:11905-11926): compute break result info
|
|
// from parent RL. Converts coerced_ty → ty, discard → discard, else passes
|
|
// through. For ptr/inferred_ptr, converts to ty/none respectively.
|
|
static ResultLoc breakResultInfo(
|
|
GenZir* gz, ResultLoc parent_rl, uint32_t node, bool need_rl) {
|
|
// First: compute block_ri (AstGen.zig:7639-7646).
|
|
// When need_rl is true, forward the rl as-is (don't convert ptr→ty).
|
|
ResultLoc block_ri;
|
|
if (need_rl) {
|
|
block_ri = parent_rl;
|
|
} else {
|
|
switch (parent_rl.tag) {
|
|
case RL_PTR: {
|
|
uint32_t ptr_ty
|
|
= addUnNode(gz, ZIR_INST_TYPEOF, parent_rl.data, node);
|
|
uint32_t ty = addUnNode(gz, ZIR_INST_ELEM_TYPE, ptr_ty, node);
|
|
block_ri = (ResultLoc) {
|
|
.tag = RL_TY, .data = ty, .src_node = 0, .ctx = parent_rl.ctx
|
|
};
|
|
break;
|
|
}
|
|
case RL_INFERRED_PTR:
|
|
block_ri = (ResultLoc) {
|
|
.tag = RL_NONE, .data = 0, .src_node = 0, .ctx = parent_rl.ctx
|
|
};
|
|
break;
|
|
default:
|
|
block_ri = parent_rl;
|
|
break;
|
|
}
|
|
}
|
|
// Then: setBreakResultInfo (AstGen.zig:11910-11925).
|
|
switch (block_ri.tag) {
|
|
case RL_COERCED_TY:
|
|
return (ResultLoc) { .tag = RL_TY,
|
|
.data = block_ri.data,
|
|
.src_node = 0,
|
|
.ctx = block_ri.ctx };
|
|
case RL_DISCARD:
|
|
// Don't forward ctx (AstGen.zig:11916-11920).
|
|
return RL_DISCARD_VAL;
|
|
default:
|
|
return block_ri;
|
|
}
|
|
}
|
|
|
|
// resultType (AstGen.zig:341-351): extract result type from RL.
|
|
// Returns 0 if no result type available.
|
|
static uint32_t rlResultType(GenZir* gz, ResultLoc rl, uint32_t node) {
|
|
switch (rl.tag) {
|
|
case RL_TY:
|
|
case RL_COERCED_TY:
|
|
return rl.data;
|
|
case RL_REF_COERCED_TY:
|
|
// AstGen.zig:345: .ref_coerced_ty => |ptr_ty| gz.addUnNode(.elem_type,
|
|
// ptr_ty, node)
|
|
return addUnNode(gz, ZIR_INST_ELEM_TYPE, rl.data, node);
|
|
case RL_PTR: {
|
|
// typeof(ptr) -> elem_type (AstGen.zig:346-349).
|
|
uint32_t ptr_ty = addUnNode(gz, ZIR_INST_TYPEOF, rl.data, node);
|
|
return addUnNode(gz, ZIR_INST_ELEM_TYPE, ptr_ty, node);
|
|
}
|
|
default:
|
|
return 0;
|
|
}
|
|
}
|
|
|
|
// Mirrors ResultLoc.resultTypeForCast (AstGen.zig:356-368).
|
|
// Like rlResultType but errors if no result type is available.
|
|
static uint32_t rlResultTypeForCast(GenZir* gz, ResultLoc rl, uint32_t node) {
|
|
uint32_t ty = rlResultType(gz, rl, node);
|
|
if (ty != 0)
|
|
return ty;
|
|
SET_ERROR(gz->astgen);
|
|
return 0;
|
|
}
|
|
|
|
static bool endsWithNoReturn(GenZir* gz);
|
|
|
|
// Mirrors Zir.Inst.Tag.isAlwaysVoid (Zir.zig:1343-1608).
|
|
static bool isAlwaysVoid(ZirInstTag tag, ZirInstData data) {
|
|
switch (tag) {
|
|
case ZIR_INST_DBG_STMT:
|
|
case ZIR_INST_DBG_VAR_PTR:
|
|
case ZIR_INST_DBG_VAR_VAL:
|
|
case ZIR_INST_ENSURE_RESULT_USED:
|
|
case ZIR_INST_ENSURE_RESULT_NON_ERROR:
|
|
case ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID:
|
|
case ZIR_INST_SET_EVAL_BRANCH_QUOTA:
|
|
case ZIR_INST_ATOMIC_STORE:
|
|
case ZIR_INST_STORE_NODE:
|
|
case ZIR_INST_STORE_TO_INFERRED_PTR:
|
|
case ZIR_INST_VALIDATE_DEREF:
|
|
case ZIR_INST_VALIDATE_DESTRUCTURE:
|
|
case ZIR_INST_EXPORT:
|
|
case ZIR_INST_SET_RUNTIME_SAFETY:
|
|
case ZIR_INST_MEMCPY:
|
|
case ZIR_INST_MEMSET:
|
|
case ZIR_INST_MEMMOVE:
|
|
case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
|
|
case ZIR_INST_DEFER:
|
|
case ZIR_INST_DEFER_ERR_CODE:
|
|
case ZIR_INST_SAVE_ERR_RET_INDEX:
|
|
case ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL:
|
|
case ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY:
|
|
case ZIR_INST_VALIDATE_STRUCT_INIT_TY:
|
|
case ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY:
|
|
case ZIR_INST_VALIDATE_PTR_STRUCT_INIT:
|
|
case ZIR_INST_VALIDATE_ARRAY_INIT_TY:
|
|
case ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY:
|
|
case ZIR_INST_VALIDATE_PTR_ARRAY_INIT:
|
|
case ZIR_INST_VALIDATE_REF_TY:
|
|
case ZIR_INST_VALIDATE_CONST:
|
|
return true;
|
|
case ZIR_INST_EXTENDED:
|
|
switch (data.extended.opcode) {
|
|
case ZIR_EXT_BRANCH_HINT:
|
|
case ZIR_EXT_BREAKPOINT:
|
|
case ZIR_EXT_DISABLE_INSTRUMENTATION:
|
|
case ZIR_EXT_DISABLE_INTRINSICS:
|
|
return true;
|
|
default:
|
|
return false;
|
|
}
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// rvalue (AstGen.zig:11051-11224): apply result location wrapping.
|
|
static uint32_t rvalue(
|
|
GenZir* gz, ResultLoc rl, uint32_t result, uint32_t node) {
|
|
// isAlwaysVoid check (AstGen.zig:11058-11067).
|
|
if (result >= ZIR_REF_START_INDEX) {
|
|
uint32_t result_index = result - ZIR_REF_START_INDEX;
|
|
if (isAlwaysVoid(gz->astgen->inst_tags[result_index],
|
|
gz->astgen->inst_datas[result_index])) {
|
|
result = ZIR_REF_VOID_VALUE;
|
|
}
|
|
}
|
|
// endsWithNoReturn check (AstGen.zig:11068).
|
|
if (endsWithNoReturn(gz))
|
|
return result;
|
|
switch (rl.tag) {
|
|
case RL_NONE:
|
|
case RL_COERCED_TY:
|
|
return result;
|
|
case RL_DISCARD:
|
|
// ensure_result_non_error (AstGen.zig:11071-11074).
|
|
addUnNode(gz, ZIR_INST_ENSURE_RESULT_NON_ERROR, result, node);
|
|
return ZIR_REF_VOID_VALUE;
|
|
case RL_REF:
|
|
case RL_REF_COERCED_TY: {
|
|
// coerce_ptr_elem_ty for ref_coerced_ty (AstGen.zig:11077-11083).
|
|
uint32_t coerced_result = result;
|
|
if (rl.tag == RL_REF_COERCED_TY) {
|
|
coerced_result = addPlNodeBin(
|
|
gz, ZIR_INST_COERCE_PTR_ELEM_TY, node, rl.data, result);
|
|
}
|
|
AstGenCtx* ag = gz->astgen;
|
|
uint32_t src_token = firstToken(ag->tree, node);
|
|
// If result is not an instruction index (e.g. a well-known ref),
|
|
// emit ref directly (AstGen.zig:11091-11092).
|
|
if (coerced_result < ZIR_REF_START_INDEX) {
|
|
return addUnTok(gz, ZIR_INST_REF, coerced_result, src_token);
|
|
}
|
|
// Deduplication via ref_table (AstGen.zig:11093-11097).
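        // The ref instruction is only created here (makeUnTok does not
        // append it); appendPossiblyRefdBodyInst emits it right after the
        // referenced instruction when the enclosing body is written.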
|
|
uint32_t result_index = coerced_result - ZIR_REF_START_INDEX;
|
|
bool found;
|
|
uint32_t* val_ptr = refTableGetOrPut(ag, result_index, &found);
|
|
if (!found) {
|
|
*val_ptr = makeUnTok(gz, ZIR_INST_REF, coerced_result, src_token);
|
|
}
|
|
return *val_ptr + ZIR_REF_START_INDEX;
|
|
}
|
|
case RL_TY: {
|
|
// Quick elimination of common, unnecessary type coercions
|
|
// (AstGen.zig:11099-11209).
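        // RC packs the result type ref into the high 32 bits and the value
        // ref into the low 32 bits so each (type, value) pair can be
        // matched with a single switch case below.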
|
|
#define RC(t, v) (((uint64_t)(t) << 32) | (uint64_t)(v))
|
|
uint64_t combined = RC(rl.data, result);
|
|
switch (combined) {
|
|
// Identity: type of result is already correct
|
|
// (AstGen.zig:11109-11176).
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U1_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U8_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I8_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U16_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U29_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I16_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U32_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I32_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U64_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I64_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_U128_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_I128_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_USIZE_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ISIZE_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_CHAR_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_SHORT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_USHORT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_INT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_UINT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONG_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_ULONG_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONGLONG_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_ULONGLONG_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_C_LONGDOUBLE_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F16_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F32_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F64_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F80_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_F128_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYOPAQUE_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_BOOL_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_VOID_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_TYPE_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYERROR_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_COMPTIME_INT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_COMPTIME_FLOAT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_NORETURN_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYFRAME_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_NULL_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_UNDEFINED_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ENUM_LITERAL_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_PTR_USIZE_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_PTR_CONST_COMPTIME_INT_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_U8_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_CONST_U8_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_MANYPTR_CONST_U8_SENTINEL_0_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_SLICE_CONST_U8_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_SLICE_CONST_U8_SENTINEL_0_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_GENERIC_POISON_TYPE):
|
|
case RC(ZIR_REF_TYPE_TYPE, ZIR_REF_EMPTY_TUPLE_TYPE):
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO):
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE):
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_NEGATIVE_ONE):
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF_USIZE):
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_USIZE):
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_USIZE):
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_UNDEF_U1):
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO_U1):
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE_U1):
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO_U8):
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE_U8):
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_FOUR_U8):
|
|
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_UNDEF_BOOL):
|
|
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_BOOL_TRUE):
|
|
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_BOOL_FALSE):
|
|
case RC(ZIR_REF_VOID_TYPE, ZIR_REF_VOID_VALUE):
|
|
return result;
|
|
// Conversions (AstGen.zig:11178-11202).
|
|
case RC(ZIR_REF_BOOL_TYPE, ZIR_REF_UNDEF):
|
|
return ZIR_REF_UNDEF_BOOL;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF):
|
|
return ZIR_REF_UNDEF_USIZE;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_UNDEF_U1):
|
|
return ZIR_REF_UNDEF_USIZE;
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_UNDEF):
|
|
return ZIR_REF_UNDEF_U1;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO):
|
|
return ZIR_REF_ZERO_USIZE;
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO):
|
|
return ZIR_REF_ZERO_U1;
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO):
|
|
return ZIR_REF_ZERO_U8;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE):
|
|
return ZIR_REF_ONE_USIZE;
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE):
|
|
return ZIR_REF_ONE_U1;
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE):
|
|
return ZIR_REF_ONE_U8;
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_USIZE):
|
|
return ZIR_REF_ZERO;
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ZERO_USIZE):
|
|
return ZIR_REF_ZERO_U1;
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ZERO_USIZE):
|
|
return ZIR_REF_ZERO_U8;
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_USIZE):
|
|
return ZIR_REF_ONE;
|
|
case RC(ZIR_REF_U1_TYPE, ZIR_REF_ONE_USIZE):
|
|
return ZIR_REF_ONE_U1;
|
|
case RC(ZIR_REF_U8_TYPE, ZIR_REF_ONE_USIZE):
|
|
return ZIR_REF_ONE_U8;
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_U1):
|
|
return ZIR_REF_ZERO;
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ZERO_U8):
|
|
return ZIR_REF_ZERO;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_U1):
|
|
return ZIR_REF_ZERO_USIZE;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ZERO_U8):
|
|
return ZIR_REF_ZERO_USIZE;
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_U1):
|
|
return ZIR_REF_ONE;
|
|
case RC(ZIR_REF_COMPTIME_INT_TYPE, ZIR_REF_ONE_U8):
|
|
return ZIR_REF_ONE;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_U1):
|
|
return ZIR_REF_ONE_USIZE;
|
|
case RC(ZIR_REF_USIZE_TYPE, ZIR_REF_ONE_U8):
|
|
return ZIR_REF_ONE_USIZE;
|
|
default: {
|
|
ZirInstTag as_tag = (rl.ctx == RI_CTX_SHIFT_OP)
|
|
? ZIR_INST_AS_SHIFT_OPERAND
|
|
: ZIR_INST_AS_NODE;
|
|
return addPlNodeBin(gz, as_tag, node, rl.data, result);
|
|
}
|
|
}
|
|
#undef RC
|
|
}
|
|
case RL_PTR:
|
|
// store_node (AstGen.zig:11211-11216).
|
|
addPlNodeBin(gz, ZIR_INST_STORE_NODE,
|
|
rl.src_node != 0 ? rl.src_node : node, rl.data, result);
|
|
return ZIR_REF_VOID_VALUE;
|
|
case RL_INFERRED_PTR:
|
|
// store_to_inferred_ptr (AstGen.zig:11218-11223).
|
|
addPlNodeBin(
|
|
gz, ZIR_INST_STORE_TO_INFERRED_PTR, node, rl.data, result);
|
|
return ZIR_REF_VOID_VALUE;
|
|
case RL_DESTRUCTURE: {
|
|
// validate_destructure (AstGen.zig:11225-11258).
|
|
uint32_t ds_node = rl.src_node;
|
|
uint32_t comp_len = rl.components_len;
|
|
// Emit validate_destructure: pl_node with ValidateDestructure payload.
|
|
// Payload: { operand, destructure_node (relative), expect_len }
|
|
{
|
|
uint32_t payload_idx = gz->astgen->extra_len;
|
|
ensureExtraCapacity(gz->astgen, 3);
|
|
gz->astgen->extra[gz->astgen->extra_len++] = result; // operand
|
|
gz->astgen->extra[gz->astgen->extra_len++]
|
|
= (uint32_t)((int32_t)ds_node
|
|
- (int32_t)gz->decl_node_index); // destructure_node
|
|
gz->astgen->extra[gz->astgen->extra_len++]
|
|
= comp_len; // expect_len
|
|
addPlNodePayloadIndex(
|
|
gz, ZIR_INST_VALIDATE_DESTRUCTURE, node, payload_idx);
|
|
}
|
|
for (uint32_t i = 0; i < comp_len; i++) {
|
|
const DestructureComponent* comp = &rl.components[i];
|
|
if (comp->tag == DC_DISCARD)
|
|
continue;
|
|
// elem_val_imm: operand=result, idx=i.
|
|
uint32_t elem_inst = reserveInstructionIndex(gz->astgen);
|
|
gz->astgen->inst_tags[elem_inst] = ZIR_INST_ELEM_VAL_IMM;
|
|
memset(&gz->astgen->inst_datas[elem_inst], 0, sizeof(ZirInstData));
|
|
gz->astgen->inst_datas[elem_inst].elem_val_imm.operand = result;
|
|
gz->astgen->inst_datas[elem_inst].elem_val_imm.idx = i;
|
|
gzAppendInstruction(gz, elem_inst);
|
|
uint32_t elem_ref = elem_inst + ZIR_REF_START_INDEX;
|
|
switch (comp->tag) {
|
|
case DC_TYPED_PTR:
|
|
addPlNodeBin(gz, ZIR_INST_STORE_NODE,
|
|
comp->src_node != 0 ? comp->src_node : node, comp->inst,
|
|
elem_ref);
|
|
break;
|
|
case DC_INFERRED_PTR:
|
|
addPlNodeBin(gz, ZIR_INST_STORE_TO_INFERRED_PTR, node,
|
|
comp->inst, elem_ref);
|
|
break;
|
|
case DC_DISCARD:
|
|
break; // unreachable
|
|
}
|
|
}
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
}
|
|
return result;
|
|
}
|
|
|
|
// rvalueNoCoercePreRef (AstGen.zig:11042-11049): like rvalue but does NOT
|
|
// emit coerce_ptr_elem_ty for RL_REF_COERCED_TY. Used for local var refs.
|
|
static uint32_t rvalueNoCoercePreRef(
|
|
GenZir* gz, ResultLoc rl, uint32_t result, uint32_t node) {
|
|
if (rl.tag == RL_REF_COERCED_TY) {
|
|
ResultLoc ref_rl = rl;
|
|
ref_rl.tag = RL_REF;
|
|
return rvalue(gz, ref_rl, result, node);
|
|
}
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
|
|
// --- Expression evaluation (AstGen.zig:634) ---
|
|
|
|
// Forward declarations.
|
|
static uint32_t expr(GenZir* gz, Scope* scope, uint32_t node);
|
|
// --- DefersToEmit (AstGen.zig:3008) ---
|
|
#define DEFER_NORMAL_ONLY 0
|
|
#define DEFER_BOTH_SANS_ERR 1
|
|
|
|
// --- DeferCounts (AstGen.zig:2966) ---
|
|
typedef struct {
|
|
bool have_any;
|
|
bool have_normal;
|
|
bool have_err;
|
|
bool need_err_code;
|
|
} DeferCounts;
|
|
static DeferCounts countDefers(const Scope* outer_scope, Scope* inner_scope);
|
|
|
|
static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
|
|
static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node);
|
|
static void assignDestructure(GenZir* gz, Scope* scope, uint32_t node);
|
|
static Scope* assignDestructureMaybeDecls(GenZir* gz, Scope* scope,
|
|
uint32_t node, ScopeLocalVal* val_scopes, uint32_t* val_idx,
|
|
ScopeLocalPtr* ptr_scopes, uint32_t* ptr_idx, uint32_t max_scopes);
|
|
static void assignOp(
|
|
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag);
|
|
static void assignShift(
|
|
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag);
|
|
static void assignShiftSat(GenZir* gz, Scope* scope, uint32_t infix_node);
|
|
static uint32_t shiftOp(
|
|
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag);
|
|
static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column);
|
|
static void genDefers(
|
|
GenZir* gz, const Scope* outer_scope, Scope* inner_scope, int which);
|
|
static void emitDbgStmtForceCurrentIndex(
|
|
GenZir* gz, uint32_t line, uint32_t column);
|
|
static void emitDbgNode(GenZir* gz, uint32_t node);
|
|
static void addDbgVar(
|
|
GenZir* gz, ZirInstTag tag, uint32_t name, uint32_t inst);
|
|
static bool addEnsureResult(
|
|
GenZir* gz, uint32_t maybe_unused_result, uint32_t statement);
|
|
static void blockExprStmts(
|
|
GenZir* gz, Scope* scope, const uint32_t* statements, uint32_t stmt_count);
|
|
static uint32_t fullBodyExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
|
|
static bool nameStratExpr(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint8_t name_strategy, uint32_t* out_ref);
|
|
static bool tokenIsUnderscore(const Ast* tree, uint32_t ident_token);
|
|
static uint32_t containerDecl(
|
|
GenZir* gz, Scope* scope, uint32_t node, uint8_t name_strategy);
|
|
static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
|
|
const uint32_t* members, uint32_t members_len, uint8_t layout,
|
|
uint32_t backing_int_node, uint8_t name_strategy);
|
|
static uint32_t tupleDecl(AstGenCtx* ag, GenZir* gz, uint32_t node,
|
|
const uint32_t* members, uint32_t members_len, uint8_t layout,
|
|
uint32_t backing_int_node);
|
|
static uint32_t enumDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
|
|
const uint32_t* members, uint32_t members_len, uint32_t arg_node,
|
|
uint8_t name_strategy);
|
|
static uint32_t blockExprExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
|
|
static uint32_t ifExpr(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
|
|
static uint32_t forExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_statement);
|
|
static uint32_t orelseCatchExpr(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, ZirInstTag cond_op, ZirInstTag unwrap_op,
|
|
ZirInstTag unwrap_code_op, uint32_t payload_token);
|
|
static uint32_t arrayInitDotExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
|
|
static uint32_t switchExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node);
|
|
static uint32_t switchExprErrUnion(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t catch_or_if_node,
|
|
int node_ty); // 0=catch, 1=if
|
|
static uint32_t whileExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_statement);
|
|
#define EVAL_TO_ERROR_NEVER 0
|
|
#define EVAL_TO_ERROR_ALWAYS 1
|
|
#define EVAL_TO_ERROR_MAYBE 2
|
|
static int nodeMayEvalToError(const Ast* tree, uint32_t node);
|
|
static bool nodeMayAppendToErrorTrace(const Ast* tree, uint32_t node);
|
|
static void addSaveErrRetIndex(GenZir* gz, uint32_t operand);
|
|
static void addRestoreErrRetIndexBlock(
|
|
GenZir* gz, uint32_t block_inst, uint32_t operand, uint32_t node);
|
|
static void restoreErrRetIndex(GenZir* gz, uint32_t block_inst, ResultLoc rl,
|
|
uint32_t node, uint32_t result);
|
|
static uint32_t identAsString(AstGenCtx* ag, uint32_t token);
|
|
static uint32_t lastToken(const Ast* tree, uint32_t node);
|
|
static uint32_t simpleBinOp(
|
|
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag);
|
|
|
|
// Mirrors GenZir.endsWithNoReturn (AstGen.zig:11770).
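// Note: this tag set must stay in sync with refIsNoReturn below.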
|
|
static bool endsWithNoReturn(GenZir* gz) {
|
|
uint32_t len = gzInstructionsLen(gz);
|
|
if (len == 0)
|
|
return false;
|
|
uint32_t last = gzInstructionsSlice(gz)[len - 1];
|
|
ZirInstTag tag = gz->astgen->inst_tags[last];
|
|
switch (tag) {
|
|
case ZIR_INST_BREAK:
|
|
case ZIR_INST_BREAK_INLINE:
|
|
case ZIR_INST_CONDBR:
|
|
case ZIR_INST_CONDBR_INLINE:
|
|
case ZIR_INST_COMPILE_ERROR:
|
|
case ZIR_INST_RET_NODE:
|
|
case ZIR_INST_RET_LOAD:
|
|
case ZIR_INST_RET_IMPLICIT:
|
|
case ZIR_INST_RET_ERR_VALUE:
|
|
case ZIR_INST_UNREACHABLE:
|
|
case ZIR_INST_REPEAT:
|
|
case ZIR_INST_REPEAT_INLINE:
|
|
case ZIR_INST_PANIC:
|
|
case ZIR_INST_TRAP:
|
|
case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
|
|
case ZIR_INST_SWITCH_CONTINUE:
|
|
return true;
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// Mirrors GenZir.refIsNoReturn (AstGen.zig:11885).
|
|
static bool refIsNoReturn(GenZir* gz, uint32_t inst_ref) {
|
|
if (inst_ref == ZIR_REF_UNREACHABLE_VALUE)
|
|
return true;
|
|
if (inst_ref >= ZIR_REF_START_INDEX) {
|
|
uint32_t inst_index = inst_ref - ZIR_REF_START_INDEX;
|
|
ZirInstTag tag = gz->astgen->inst_tags[inst_index];
|
|
switch (tag) {
|
|
case ZIR_INST_BREAK:
|
|
case ZIR_INST_BREAK_INLINE:
|
|
case ZIR_INST_CONDBR:
|
|
case ZIR_INST_CONDBR_INLINE:
|
|
case ZIR_INST_COMPILE_ERROR:
|
|
case ZIR_INST_RET_NODE:
|
|
case ZIR_INST_RET_LOAD:
|
|
case ZIR_INST_RET_IMPLICIT:
|
|
case ZIR_INST_RET_ERR_VALUE:
|
|
case ZIR_INST_UNREACHABLE:
|
|
case ZIR_INST_REPEAT:
|
|
case ZIR_INST_REPEAT_INLINE:
|
|
case ZIR_INST_PANIC:
|
|
case ZIR_INST_TRAP:
|
|
case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
|
|
case ZIR_INST_SWITCH_CONTINUE:
|
|
return true;
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// Mirrors reachableExpr (AstGen.zig:408-416).
|
|
// Wraps exprRl and emits an error if the result is noreturn.
|
|
static uint32_t reachableExpr(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint32_t reachable_node) {
|
|
uint32_t result = exprRl(gz, scope, rl, node);
|
|
if (refIsNoReturn(gz, result)) {
|
|
(void)reachable_node;
|
|
SET_ERROR(gz->astgen);
|
|
}
|
|
return result;
|
|
}
|
|
|
|
// Forward declaration needed by reachableExprComptime.
|
|
static uint32_t comptimeExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, uint32_t reason);
|
|
|
|
// Mirrors reachableExprComptime (AstGen.zig:418-438).
|
|
// Like reachableExpr but optionally wraps in comptimeExpr when
|
|
// comptime_reason is non-zero (i.e. force_comptime is set).
|
|
static uint32_t reachableExprComptime(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint32_t reachable_node, uint32_t comptime_reason) {
|
|
uint32_t result;
|
|
if (comptime_reason != 0)
|
|
result = comptimeExpr(gz, scope, rl, node, comptime_reason);
|
|
else
|
|
result = exprRl(gz, scope, rl, node);
|
|
if (refIsNoReturn(gz, result)) {
|
|
(void)reachable_node;
|
|
SET_ERROR(gz->astgen);
|
|
}
|
|
return result;
|
|
}
|
|
|
|
static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node);
|
|
|
|
// SimpleComptimeReason (std.zig:727) — values used in block_comptime payload.
|
|
#define COMPTIME_REASON_C_INCLUDE 9
|
|
#define COMPTIME_REASON_C_UNDEF 10
|
|
#define COMPTIME_REASON_TYPE 29
|
|
#define COMPTIME_REASON_ARRAY_SENTINEL 30
|
|
#define COMPTIME_REASON_POINTER_SENTINEL 31
|
|
#define COMPTIME_REASON_SLICE_SENTINEL 32
|
|
#define COMPTIME_REASON_ARRAY_LENGTH 33
|
|
#define COMPTIME_REASON_ALIGN 50
|
|
#define COMPTIME_REASON_ADDRSPACE 51
|
|
#define COMPTIME_REASON_FIELD_NAME 42
|
|
#define COMPTIME_REASON_COMPTIME_KEYWORD 53
|
|
#define COMPTIME_REASON_ARRAY_MUL_FACTOR 22
|
|
#define COMPTIME_REASON_COMPILE_ERROR_STRING 19
|
|
#define COMPTIME_REASON_SWITCH_ITEM 56
|
|
#define COMPTIME_REASON_TUPLE_FIELD_DEFAULT_VALUE 57
|
|
#define COMPTIME_REASON_UNION_FIELD_NAME 45
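// These values are written verbatim as the block_comptime reason operand,
// so they must match the SimpleComptimeReason enum values in upstream
// std.zig.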
|
|
|
|
// Mirrors comptimeExpr2 (AstGen.zig:1982).
|
|
// Evaluates a node in a comptime block_comptime scope.
|
|
static uint32_t comptimeExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, uint32_t reason) {
|
|
// Skip wrapping when already in comptime context (AstGen.zig:1990).
|
|
if (gz->is_comptime)
|
|
return exprRl(gz, scope, rl, node);
|
|
// Optimization: certain node types are trivially comptime and don't need
|
|
// a block_comptime wrapper (AstGen.zig:1997-2046).
|
|
AstGenCtx* ag = gz->astgen;
|
|
AstNodeTag tag = ag->tree->nodes.tags[node];
|
|
switch (tag) {
|
|
// Identifier handling (AstGen.zig:2000-2003):
|
|
// Upstream calls identifier() with force_comptime which resolves
|
|
// primitives/int types directly and only wraps others in block_comptime.
|
|
// We mirror this by resolving primitives here and falling through for
|
|
// non-primitives.
|
|
case AST_NODE_IDENTIFIER: {
|
|
uint32_t prim = tryResolvePrimitiveIdent(gz, node);
|
|
if (prim != ZIR_REF_NONE)
|
|
return prim;
|
|
break; // non-primitive: fall through to block_comptime wrapping
|
|
}
|
|
case AST_NODE_NUMBER_LITERAL:
|
|
case AST_NODE_CHAR_LITERAL:
|
|
case AST_NODE_STRING_LITERAL:
|
|
case AST_NODE_MULTILINE_STRING_LITERAL:
|
|
case AST_NODE_ENUM_LITERAL:
|
|
case AST_NODE_ERROR_VALUE:
|
|
// Type expressions that force comptime eval of sub-expressions
|
|
// (AstGen.zig:2017-2042).
|
|
case AST_NODE_ERROR_UNION:
|
|
case AST_NODE_MERGE_ERROR_SETS:
|
|
case AST_NODE_OPTIONAL_TYPE:
|
|
case AST_NODE_PTR_TYPE_ALIGNED:
|
|
case AST_NODE_PTR_TYPE_SENTINEL:
|
|
case AST_NODE_PTR_TYPE:
|
|
case AST_NODE_PTR_TYPE_BIT_RANGE:
|
|
case AST_NODE_ARRAY_TYPE:
|
|
case AST_NODE_ARRAY_TYPE_SENTINEL:
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO:
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
|
|
return exprRl(gz, scope, rl, node);
|
|
default:
|
|
break;
|
|
}
|
|
// General case: wrap in block_comptime (AstGen.zig:2078-2096).
|
|
uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
block_scope.is_comptime = true;
|
|
// Transform RL to type-only (AstGen.zig:2084-2090).
|
|
ResultLoc ty_only_rl;
|
|
uint32_t res_ty = rlResultType(gz, rl, node);
|
|
if (res_ty != 0)
|
|
ty_only_rl = (ResultLoc) {
|
|
.tag = RL_COERCED_TY, .data = res_ty, .src_node = 0, .ctx = rl.ctx
|
|
};
|
|
else
|
|
ty_only_rl = (ResultLoc) {
|
|
.tag = RL_NONE, .data = 0, .src_node = 0, .ctx = rl.ctx
|
|
};
|
|
uint32_t result = exprRl(&block_scope, scope, ty_only_rl, node);
|
|
addBreak(&block_scope, ZIR_INST_BREAK_INLINE, block_inst, result,
|
|
AST_NODE_OFFSET_NONE);
|
|
setBlockComptimeBody(ag, &block_scope, block_inst, reason);
|
|
gzAppendInstruction(gz, block_inst);
|
|
return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
|
|
}
|
|
|
|
// Mirrors typeExpr (AstGen.zig:394).
|
|
static uint32_t typeExpr(GenZir* gz, Scope* scope, uint32_t node) {
|
|
ResultLoc rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_TYPE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
return comptimeExpr(gz, scope, rl, node, COMPTIME_REASON_TYPE);
|
|
}
|
|
|
|
// Sign parameter for numberLiteral (AstGen.zig:8674).
|
|
enum NumSign { NUM_SIGN_POSITIVE, NUM_SIGN_NEGATIVE };
|
|
|
|
// Mirrors numberLiteral (AstGen.zig:8679).
|
|
// Parses integer literals and returns the appropriate ZIR ref; float
// literals are not handled yet (parsing stops at the '.').
// source_node is the node attached to the negate instruction and may differ
// from node when called from negation.
|
|
static uint32_t numberLiteral(
|
|
GenZir* gz, uint32_t node, uint32_t source_node, enum NumSign sign) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
uint32_t num_token = ag->tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = ag->tree->tokens.starts[num_token];
|
|
const char* source = ag->tree->source;
|
|
|
|
// Determine token length by scanning to next non-number character.
|
|
uint32_t tok_end = tok_start;
|
|
while (tok_end < ag->tree->source_len
|
|
&& ((source[tok_end] >= '0' && source[tok_end] <= '9')
|
|
|| source[tok_end] == '_' || source[tok_end] == '.'
|
|
|| source[tok_end] == 'x' || source[tok_end] == 'o'
|
|
|| source[tok_end] == 'b'
|
|
|| (source[tok_end] >= 'a' && source[tok_end] <= 'f')
|
|
|| (source[tok_end] >= 'A' && source[tok_end] <= 'F'))) {
|
|
tok_end++;
|
|
}
|
|
|
|
// Parse the integer value (simplified: decimal and hex only; binary/octal
// prefixes and exponents are not special-cased).
|
|
uint64_t value = 0;
|
|
bool is_hex = false;
|
|
uint32_t pos = tok_start;
|
|
if (tok_end - tok_start >= 2 && source[tok_start] == '0'
|
|
&& source[tok_start + 1] == 'x') {
|
|
is_hex = true;
|
|
pos = tok_start + 2;
|
|
}
|
|
|
|
if (is_hex) {
|
|
for (; pos < tok_end; pos++) {
|
|
if (source[pos] == '_')
|
|
continue;
|
|
if (source[pos] >= '0' && source[pos] <= '9')
|
|
value = value * 16 + (uint64_t)(source[pos] - '0');
|
|
else if (source[pos] >= 'a' && source[pos] <= 'f')
|
|
value = value * 16 + 10 + (uint64_t)(source[pos] - 'a');
|
|
else if (source[pos] >= 'A' && source[pos] <= 'F')
|
|
value = value * 16 + 10 + (uint64_t)(source[pos] - 'A');
|
|
}
|
|
} else {
|
|
for (; pos < tok_end; pos++) {
|
|
if (source[pos] == '_')
|
|
continue;
|
|
if (source[pos] == '.')
|
|
break; // float — not handled yet
|
|
if (source[pos] >= '0' && source[pos] <= '9')
|
|
value = value * 10 + (uint64_t)(source[pos] - '0');
|
|
}
|
|
}
|
|
|
|
// Special cases for 0 and 1 (AstGen.zig:8687-8703).
|
|
// Note: upstream reports an error for a negated zero integer literal
// (AstGen.zig:8687); this port simply returns zero for either sign.
|
|
if (value == 0)
|
|
return ZIR_REF_ZERO;
|
|
if (value == 1) {
|
|
return (sign == NUM_SIGN_POSITIVE) ? ZIR_REF_ONE
|
|
: ZIR_REF_NEGATIVE_ONE;
|
|
}
|
|
|
|
// For other integers, emit the positive value and negate if needed
|
|
// (AstGen.zig:8751-8756).
|
|
uint32_t result = addInt(gz, value);
|
|
if (sign == NUM_SIGN_NEGATIVE) {
|
|
return addUnNode(gz, ZIR_INST_NEGATE, result, source_node);
|
|
}
|
|
return result;
|
|
}
|
|
|
|
// Mirrors builtinCall (AstGen.zig:9191), @import case (AstGen.zig:9242).
|
|
static uint32_t builtinCallImport(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
(void)scope;
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
// For builtin_call_two: data.lhs = first arg node.
|
|
AstData node_data = tree->nodes.datas[node];
|
|
uint32_t operand_node = node_data.lhs;
|
|
|
|
assert(tree->nodes.tags[operand_node] == AST_NODE_STRING_LITERAL);
|
|
uint32_t str_lit_token = tree->nodes.main_tokens[operand_node];
|
|
|
|
uint32_t str_index, str_len;
|
|
strLitAsString(ag, str_lit_token, &str_index, &str_len);
|
|
|
|
// Compute res_ty from result location (AstGen.zig:9257).
|
|
uint32_t res_ty_raw = rlResultType(gz, rl, node);
|
|
uint32_t res_ty = (res_ty_raw != 0) ? res_ty_raw : ZIR_REF_NONE;
|
|
|
|
// Write Import payload to extra (Zir.Inst.Import: res_ty, path).
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = res_ty;
|
|
ag->extra[ag->extra_len++] = str_index; // path
|
|
|
|
// Create .import instruction with pl_tok data.
|
|
ZirInstData data;
|
|
data.pl_tok.src_tok = tokenIndexToRelative(gz, str_lit_token);
|
|
data.pl_tok.payload_index = payload_index;
|
|
uint32_t result_ref = addInstruction(gz, ZIR_INST_IMPORT, data);
|
|
|
|
// Track import (AstGen.zig:9269).
|
|
addImport(ag, str_index, str_lit_token);
|
|
|
|
return rvalue(gz, rl, result_ref, node);
|
|
}
|
|
|
|
// Mirrors cImport (AstGen.zig:10011).
|
|
static uint32_t cImportExpr(GenZir* gz, Scope* scope, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
AstData nd = ag->tree->nodes.datas[node];
|
|
uint32_t body_node = nd.lhs; // first arg = body
|
|
|
|
uint32_t block_inst = makeBlockInst(ag, ZIR_INST_C_IMPORT, gz, node);
|
|
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
block_scope.is_comptime = true;
|
|
block_scope.c_import = true;
|
|
|
|
// Use fullBodyExpr to inline unlabeled block body (AstGen.zig:10028).
|
|
uint32_t block_result = fullBodyExpr(
|
|
&block_scope, &block_scope.base, RL_NONE_VAL, body_node);
|
|
|
|
// ensure_result_used on gz (parent), not block_scope (AstGen.zig:10029).
|
|
addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, block_result, node);
|
|
|
|
// break_inline (AstGen.zig:10030-10032).
|
|
makeBreakInline(
|
|
&block_scope, block_inst, ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
|
|
setBlockBody(ag, &block_scope, block_inst);
|
|
// block_scope unstacked now, can add to gz.
|
|
gzAppendInstruction(gz, block_inst);
|
|
|
|
return block_inst + ZIR_REF_START_INDEX; // toRef()
|
|
}
|
|
|
|
// Mirrors simpleCBuiltin (AstGen.zig:9938).
|
|
static uint32_t simpleCBuiltin(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint32_t operand_node, uint16_t ext_tag) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
|
|
// Check c_import scope (AstGen.zig:9947).
|
|
if (!gz->c_import) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Evaluate operand as comptimeExpr with coerced_ty=slice_const_u8_type
|
|
// (AstGen.zig:9948-9954).
|
|
uint32_t comptime_reason = (ext_tag == (uint16_t)ZIR_EXT_C_UNDEF)
|
|
? COMPTIME_REASON_C_UNDEF
|
|
: COMPTIME_REASON_C_INCLUDE;
|
|
ResultLoc operand_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t operand
|
|
= comptimeExpr(gz, scope, operand_rl, operand_node, comptime_reason);
|
|
|
|
// Emit extended instruction with UnNode payload (AstGen.zig:9955).
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = operand;
|
|
|
|
ZirInstData data;
|
|
data.extended.opcode = ext_tag;
|
|
data.extended.small = 0xAAAAu; // undefined (addExtendedPayload passes
|
|
// undefined for small)
|
|
data.extended.operand = payload_index;
|
|
addInstruction(gz, ZIR_INST_EXTENDED, data);
|
|
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
}
|
|
|
|
// Mirrors builtinCall (AstGen.zig:9191) dispatch.
|
|
static uint32_t builtinCall(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
uint32_t builtin_token = tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = tree->tokens.starts[builtin_token];
|
|
const char* source = tree->source;
|
|
|
|
// Identify builtin name from source.
|
|
// Skip '@' prefix and scan identifier.
|
|
uint32_t name_start = tok_start + 1; // skip '@'
|
|
uint32_t name_end = name_start;
|
|
while (name_end < tree->source_len
|
|
&& ((source[name_end] >= 'a' && source[name_end] <= 'z')
|
|
|| (source[name_end] >= 'A' && source[name_end] <= 'Z')
|
|
|| source[name_end] == '_')) {
|
|
name_end++;
|
|
}
|
|
uint32_t name_len = name_end - name_start;
|
|
|
|
// clang-format off
|
|
if (name_len == 6 && memcmp(source + name_start, "import", 6) == 0)
|
|
return builtinCallImport(gz, scope, rl, node);
|
|
if (name_len == 7 && memcmp(source + name_start, "cImport", 7) == 0)
|
|
return cImportExpr(gz, scope, node);
|
|
if (name_len == 8 && memcmp(source + name_start, "cInclude", 8) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
return simpleCBuiltin(gz, scope, rl, node, nd.lhs, (uint16_t)ZIR_EXT_C_INCLUDE);
|
|
}
|
|
// @intCast — typeCast pattern (AstGen.zig:9416, 9807-9826).
|
|
if (name_len == 7 && memcmp(source + name_start, "intCast", 7) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_INT_CAST, node,
|
|
result_type, operand), node);
|
|
}
|
|
// @embedFile — simpleUnOp with coerced_ty (AstGen.zig:9390).
|
|
if (name_len == 9 && memcmp(source + name_start, "embedFile", 9) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
ResultLoc operand_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE, .src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t operand = exprRl(gz, scope, operand_rl, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_EMBED_FILE, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @intFromEnum — simpleUnOp (AstGen.zig:9388).
|
|
if (name_len == 11 && memcmp(source + name_start, "intFromEnum", 11) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_INT_FROM_ENUM, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @tagName — simpleUnOp with dbg_stmt (AstGen.zig:9407).
|
|
if (name_len == 7 && memcmp(source + name_start, "tagName", 7) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_TAG_NAME, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
// @as (AstGen.zig:8909-8920).
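    // Note: @as emits no cast instruction of its own; the destination
    // type is forwarded to the operand as an RL_TY result location and
    // rvalue() reconciles the result with the caller's result location.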
if (name_len == 2 && memcmp(source + name_start, "as", 2) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t dest_type = typeExpr(gz, scope, nd.lhs);
|
|
ResultLoc as_rl = { .tag = RL_TY, .data = dest_type, .src_node = 0,
|
|
.ctx = rl.ctx };
|
|
uint32_t operand = exprRl(gz, scope, as_rl, nd.rhs);
|
|
return rvalue(gz, rl, operand, node);
|
|
}
|
|
// @truncate — typeCast pattern (AstGen.zig:9417, 9807-9826).
|
|
if (name_len == 8 && memcmp(source + name_start, "truncate", 8) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_TRUNCATE, node,
|
|
result_type, operand), node);
|
|
}
|
|
// @ptrCast — typeCast pattern (AstGen.zig:9056, 9807-9826).
|
|
// TODO: Issue 14 — upstream routes through ptrCast() for nested
|
|
// pointer cast collapsing. Currently uses simple typeCast path.
|
|
if (name_len == 7 && memcmp(source + name_start, "ptrCast", 7) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_PTR_CAST, node,
|
|
result_type, operand), node);
|
|
}
|
|
// @enumFromInt — typeCast pattern (AstGen.zig:9414, 9807-9826).
|
|
if (name_len == 11 && memcmp(source + name_start, "enumFromInt", 11) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_ENUM_FROM_INT, node,
|
|
result_type, operand), node);
|
|
}
|
|
// @bitCast (AstGen.zig:8944-8958, dispatched at 9313).
|
|
if (name_len == 7 && memcmp(source + name_start, "bitCast", 7) == 0) {
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_BITCAST, node,
|
|
result_type, operand), node);
|
|
}
|
|
// @memcpy (AstGen.zig:9631-9637).
|
|
if (name_len == 6 && memcmp(source + name_start, "memcpy", 6) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t dst = expr(gz, scope, nd.lhs);
|
|
uint32_t src = expr(gz, scope, nd.rhs);
|
|
addPlNodeBin(gz, ZIR_INST_MEMCPY, node, dst, src);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
}
|
|
// @memset (AstGen.zig:9638-9647).
|
|
if (name_len == 6 && memcmp(source + name_start, "memset", 6) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
uint32_t lhs_ty = addUnNode(gz, ZIR_INST_TYPEOF, lhs, nd.lhs);
|
|
uint32_t elem_ty =
|
|
addUnNode(gz, ZIR_INST_INDEXABLE_PTR_ELEM_TYPE, lhs_ty, nd.lhs);
|
|
ResultLoc val_rl = {
|
|
.tag = RL_COERCED_TY, .data = elem_ty, .src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t val = exprRl(gz, scope, val_rl, nd.rhs);
|
|
addPlNodeBin(gz, ZIR_INST_MEMSET, node, lhs, val);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
}
|
|
// @min (AstGen.zig:9149-9172).
|
|
if (name_len == 3 && memcmp(source + name_start, "min", 3) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t a = expr(gz, scope, nd.lhs);
|
|
uint32_t b = expr(gz, scope, nd.rhs);
|
|
return rvalue(gz, rl,
|
|
addPlNodeBin(gz, ZIR_INST_MIN, node, a, b), node);
|
|
}
|
|
// @max (AstGen.zig:9149-9172).
|
|
if (name_len == 3 && memcmp(source + name_start, "max", 3) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t a = expr(gz, scope, nd.lhs);
|
|
uint32_t b = expr(gz, scope, nd.rhs);
|
|
return rvalue(gz, rl,
|
|
addPlNodeBin(gz, ZIR_INST_MAX, node, a, b), node);
|
|
}
|
|
// @panic — simpleUnOp with dbg_node (AstGen.zig:9429-9432).
|
|
if (name_len == 5 && memcmp(source + name_start, "panic", 5) == 0) {
|
|
emitDbgNode(gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
ResultLoc panic_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE,
|
|
.src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t operand = exprRl(gz, scope, panic_rl, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_PANIC, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @errorName — simpleUnOp with dbg_stmt (AstGen.zig:9391).
|
|
if (name_len == 9 && memcmp(source + name_start, "errorName", 9) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
AstData nd = tree->nodes.datas[node];
|
|
ResultLoc err_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_ANYERROR_TYPE,
|
|
.src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t operand = exprRl(gz, scope, err_rl, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_ERROR_NAME, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @field (AstGen.zig:9288-9300).
|
|
if (name_len == 5 && memcmp(source + name_start, "field", 5) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
ResultLoc field_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE,
|
|
.src_node = 0, .ctx = RI_CTX_NONE };
|
|
if (RL_IS_REF(rl)) {
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
|
|
uint32_t fname = comptimeExpr(
|
|
gz, scope, field_rl, nd.rhs, COMPTIME_REASON_FIELD_NAME);
|
|
return addPlNodeBin(
|
|
gz, ZIR_INST_FIELD_PTR_NAMED, node, lhs, fname);
|
|
}
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
uint32_t fname = comptimeExpr(
|
|
gz, scope, field_rl, nd.rhs, COMPTIME_REASON_FIELD_NAME);
|
|
uint32_t result = addPlNodeBin(
|
|
gz, ZIR_INST_FIELD_VAL_NAMED, node, lhs, fname);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @sizeOf — simpleUnOpType (AstGen.zig:9381).
|
|
if (name_len == 6 && memcmp(source + name_start, "sizeOf", 6) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = typeExpr(gz, scope, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_SIZE_OF, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @alignOf — simpleUnOpType (AstGen.zig:9383).
|
|
if (name_len == 7 && memcmp(source + name_start, "alignOf", 7) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = typeExpr(gz, scope, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_ALIGN_OF, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @typeInfo — simpleUnOpType (AstGen.zig:9380).
|
|
if (name_len == 8 && memcmp(source + name_start, "typeInfo", 8) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = typeExpr(gz, scope, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_TYPE_INFO, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @compileError — simpleUnOp (AstGen.zig:9386, 9841-9861).
|
|
if (name_len == 12
|
|
&& memcmp(source + name_start, "compileError", 12) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
ResultLoc operand_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE, .src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t operand = comptimeExpr(gz, scope, operand_rl, nd.lhs,
|
|
COMPTIME_REASON_COMPILE_ERROR_STRING);
|
|
uint32_t result
|
|
= addUnNode(gz, ZIR_INST_COMPILE_ERROR, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @setEvalBranchQuota — simpleUnOp (AstGen.zig:9387, 9841-9861).
|
|
if (name_len == 18
|
|
&& memcmp(source + name_start, "setEvalBranchQuota", 18) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
ResultLoc operand_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_U32_TYPE, .src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t operand = exprRl(gz, scope, operand_rl, nd.lhs);
|
|
uint32_t result = addUnNode(
|
|
gz, ZIR_INST_SET_EVAL_BRANCH_QUOTA, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @typeName — simpleUnOp (AstGen.zig:9408, 9841-9861).
|
|
if (name_len == 8 && memcmp(source + name_start, "typeName", 8) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
uint32_t result = addUnNode(gz, ZIR_INST_TYPE_NAME, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @This (AstGen.zig:9371).
|
|
if (name_len == 4 && memcmp(source + name_start, "This", 4) == 0)
|
|
return rvalue(
|
|
gz, rl, addNodeExtended(gz, (uint16_t)ZIR_EXT_THIS, node), node);
|
|
// @memmove (AstGen.zig:9648-9654).
|
|
if (name_len == 7 && memcmp(source + name_start, "memmove", 7) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t dst = expr(gz, scope, nd.lhs);
|
|
uint32_t src_op = expr(gz, scope, nd.rhs);
|
|
addPlNodeBin(gz, ZIR_INST_MEMMOVE, node, dst, src_op);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
}
|
|
// @FieldType (AstGen.zig:9301-9309).
|
|
if (name_len == 9 && memcmp(source + name_start, "FieldType", 9) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t ty_inst = typeExpr(gz, scope, nd.lhs);
|
|
ResultLoc name_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE, .src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t name_inst = comptimeExpr(
|
|
gz, scope, name_rl, nd.rhs, COMPTIME_REASON_FIELD_NAME);
|
|
uint32_t result = addPlNodeBin(
|
|
gz, ZIR_INST_FIELD_TYPE_REF, node, ty_inst, name_inst);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @reduce (AstGen.zig:9539-9548).
|
|
if (name_len == 6 && memcmp(source + name_start, "reduce", 6) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t reduce_op_ty
|
|
= addBuiltinValue(gz, node, ZIR_BUILTIN_VALUE_REDUCE_OP);
|
|
ResultLoc op_rl = { .tag = RL_COERCED_TY,
|
|
.data = reduce_op_ty, .src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t op = exprRl(gz, scope, op_rl, nd.lhs);
|
|
uint32_t scalar = expr(gz, scope, nd.rhs);
|
|
uint32_t result
|
|
= addPlNodeBin(gz, ZIR_INST_REDUCE, node, op, scalar);
|
|
return rvalue(gz, rl, result, node);
}
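    // The four *WithOverflow builtins below share one lowering: evaluate
    // lhs and rhs with no result type, then emit an extended instruction
    // whose trailing payload is { src_node (relative to decl_node_index),
    // lhs, rhs } via addExtendedPayload.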
// @addWithOverflow — overflowArithmetic (AstGen.zig:9550, 10040-10056).
|
|
if (name_len == 15
|
|
&& memcmp(source + name_start, "addWithOverflow", 15) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
uint32_t rhs = expr(gz, scope, nd.rhs);
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = rhs;
|
|
uint32_t result = addExtendedPayload(
|
|
gz, (uint16_t)ZIR_EXT_ADD_WITH_OVERFLOW, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @subWithOverflow (AstGen.zig:9551, 10040-10056).
|
|
if (name_len == 15
|
|
&& memcmp(source + name_start, "subWithOverflow", 15) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
uint32_t rhs = expr(gz, scope, nd.rhs);
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = rhs;
|
|
uint32_t result = addExtendedPayload(
|
|
gz, (uint16_t)ZIR_EXT_SUB_WITH_OVERFLOW, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @mulWithOverflow (AstGen.zig:9552, 10040-10056).
|
|
if (name_len == 15
|
|
&& memcmp(source + name_start, "mulWithOverflow", 15) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
uint32_t rhs = expr(gz, scope, nd.rhs);
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = rhs;
|
|
uint32_t result = addExtendedPayload(
|
|
gz, (uint16_t)ZIR_EXT_MUL_WITH_OVERFLOW, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @shlWithOverflow (AstGen.zig:9553, 10040-10056).
|
|
if (name_len == 15
|
|
&& memcmp(source + name_start, "shlWithOverflow", 15) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
uint32_t rhs = expr(gz, scope, nd.rhs);
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = rhs;
|
|
uint32_t result = addExtendedPayload(
|
|
gz, (uint16_t)ZIR_EXT_SHL_WITH_OVERFLOW, payload_index);
|
|
return rvalue(gz, rl, result, node);
}
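    // The pointer-cast builtins below pack FullPtrCastFlags (a small
    // packed integer in upstream Zir.zig) into the extended `small`
    // operand. Bits used here: bit 1 = align_cast (0x02), bit 3 =
    // const_cast (0x08), bit 4 = volatile_cast (0x10); bit 0 is
    // ptr_cast, and bit 2 (addrspace_cast upstream) is not emitted by
    // this port.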
// @alignCast — ptrCast family (AstGen.zig:9464-9469, 8969-9087).
|
|
// Simplified: standalone @alignCast uses ptr_cast_full with align_cast flag.
|
|
if (name_len == 9
|
|
&& memcmp(source + name_start, "alignCast", 9) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
// align_cast flag = bit 1 (FullPtrCastFlags: ptr_cast=0, align_cast=1)
|
|
uint16_t flags = 0x02;
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = result_type;
|
|
ag->extra[ag->extra_len++] = operand;
|
|
uint32_t result = addExtendedPayloadSmall(
|
|
gz, (uint16_t)ZIR_EXT_PTR_CAST_FULL, flags, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @constCast (AstGen.zig:9464-9469, 8969-9087).
|
|
if (name_len == 9
|
|
&& memcmp(source + name_start, "constCast", 9) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
// const_cast flag = bit 3 (FullPtrCastFlags packed u5)
|
|
uint16_t flags = 0x08;
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = operand;
|
|
uint32_t result = addExtendedPayloadSmall(
|
|
gz, (uint16_t)ZIR_EXT_PTR_CAST_NO_DEST, flags, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @volatileCast (AstGen.zig:9464-9469, 8969-9087).
|
|
if (name_len == 12
|
|
&& memcmp(source + name_start, "volatileCast", 12) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
// volatile_cast flag = bit 4 (FullPtrCastFlags packed u5)
|
|
uint16_t flags = 0x10;
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = operand;
|
|
uint32_t result = addExtendedPayloadSmall(
|
|
gz, (uint16_t)ZIR_EXT_PTR_CAST_NO_DEST, flags, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @Type (reify) (AstGen.zig:9426-9428, 9747-9781).
|
|
if (name_len == 4 && memcmp(source + name_start, "Type", 4) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t type_info_ty
|
|
= addBuiltinValue(gz, node, ZIR_BUILTIN_VALUE_TYPE_INFO);
|
|
ResultLoc operand_rl = { .tag = RL_COERCED_TY,
|
|
.data = type_info_ty, .src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t operand = exprRl(gz, scope, operand_rl, nd.lhs);
|
|
// Reify payload: absolute node, operand, src_line.
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = node; // absolute node index
|
|
ag->extra[ag->extra_len++] = operand;
|
|
ag->extra[ag->extra_len++] = ag->source_line;
|
|
// name_strat = .anon = 2
|
|
uint32_t result = addExtendedPayloadSmall(
|
|
gz, (uint16_t)ZIR_EXT_REIFY, 2, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
// @TypeOf (AstGen.zig:9314, 9089-9147) — single-arg case.
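    // Single-argument @TypeOf is lowered as a typeof_builtin block: the
    // operand is evaluated in a sub-block with is_comptime=false and
    // is_typeof=true, a break_inline carries the value back to the
    // block, and the block instruction itself becomes the result.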
if (name_len == 6 && memcmp(source + name_start, "TypeOf", 6) == 0) {
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t typeof_inst
|
|
= makeBlockInst(ag, ZIR_INST_TYPEOF_BUILTIN, gz, node);
|
|
GenZir typeof_scope = makeSubBlock(gz, &gz->base);
|
|
typeof_scope.is_comptime = false;
|
|
typeof_scope.is_typeof = true;
|
|
typeof_scope.c_import = false;
|
|
uint32_t ty_expr_ref = reachableExpr(
|
|
&typeof_scope, &typeof_scope.base, RL_NONE_VAL, nd.lhs, node);
|
|
if (!refIsNoReturn(&typeof_scope, ty_expr_ref)) {
|
|
addBreak(&typeof_scope, ZIR_INST_BREAK_INLINE,
|
|
typeof_inst, ty_expr_ref,
|
|
(int32_t)nd.lhs - (int32_t)gz->decl_node_index);
|
|
}
|
|
setBlockBody(ag, &typeof_scope, typeof_inst);
|
|
// typeof_scope unstacked now, add instruction to gz.
|
|
gzAppendInstruction(gz, typeof_inst);
|
|
return rvalue(gz, rl, typeof_inst + ZIR_REF_START_INDEX, node);
|
|
}
|
|
// @intFromPtr — simpleUnOp with dbg_stmt (AstGen.zig:9392).
|
|
if (name_len == 10
|
|
&& memcmp(source + name_start, "intFromPtr", 10) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
uint32_t result
|
|
= addUnNode(gz, ZIR_INST_INT_FROM_PTR, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @intFromBool — simpleUnOp (AstGen.zig:9389).
|
|
if (name_len == 11
|
|
&& memcmp(source + name_start, "intFromBool", 11) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
uint32_t result
|
|
= addUnNode(gz, ZIR_INST_INT_FROM_BOOL, operand, node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// @floatFromInt — typeCast (AstGen.zig:9418, 9807-9826).
|
|
if (name_len == 12
|
|
&& memcmp(source + name_start, "floatFromInt", 12) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_FLOAT_FROM_INT,
|
|
node, result_type, operand), node);
|
|
}
|
|
// @intFromFloat — typeCast (AstGen.zig:9419, 9807-9826).
|
|
if (name_len == 12
|
|
&& memcmp(source + name_start, "intFromFloat", 12) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_INT_FROM_FLOAT,
|
|
node, result_type, operand), node);
|
|
}
|
|
// @floatCast — typeCast (AstGen.zig:9420, 9807-9826).
|
|
if (name_len == 9
|
|
&& memcmp(source + name_start, "floatCast", 9) == 0) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_FLOAT_CAST,
|
|
node, result_type, operand), node);
|
|
}
|
|
// @errSetCast — extended error_cast (AstGen.zig:9454-9463).
|
|
if (name_len == 10
|
|
&& memcmp(source + name_start, "errSetCast", 10) == 0) {
|
|
emitDbgNode(gz, node);
|
|
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand = expr(gz, scope, nd.lhs);
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
ag->extra[ag->extra_len++] = result_type;
|
|
ag->extra[ag->extra_len++] = operand;
|
|
uint32_t result = addExtendedPayload(
|
|
gz, (uint16_t)ZIR_EXT_ERROR_CAST, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// clang-format on
|
|
|
|
// TODO: handle other builtins.
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Mirrors builtinCall for 3+ arg builtins (AST_NODE_BUILTIN_CALL).
|
|
// params are in extra_data[params_start..params_end].
|
|
static uint32_t builtinCallMultiArg(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint32_t params_start, uint32_t params_end) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
uint32_t builtin_token = tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = tree->tokens.starts[builtin_token];
|
|
const char* source = tree->source;
|
|
uint32_t name_start = tok_start + 1;
|
|
uint32_t name_end = name_start;
|
|
while (name_end < tree->source_len
|
|
&& ((source[name_end] >= 'a' && source[name_end] <= 'z')
|
|
|| (source[name_end] >= 'A' && source[name_end] <= 'Z')
|
|
|| source[name_end] == '_')) {
|
|
name_end++;
|
|
}
|
|
uint32_t name_len = name_end - name_start;
|
|
const uint32_t* params = tree->extra_data.arr + params_start;
|
|
uint32_t param_count = params_end - params_start;
|
|
|
|
// @unionInit (AstGen.zig:9315, 8922-8942).
|
|
if (name_len == 9 && memcmp(source + name_start, "unionInit", 9) == 0
|
|
&& param_count == 3) {
|
|
uint32_t union_type = typeExpr(gz, scope, params[0]);
|
|
ResultLoc name_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t field_name_ref = comptimeExpr(
|
|
gz, scope, name_rl, params[1], COMPTIME_REASON_UNION_FIELD_NAME);
|
|
// Get field type via field_type_ref.
|
|
uint32_t field_type = addPlNodeBin(
|
|
gz, ZIR_INST_FIELD_TYPE_REF, node, union_type, field_name_ref);
|
|
// Evaluate init value coerced to field type.
|
|
ResultLoc init_rl = {
|
|
.tag = RL_TY, .data = field_type, .src_node = 0, .ctx = rl.ctx
|
|
};
|
|
uint32_t init = reachableExpr(gz, scope, init_rl, params[2], node);
|
|
// Emit union_init: payload = union_type, init, field_name_ref.
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = union_type;
|
|
ag->extra[ag->extra_len++] = init;
|
|
ag->extra[ag->extra_len++] = field_name_ref;
|
|
uint32_t result = addPlNodePayloadIndex(
|
|
gz, ZIR_INST_UNION_INIT, node, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
|
|
// TODO: handle other multi-arg builtins.
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// --- identifier (AstGen.zig:8282) ---
// Simplified port: resolves primitive and integer types, locals
// (tunneling through closures where needed), and container-level
// declarations via decl_val/decl_ref.
// Tries to resolve an identifier as a primitive type or integer type.
|
|
// Returns the ZIR ref if it's a primitive/int type, or ZIR_REF_NONE.
|
|
// Mirrors primitive_instrs + integer type checks in identifier()
|
|
// (AstGen.zig:8298-8337).
|
|
static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
uint32_t ident_token = ag->tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = ag->tree->tokens.starts[ident_token];
|
|
const char* source = ag->tree->source;
|
|
uint32_t tok_end = tok_start;
|
|
while (tok_end < ag->tree->source_len
|
|
&& ((source[tok_end] >= 'a' && source[tok_end] <= 'z')
|
|
|| (source[tok_end] >= 'A' && source[tok_end] <= 'Z')
|
|
|| (source[tok_end] >= '0' && source[tok_end] <= '9')
|
|
|| source[tok_end] == '_'))
|
|
tok_end++;
|
|
uint32_t tok_len = tok_end - tok_start;
|
|
|
|
// Check well-known primitive refs (primitive_instrs map,
|
|
// AstGen.zig:10236-10281).
|
|
// clang-format off
|
|
if (tok_len == 2 && memcmp(source+tok_start, "u1", 2) == 0) return ZIR_REF_U1_TYPE;
|
|
if (tok_len == 2 && memcmp(source+tok_start, "u8", 2) == 0) return ZIR_REF_U8_TYPE;
|
|
if (tok_len == 2 && memcmp(source+tok_start, "i8", 2) == 0) return ZIR_REF_I8_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "u16", 3) == 0) return ZIR_REF_U16_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "i16", 3) == 0) return ZIR_REF_I16_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "u29", 3) == 0) return ZIR_REF_U29_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "u32", 3) == 0) return ZIR_REF_U32_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "i32", 3) == 0) return ZIR_REF_I32_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "u64", 3) == 0) return ZIR_REF_U64_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "i64", 3) == 0) return ZIR_REF_I64_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "u128", 4) == 0) return ZIR_REF_U128_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "i128", 4) == 0) return ZIR_REF_I128_TYPE;
|
|
if (tok_len == 5 && memcmp(source+tok_start, "usize", 5) == 0) return ZIR_REF_USIZE_TYPE;
|
|
if (tok_len == 5 && memcmp(source+tok_start, "isize", 5) == 0) return ZIR_REF_ISIZE_TYPE;
|
|
if (tok_len == 6 && memcmp(source+tok_start, "c_char", 6) == 0) return ZIR_REF_C_CHAR_TYPE;
|
|
if (tok_len == 7 && memcmp(source+tok_start, "c_short", 7) == 0) return ZIR_REF_C_SHORT_TYPE;
|
|
if (tok_len == 8 && memcmp(source+tok_start, "c_ushort", 8) == 0) return ZIR_REF_C_USHORT_TYPE;
|
|
if (tok_len == 5 && memcmp(source+tok_start, "c_int", 5) == 0) return ZIR_REF_C_INT_TYPE;
|
|
if (tok_len == 6 && memcmp(source+tok_start, "c_uint", 6) == 0) return ZIR_REF_C_UINT_TYPE;
|
|
if (tok_len == 6 && memcmp(source+tok_start, "c_long", 6) == 0) return ZIR_REF_C_LONG_TYPE;
|
|
if (tok_len == 7 && memcmp(source+tok_start, "c_ulong", 7) == 0) return ZIR_REF_C_ULONG_TYPE;
|
|
if (tok_len == 10 && memcmp(source+tok_start, "c_longlong", 10) == 0) return ZIR_REF_C_LONGLONG_TYPE;
|
|
if (tok_len == 11 && memcmp(source+tok_start, "c_ulonglong", 11) == 0) return ZIR_REF_C_ULONGLONG_TYPE;
|
|
if (tok_len == 14 && memcmp(source+tok_start, "comptime_float", 14) == 0) return ZIR_REF_COMPTIME_FLOAT_TYPE;
|
|
if (tok_len == 12 && memcmp(source+tok_start, "comptime_int", 12) == 0) return ZIR_REF_COMPTIME_INT_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "f16", 3) == 0) return ZIR_REF_F16_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "f32", 3) == 0) return ZIR_REF_F32_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "f64", 3) == 0) return ZIR_REF_F64_TYPE;
|
|
if (tok_len == 3 && memcmp(source+tok_start, "f80", 3) == 0) return ZIR_REF_F80_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "f128", 4) == 0) return ZIR_REF_F128_TYPE;
|
|
if (tok_len == 9 && memcmp(source+tok_start, "anyopaque", 9) == 0) return ZIR_REF_ANYOPAQUE_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "bool", 4) == 0) return ZIR_REF_BOOL_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "void", 4) == 0) return ZIR_REF_VOID_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "type", 4) == 0) return ZIR_REF_TYPE_TYPE;
|
|
if (tok_len == 8 && memcmp(source+tok_start, "anyerror", 8) == 0) return ZIR_REF_ANYERROR_TYPE;
|
|
if (tok_len == 8 && memcmp(source+tok_start, "noreturn", 8) == 0) return ZIR_REF_NORETURN_TYPE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "true", 4) == 0) return ZIR_REF_BOOL_TRUE;
|
|
if (tok_len == 5 && memcmp(source+tok_start, "false", 5) == 0) return ZIR_REF_BOOL_FALSE;
|
|
if (tok_len == 4 && memcmp(source+tok_start, "null", 4) == 0) return ZIR_REF_NULL_VALUE;
|
|
if (tok_len == 9 && memcmp(source+tok_start, "undefined", 9) == 0) return ZIR_REF_UNDEF;
|
|
// clang-format on
// Integer type detection: u29, i13, etc. (AstGen.zig:8304-8336).
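    // For example, "u7" produces an int_type instruction with
    // signedness=1 (unsigned) and bit_count=7, while "i13" produces
    // signedness=0 and bit_count=13.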
if (tok_len >= 2
|
|
&& (source[tok_start] == 'u' || source[tok_start] == 'i')) {
|
|
// Zig Signedness enum: unsigned=1, signed=0
|
|
uint8_t signedness = (source[tok_start] == 'u') ? 1 : 0;
|
|
// Reject leading zeros (e.g. u01, i007) but allow u0/i0.
|
|
if (tok_len >= 3 && source[tok_start + 1] == '0') {
|
|
return ZIR_REF_NONE;
|
|
}
|
|
uint16_t bit_count = 0;
|
|
bool valid = true;
|
|
for (uint32_t k = tok_start + 1; k < tok_end; k++) {
|
|
if (source[k] >= '0' && source[k] <= '9') {
|
|
bit_count
|
|
= (uint16_t)(bit_count * 10 + (uint16_t)(source[k] - '0'));
|
|
} else {
|
|
valid = false;
|
|
break;
|
|
}
|
|
}
|
|
if (valid) {
|
|
ZirInstData data;
|
|
data.int_type.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.int_type.signedness = signedness;
|
|
data.int_type._pad = 0;
|
|
data.int_type.bit_count = bit_count;
|
|
return addInstruction(gz, ZIR_INST_INT_TYPE, data);
|
|
}
|
|
}
|
|
return ZIR_REF_NONE;
|
|
}
// Capture tag values for packed Capture u32 (Zir.zig:3391-3435).
#define CAPTURE_TAG_NESTED 0
#define CAPTURE_TAG_INSTRUCTION 1
#define CAPTURE_TAG_INSTRUCTION_LOAD 2
#define CAPTURE_TAG_DECL_VAL 3
#define CAPTURE_TAG_DECL_REF 4
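// A packed capture stores the tag in bits 0-2 and the payload (an
// instruction index or a NullTerminatedString index, depending on the
// tag) in bits 3-31; e.g. packCapture(CAPTURE_TAG_DECL_VAL, s) yields
// (s << 3) | 3.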
static uint32_t packCapture(uint32_t tag, uint32_t data) {
return (tag & 0x7u) | (data << 3);
}
// Mirrors tunnelThroughClosure (AstGen.zig:8526-8624).
|
|
// Access a ZIR instruction/decl through closure. May tunnel through
|
|
// arbitrarily many namespaces, adding closure captures as required.
|
|
// Returns the ref of the closure_get instruction added to gz.
|
|
//
|
|
// capture_tag: one of CAPTURE_TAG_* constants.
// capture_data: the instruction index or NullTerminatedString index.
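//
// For example, referencing a value two namespaces out adds a root
// capture (capture_tag/capture_data) to the outermost namespace, a
// nested capture pointing at that capture's index to the intermediate
// namespace, and finally emits a closure_get in the current block.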
static uint32_t tunnelThroughClosure(GenZir* gz, uint32_t inner_ref_node,
|
|
uint32_t num_tunnels, uint32_t capture_tag, uint32_t capture_data,
|
|
uint32_t name_str_index) {
|
|
// For trivial refs (instruction tag with data mapping to a non-index ref),
|
|
// no tunnel is needed. In the upstream, this checks if
|
|
// ref.toIndex()==null. A ref with index < ZIR_REF_START_INDEX is
// non-indexed (e.g. void, true).
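    // Callers pass capture_data as `inst - ZIR_REF_START_INDEX`, so for
    // a non-indexed ref the subtraction wrapped around; adding the
    // offset back below recovers the original ref value before the
    // comparison.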
if (capture_tag == CAPTURE_TAG_INSTRUCTION
|
|
&& capture_data + ZIR_REF_START_INDEX < ZIR_REF_START_INDEX) {
|
|
return capture_data + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// Walk gz->parent to find the namespace scopes we're tunneling through.
|
|
// The root namespace is the outermost (first found when counting
|
|
// num_tunnels-1 intermediate tunnels from the inside).
|
|
// Allocate on the stack for typical small counts.
|
|
ScopeNamespace* intermediates[16];
|
|
ScopeNamespace** inter = intermediates;
|
|
ScopeNamespace** inter_heap = NULL;
|
|
if (num_tunnels > 17) {
|
|
inter_heap = malloc((num_tunnels - 1) * sizeof(ScopeNamespace*));
|
|
if (!inter_heap)
|
|
exit(1);
|
|
inter = inter_heap;
|
|
}
|
|
|
|
// Find the root namespace and intermediate tunnels (AstGen.zig:8568-8582).
|
|
ScopeNamespace* root_ns = NULL;
|
|
{
|
|
uint32_t remaining = num_tunnels - 1;
|
|
Scope* s = gz->parent;
|
|
while (s != NULL && remaining > 0) {
|
|
if (s->tag == SCOPE_NAMESPACE) {
|
|
remaining--;
|
|
inter[remaining] = (ScopeNamespace*)s;
|
|
}
|
|
s = scopeParent(s);
|
|
}
|
|
// Find the root namespace.
|
|
while (s != NULL) {
|
|
if (s->tag == SCOPE_NAMESPACE) {
|
|
root_ns = (ScopeNamespace*)s;
|
|
break;
|
|
}
|
|
s = scopeParent(s);
|
|
}
|
|
}
|
|
|
|
// root_ns must have been found — we are tunneling through at least one
|
|
// namespace, so there must be a root namespace in the scope chain.
|
|
if (root_ns == NULL) {
|
|
free(inter_heap);
|
|
SET_ERROR(gz->astgen);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Add the root capture (AstGen.zig:8584-8594).
|
|
uint32_t root_capture = packCapture(capture_tag, capture_data);
|
|
uint32_t cur_capture_index
|
|
= scopeNamespaceAddCapture(root_ns, root_capture, name_str_index);
|
|
|
|
// Chain through intermediate namespaces (AstGen.zig:8596-8616).
|
|
for (uint32_t i = 0; i < num_tunnels - 1; i++) {
|
|
uint32_t nested_capture
|
|
= packCapture(CAPTURE_TAG_NESTED, cur_capture_index);
|
|
cur_capture_index = scopeNamespaceAddCapture(
|
|
inter[i], nested_capture, name_str_index);
|
|
}
|
|
|
|
free(inter_heap);
|
|
|
|
// Add closure_get instruction (AstGen.zig:8622-8623).
|
|
return addNodeExtendedSmall(gz, (uint16_t)ZIR_EXT_CLOSURE_GET,
|
|
inner_ref_node, (uint16_t)cur_capture_index);
|
|
}
|
|
|
|
// Mirrors localVarRef (AstGen.zig:8367-8521).
|
|
static uint32_t localVarRef(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint32_t ident_token, uint32_t name_str) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
uint32_t found_already = UINT32_MAX; // node index if found
|
|
bool found_needs_tunnel = false;
|
|
uint32_t found_namespaces_out = 0;
|
|
uint32_t num_namespaces_out = 0;
|
|
ScopeNamespace* capturing_namespace = NULL;
|
|
|
|
for (Scope* s = scope; s != NULL;) {
|
|
switch (s->tag) {
|
|
case SCOPE_LOCAL_VAL: {
|
|
ScopeLocalVal* lv = (ScopeLocalVal*)s;
|
|
if (lv->name == name_str) {
|
|
// Track usage for generic detection (AstGen.zig:8399).
|
|
if (lv->is_used_or_discarded != NULL)
|
|
*lv->is_used_or_discarded = true;
|
|
// Locals cannot shadow, no ambiguity check needed.
|
|
uint32_t value_inst;
|
|
if (num_namespaces_out != 0) {
|
|
value_inst = tunnelThroughClosure(gz, node,
|
|
num_namespaces_out, CAPTURE_TAG_INSTRUCTION,
|
|
lv->inst - ZIR_REF_START_INDEX, name_str);
|
|
} else {
|
|
value_inst = lv->inst;
|
|
}
|
|
return rvalueNoCoercePreRef(gz, rl, value_inst, node);
|
|
}
|
|
s = lv->parent;
|
|
continue;
|
|
}
|
|
case SCOPE_LOCAL_PTR: {
|
|
ScopeLocalPtr* lp = (ScopeLocalPtr*)s;
|
|
if (lp->name == name_str) {
|
|
// Can't close over a runtime variable
|
|
// (AstGen.zig:8424-8432).
|
|
if (num_namespaces_out != 0 && !lp->maybe_comptime
|
|
&& !gz->is_typeof) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
if (RL_IS_REF(rl)) {
|
|
uint32_t ptr_inst;
|
|
if (num_namespaces_out != 0) {
|
|
ptr_inst = tunnelThroughClosure(gz, node,
|
|
num_namespaces_out, CAPTURE_TAG_INSTRUCTION,
|
|
lp->ptr - ZIR_REF_START_INDEX, name_str);
|
|
} else {
|
|
ptr_inst = lp->ptr;
|
|
}
|
|
return ptr_inst;
|
|
} else {
|
|
uint32_t val_inst;
|
|
if (num_namespaces_out != 0) {
|
|
val_inst = tunnelThroughClosure(gz, node,
|
|
num_namespaces_out, CAPTURE_TAG_INSTRUCTION_LOAD,
|
|
lp->ptr - ZIR_REF_START_INDEX, name_str);
|
|
} else {
|
|
val_inst = addUnNode(gz, ZIR_INST_LOAD, lp->ptr, node);
|
|
}
|
|
return rvalueNoCoercePreRef(gz, rl, val_inst, node);
|
|
}
|
|
}
|
|
s = lp->parent;
|
|
continue;
|
|
}
|
|
case SCOPE_GEN_ZIR: {
|
|
s = ((GenZir*)s)->parent;
|
|
continue;
|
|
}
|
|
case SCOPE_DEFER_NORMAL:
|
|
case SCOPE_DEFER_ERROR: {
|
|
s = ((ScopeDefer*)s)->parent;
|
|
continue;
|
|
}
|
|
case SCOPE_LABEL: {
|
|
s = ((ScopeLabel*)s)->parent;
|
|
continue;
|
|
}
|
|
case SCOPE_NAMESPACE: {
|
|
// Check namespace decls, then continue walking
|
|
// (AstGen.zig:8455-8472).
|
|
ScopeNamespace* ns = (ScopeNamespace*)s;
|
|
uint32_t decl_node = scopeNamespaceFindDecl(ns, name_str);
|
|
if (decl_node != UINT32_MAX) {
|
|
if (found_already != UINT32_MAX) {
|
|
// Ambiguous reference (AstGen.zig:8458-8462).
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
found_already = decl_node;
|
|
found_needs_tunnel = ns->maybe_generic;
|
|
found_namespaces_out = num_namespaces_out;
|
|
}
|
|
num_namespaces_out++;
|
|
capturing_namespace = ns;
|
|
s = ns->parent;
|
|
continue;
|
|
}
|
|
case SCOPE_TOP:
|
|
goto done_walk;
|
|
}
|
|
}
|
|
done_walk:
|
|
(void)capturing_namespace;
|
|
|
|
if (found_already == UINT32_MAX) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Decl references (AstGen.zig:8484-8520).
|
|
if (found_namespaces_out > 0 && found_needs_tunnel) {
|
|
if (RL_IS_REF(rl)) {
|
|
return tunnelThroughClosure(gz, node, found_namespaces_out,
|
|
CAPTURE_TAG_DECL_REF, name_str, name_str);
|
|
} else {
|
|
uint32_t result
|
|
= tunnelThroughClosure(gz, node, found_namespaces_out,
|
|
CAPTURE_TAG_DECL_VAL, name_str, name_str);
|
|
return rvalueNoCoercePreRef(gz, rl, result, node);
|
|
}
|
|
}
|
|
|
|
// Simple decl reference (AstGen.zig:8512-8520).
|
|
ZirInstData data;
|
|
data.str_tok.start = name_str;
|
|
data.str_tok.src_tok = tokenIndexToRelative(gz, ident_token);
|
|
if (RL_IS_REF(rl)) {
|
|
return addInstruction(gz, ZIR_INST_DECL_REF, data);
|
|
} else {
|
|
uint32_t result = addInstruction(gz, ZIR_INST_DECL_VAL, data);
|
|
return rvalueNoCoercePreRef(gz, rl, result, node);
|
|
}
|
|
}
|
|
|
|
static uint32_t identifierExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
uint32_t ident_token = ag->tree->nodes.main_tokens[node];
|
|
|
|
// Check for primitive types FIRST (AstGen.zig:8298-8338).
|
|
uint32_t prim = tryResolvePrimitiveIdent(gz, node);
|
|
if (prim != ZIR_REF_NONE)
|
|
return rvalue(gz, rl, prim, node);
|
|
|
|
// Local variables and container-level declarations
|
|
// (AstGen.zig:8340-8365).
|
|
return localVarRef(
|
|
gz, scope, rl, node, ident_token, identAsString(ag, ident_token));
|
|
}
|
|
|
|
// --- fieldAccess (AstGen.zig:6154) ---
// Simplified: emits field_val (or field_ptr when the result location is
// a ref) with a Field payload of { lhs, field_name_start }.
static uint32_t fieldAccessExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// data.lhs = object node, data.rhs = field identifier token.
|
|
uint32_t object_node = nd.lhs;
|
|
uint32_t field_ident = nd.rhs;
|
|
|
|
// Get field name as string (AstGen.zig:6180).
|
|
uint32_t str_index = identAsString(ag, field_ident);
|
|
|
|
// Evaluate the LHS object expression (AstGen.zig:6181).
|
|
// For .ref rl, LHS is also evaluated with .ref (AstGen.zig:6161).
|
|
ResultLoc lhs_rl = (RL_IS_REF(rl)) ? RL_REF_VAL : RL_NONE_VAL;
|
|
uint32_t lhs = exprRl(gz, scope, lhs_rl, object_node);
|
|
|
|
// Emit dbg_stmt for the dot token (AstGen.zig:6183-6184).
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
{
|
|
uint32_t line = ag->source_line - gz->decl_line;
|
|
uint32_t column = ag->source_column;
|
|
emitDbgStmt(gz, line, column);
|
|
}
|
|
|
|
// Emit field_val instruction with Field payload (AstGen.zig:6186-6189).
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs; // Field.lhs
|
|
ag->extra[ag->extra_len++] = str_index; // Field.field_name_start
|
|
|
|
// .ref → field_ptr, else → field_val (AstGen.zig:6160-6164).
|
|
ZirInstTag ftag
|
|
= (RL_IS_REF(rl)) ? ZIR_INST_FIELD_PTR : ZIR_INST_FIELD_VAL;
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
uint32_t access = addInstruction(gz, ftag, data);
|
|
// For ref, return directly; otherwise apply rvalue (AstGen.zig:6161-6164).
|
|
if (RL_IS_REF(rl))
|
|
return access;
|
|
return rvalue(gz, rl, access, node);
|
|
}
|
|
|
|
// --- ptrType (AstGen.zig:3833) ---
|
|
|
|
static uint32_t ptrTypeExpr(GenZir* gz, Scope* scope, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t main_tok = tree->nodes.main_tokens[node];
|
|
|
|
// child_type is always in rhs for all ptr_type variants.
|
|
uint32_t child_type_node = nd.rhs;
|
|
|
|
// Determine size from main_token (Ast.zig:2122-2131).
// Pointer.Size: one=0, many=1, slice=2, c=3.
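    // e.g. `*T` / `**T` -> one, `[*]T` -> many, `[]T` -> slice,
    // `[*c]T` -> c.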
uint8_t size;
|
|
TokenizerTag main_tok_tag = tree->tokens.tags[main_tok];
|
|
if (main_tok_tag == TOKEN_ASTERISK
|
|
|| main_tok_tag == TOKEN_ASTERISK_ASTERISK) {
|
|
size = 0; // one
|
|
} else {
|
|
assert(main_tok_tag == TOKEN_L_BRACKET);
|
|
TokenizerTag next_tag = tree->tokens.tags[main_tok + 1];
|
|
if (next_tag == TOKEN_ASTERISK) {
|
|
// [*c]T vs [*]T: c-pointer if next-next is identifier.
|
|
if (tree->tokens.tags[main_tok + 2] == TOKEN_IDENTIFIER)
|
|
size = 3; // c
|
|
else
|
|
size = 1; // many
|
|
} else {
|
|
size = 2; // slice
|
|
}
|
|
}
|
|
|
|
// Determine sentinel, align, addrspace, bit_range nodes from AST variant
|
|
// (Ast.zig:1656-1696).
|
|
uint32_t sentinel_node = UINT32_MAX;
|
|
uint32_t align_node = UINT32_MAX;
|
|
uint32_t addrspace_node = UINT32_MAX;
|
|
uint32_t bit_range_start = UINT32_MAX;
|
|
uint32_t bit_range_end = UINT32_MAX;
|
|
|
|
if (tag == AST_NODE_PTR_TYPE_ALIGNED) {
|
|
// opt_node_and_node: lhs = optional align_node (0=none), rhs = child.
|
|
if (nd.lhs != 0)
|
|
align_node = nd.lhs;
|
|
} else if (tag == AST_NODE_PTR_TYPE_SENTINEL) {
|
|
// opt_node_and_node: lhs = optional sentinel (0=none), rhs = child.
|
|
if (nd.lhs != 0)
|
|
sentinel_node = nd.lhs;
|
|
} else if (tag == AST_NODE_PTR_TYPE) {
|
|
// extra_and_node: lhs = extra index to AstPtrType, rhs = child_type.
|
|
const AstPtrType* pt
|
|
= (const AstPtrType*)(tree->extra_data.arr + nd.lhs);
|
|
if (pt->sentinel != UINT32_MAX)
|
|
sentinel_node = pt->sentinel;
|
|
if (pt->align_node != UINT32_MAX)
|
|
align_node = pt->align_node;
|
|
if (pt->addrspace_node != UINT32_MAX)
|
|
addrspace_node = pt->addrspace_node;
|
|
} else if (tag == AST_NODE_PTR_TYPE_BIT_RANGE) {
|
|
// extra_and_node: lhs = extra index to AstPtrTypeBitRange.
|
|
const AstPtrTypeBitRange* pt
|
|
= (const AstPtrTypeBitRange*)(tree->extra_data.arr + nd.lhs);
|
|
if (pt->sentinel != UINT32_MAX)
|
|
sentinel_node = pt->sentinel;
|
|
align_node = pt->align_node;
|
|
if (pt->addrspace_node != UINT32_MAX)
|
|
addrspace_node = pt->addrspace_node;
|
|
bit_range_start = pt->bit_range_start;
|
|
bit_range_end = pt->bit_range_end;
|
|
}
|
|
|
|
// Scan tokens between main_token and child_type to find const/volatile/
|
|
// allowzero (Ast.zig:2139-2164).
|
|
bool has_const = false;
|
|
bool has_volatile = false;
|
|
bool has_allowzero = false;
|
|
{
|
|
uint32_t i;
|
|
if (sentinel_node != UINT32_MAX) {
|
|
i = lastToken(tree, sentinel_node) + 1;
|
|
} else if (size == 1 || size == 3) {
|
|
// many or c: start after main_token.
|
|
i = main_tok + 1;
|
|
} else {
|
|
i = main_tok;
|
|
}
|
|
uint32_t end = firstToken(tree, child_type_node);
|
|
while (i < end) {
|
|
TokenizerTag tt = tree->tokens.tags[i];
|
|
if (tt == TOKEN_KEYWORD_ALLOWZERO) {
|
|
has_allowzero = true;
|
|
} else if (tt == TOKEN_KEYWORD_CONST) {
|
|
has_const = true;
|
|
} else if (tt == TOKEN_KEYWORD_VOLATILE) {
|
|
has_volatile = true;
|
|
} else if (tt == TOKEN_KEYWORD_ALIGN) {
|
|
// Skip over align expression.
|
|
if (bit_range_end != UINT32_MAX)
|
|
i = lastToken(tree, bit_range_end) + 1;
|
|
else if (align_node != UINT32_MAX)
|
|
i = lastToken(tree, align_node) + 1;
|
|
}
|
|
i++;
|
|
}
|
|
}
    // C pointers always allow address zero, so an explicit `allowzero`
    // on a [*c] pointer is a compile error (AstGen.zig:3840-3842).
if (size == 3 && has_allowzero) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Save source cursor before typeExpr so we can restore it before each
|
|
// trailing expression (AstGen.zig:3844-3846).
|
|
uint32_t saved_source_offset = ag->source_offset;
|
|
uint32_t saved_source_line = ag->source_line;
|
|
uint32_t saved_source_column = ag->source_column;
|
|
|
|
// Evaluate element type (AstGen.zig:3847).
|
|
uint32_t elem_type = typeExpr(gz, scope, child_type_node);
|
|
|
|
// Evaluate trailing expressions (AstGen.zig:3856-3897).
|
|
uint32_t sentinel_ref = ZIR_REF_NONE;
|
|
uint32_t align_ref = ZIR_REF_NONE;
|
|
uint32_t addrspace_ref = ZIR_REF_NONE;
|
|
uint32_t bit_start_ref = ZIR_REF_NONE;
|
|
uint32_t bit_end_ref = ZIR_REF_NONE;
|
|
uint32_t trailing_count = 0;
|
|
|
|
if (sentinel_node != UINT32_MAX) {
|
|
// Restore source cursor (AstGen.zig:3859-3861).
|
|
ag->source_offset = saved_source_offset;
|
|
ag->source_line = saved_source_line;
|
|
ag->source_column = saved_source_column;
|
|
uint32_t reason = (size == 2) ? COMPTIME_REASON_SLICE_SENTINEL
|
|
: COMPTIME_REASON_POINTER_SENTINEL;
|
|
ResultLoc srl = {
|
|
.tag = RL_TY, .data = elem_type, .src_node = 0, .ctx = RI_CTX_NONE
|
|
};
|
|
sentinel_ref = comptimeExpr(gz, scope, srl, sentinel_node, reason);
|
|
trailing_count++;
|
|
}
|
|
if (addrspace_node != UINT32_MAX) {
|
|
// Restore source cursor (AstGen.zig:3876-3878).
|
|
ag->source_offset = saved_source_offset;
|
|
ag->source_line = saved_source_line;
|
|
ag->source_column = saved_source_column;
|
|
// Upstream creates addrspace_ty via addBuiltinValue, we don't have
|
|
// that yet, so pass RL_NONE (matching previous behavior).
|
|
addrspace_ref = comptimeExpr(
|
|
gz, scope, RL_NONE_VAL, addrspace_node, COMPTIME_REASON_ADDRSPACE);
|
|
trailing_count++;
|
|
}
|
|
if (align_node != UINT32_MAX) {
|
|
// Restore source cursor (AstGen.zig:3885-3887).
|
|
ag->source_offset = saved_source_offset;
|
|
ag->source_line = saved_source_line;
|
|
ag->source_column = saved_source_column;
|
|
ResultLoc arl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_U29_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
align_ref
|
|
= comptimeExpr(gz, scope, arl, align_node, COMPTIME_REASON_ALIGN);
|
|
trailing_count++;
|
|
}
|
|
if (bit_range_start != UINT32_MAX) {
|
|
ResultLoc brl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_U16_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
bit_start_ref = comptimeExpr(
|
|
gz, scope, brl, bit_range_start, COMPTIME_REASON_TYPE);
|
|
bit_end_ref = comptimeExpr(
|
|
gz, scope, brl, bit_range_end, COMPTIME_REASON_TYPE);
|
|
trailing_count += 2;
|
|
}
|
|
|
|
// Build PtrType payload: { elem_type, src_node } + trailing
|
|
// (AstGen.zig:3905-3921).
|
|
ensureExtraCapacity(ag, 2 + trailing_count);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = elem_type;
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
if (sentinel_ref != ZIR_REF_NONE)
|
|
ag->extra[ag->extra_len++] = sentinel_ref;
|
|
if (align_ref != ZIR_REF_NONE)
|
|
ag->extra[ag->extra_len++] = align_ref;
|
|
if (addrspace_ref != ZIR_REF_NONE)
|
|
ag->extra[ag->extra_len++] = addrspace_ref;
|
|
if (bit_start_ref != ZIR_REF_NONE) {
|
|
ag->extra[ag->extra_len++] = bit_start_ref;
|
|
ag->extra[ag->extra_len++] = bit_end_ref;
|
|
}
|
|
|
|
// Build flags packed byte (AstGen.zig:3927-3934).
|
|
uint8_t flags = 0;
|
|
if (has_allowzero)
|
|
flags |= (1 << 0); // is_allowzero
|
|
if (!has_const)
|
|
flags |= (1 << 1); // is_mutable
|
|
if (has_volatile)
|
|
flags |= (1 << 2); // is_volatile
|
|
if (sentinel_ref != ZIR_REF_NONE)
|
|
flags |= (1 << 3); // has_sentinel
|
|
if (align_ref != ZIR_REF_NONE)
|
|
flags |= (1 << 4); // has_align
|
|
if (addrspace_ref != ZIR_REF_NONE)
|
|
flags |= (1 << 5); // has_addrspace
|
|
if (bit_start_ref != ZIR_REF_NONE)
|
|
flags |= (1 << 6); // has_bit_range
|
|
|
|
ZirInstData data;
|
|
data.ptr_type.flags = flags;
|
|
data.ptr_type.size = size;
|
|
data.ptr_type._pad = 0;
|
|
data.ptr_type.payload_index = payload_index;
|
|
return addInstruction(gz, ZIR_INST_PTR_TYPE, data);
|
|
}
|
|
|
|
// --- arrayType (AstGen.zig:940) ---
|
|
|
|
static uint32_t arrayTypeExpr(GenZir* gz, Scope* scope, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// data.lhs = length expr node, data.rhs = element type node.
|
|
// Check for `_` identifier → compile error (AstGen.zig:3950-3953).
|
|
if (tree->nodes.tags[nd.lhs] == AST_NODE_IDENTIFIER
|
|
&& isUnderscoreIdent(tree, nd.lhs)) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
ResultLoc len_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t len
|
|
= comptimeExpr(gz, scope, len_rl, nd.lhs, COMPTIME_REASON_TYPE);
|
|
uint32_t elem_type = typeExpr(gz, scope, nd.rhs);
|
|
return addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, node, len, elem_type);
|
|
}
|
|
|
|
// --- arrayInitExpr (AstGen.zig:1431) ---
// Handles typed array init: [_]T{...}, [_:s]T{...}, and [N]T{...}.
static uint32_t arrayInitExprTyped(GenZir* gz, Scope* scope, uint32_t node,
|
|
const uint32_t* elements, uint32_t elem_count, uint32_t ty_inst,
|
|
uint32_t elem_ty, bool is_ref);
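// Two paths in arrayInitExpr: for an inferred length ([_]T / [_:s]T) the
// length comes from the element count and the array type is built inline
// via array_type / array_type_sentinel; for an explicit type the type
// expression is evaluated and a validate_array_init_ty is emitted.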
static uint32_t arrayInitExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Get elements and type expression based on the variant.
|
|
uint32_t type_expr_node = 0;
|
|
uint32_t elem_buf[2];
|
|
const uint32_t* elements = NULL;
|
|
uint32_t elem_count = 0;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_ARRAY_INIT_ONE:
|
|
case AST_NODE_ARRAY_INIT_ONE_COMMA: {
|
|
type_expr_node = nd.lhs;
|
|
if (nd.rhs != 0) {
|
|
elem_buf[0] = nd.rhs;
|
|
elements = elem_buf;
|
|
elem_count = 1;
|
|
}
|
|
break;
|
|
}
|
|
case AST_NODE_ARRAY_INIT:
|
|
case AST_NODE_ARRAY_INIT_COMMA: {
|
|
// data = node_and_extra: lhs = type_expr, rhs = extra_index.
|
|
// extra[rhs] = SubRange.start, extra[rhs+1] = SubRange.end.
|
|
// Elements are extra_data[start..end].
|
|
type_expr_node = nd.lhs;
|
|
uint32_t extra_idx = nd.rhs;
|
|
uint32_t range_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
|
|
elements = tree->extra_data.arr + range_start;
|
|
elem_count = range_end - range_start;
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
if (type_expr_node == 0 || elem_count == 0) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Determine array_ty and elem_ty (AstGen.zig:1443-1482).
|
|
uint32_t array_ty = ZIR_REF_NONE;
|
|
uint32_t elem_ty = ZIR_REF_NONE;
|
|
|
|
// Check if the type is [_]T or [_:s]T (inferred length)
|
|
// (AstGen.zig:1446-1474, fullArrayType handles both array_type and
|
|
// array_type_sentinel).
|
|
AstNodeTag type_tag = tree->nodes.tags[type_expr_node];
|
|
if (type_tag == AST_NODE_ARRAY_TYPE
|
|
|| type_tag == AST_NODE_ARRAY_TYPE_SENTINEL) {
|
|
AstData type_nd = tree->nodes.datas[type_expr_node];
|
|
uint32_t elem_count_node = type_nd.lhs;
|
|
|
|
// This intentionally does not support `@"_"` syntax.
|
|
if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER
|
|
&& isUnderscoreIdent(tree, elem_count_node)) {
|
|
// Inferred length: addInt(elem_count) (AstGen.zig:1452).
|
|
uint32_t len_inst = addInt(gz, elem_count);
|
|
|
|
if (type_tag == AST_NODE_ARRAY_TYPE) {
|
|
// [_]T: elem_type_node is rhs (AstGen.zig:1454-1459).
|
|
uint32_t elem_type_node = type_nd.rhs;
|
|
elem_ty = typeExpr(gz, scope, elem_type_node);
|
|
array_ty = addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE,
|
|
type_expr_node, len_inst, elem_ty);
|
|
} else {
|
|
// [_:s]T: sentinel and elem_type from extra data
|
|
// (AstGen.zig:1460-1473).
|
|
uint32_t sentinel_node = tree->extra_data.arr[type_nd.rhs];
|
|
uint32_t elem_type_node
|
|
= tree->extra_data.arr[type_nd.rhs + 1];
|
|
elem_ty = typeExpr(gz, scope, elem_type_node);
|
|
ResultLoc sent_rl = { .tag = RL_TY,
|
|
.data = elem_ty,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t sentinel = comptimeExpr(gz, scope, sent_rl,
|
|
sentinel_node, COMPTIME_REASON_ARRAY_SENTINEL);
|
|
array_ty = addPlNodeTriple(gz, ZIR_INST_ARRAY_TYPE_SENTINEL,
|
|
type_expr_node, len_inst, elem_ty, sentinel);
|
|
}
|
|
goto typed_init;
|
|
}
|
|
}
|
|
|
|
// Non-inferred length: evaluate type normally (AstGen.zig:1476-1481).
|
|
array_ty = typeExpr(gz, scope, type_expr_node);
|
|
// validate_array_init_ty: ArrayInit{ty, init_count}
|
|
addPlNodeBin(
|
|
gz, ZIR_INST_VALIDATE_ARRAY_INIT_TY, node, array_ty, elem_count);
|
|
elem_ty = ZIR_REF_NONE;
|
|
|
|
typed_init:
|
|
// Typed inits do not use RLS for language simplicity
|
|
// (AstGen.zig:1484-1513).
|
|
if (rl.tag == RL_DISCARD) {
|
|
// discard RL: evaluate elements but don't emit array_init
|
|
// (AstGen.zig:1487-1506).
|
|
if (elem_ty != ZIR_REF_NONE) {
|
|
ResultLoc elem_rl
|
|
= { .tag = RL_TY, .data = elem_ty, .src_node = 0 };
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
exprRl(gz, scope, elem_rl, elements[i]);
|
|
}
|
|
} else {
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t this_elem_ty
|
|
= addBin(gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, array_ty, i);
|
|
ResultLoc elem_rl
|
|
= { .tag = RL_TY, .data = this_elem_ty, .src_node = 0 };
|
|
exprRl(gz, scope, elem_rl, elements[i]);
|
|
}
|
|
}
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
if (rl.tag == RL_REF) {
|
|
// ref RL: arrayInitExprTyped with is_ref=true
|
|
// (AstGen.zig:1507).
|
|
return arrayInitExprTyped(
|
|
gz, scope, node, elements, elem_count, array_ty, elem_ty, true);
|
|
}
|
|
|
|
// All other RLs: arrayInitExprTyped + rvalue (AstGen.zig:1508-1511).
|
|
uint32_t array_inst = arrayInitExprTyped(
|
|
gz, scope, node, elements, elem_count, array_ty, elem_ty, false);
|
|
return rvalue(gz, rl, array_inst, node);
|
|
}
|
|
|
|
// arrayInitExprTyped (AstGen.zig:1598-1642).
// Emits array_init or array_init_ref instruction.
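// The payload written to extra is { operands_len, ty_inst, elem refs... },
// where operands_len counts the type operand plus one ref per element.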
static uint32_t arrayInitExprTyped(GenZir* gz, Scope* scope, uint32_t node,
|
|
const uint32_t* elements, uint32_t elem_count, uint32_t ty_inst,
|
|
uint32_t elem_ty, bool is_ref) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
uint32_t operands_len = elem_count + 1; // +1 for type
|
|
ensureExtraCapacity(ag, 1 + operands_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = operands_len;
|
|
ag->extra[ag->extra_len++] = ty_inst;
|
|
uint32_t extra_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
|
|
if (elem_ty != ZIR_REF_NONE) {
|
|
// Known elem type: use coerced_ty RL (AstGen.zig:1617-1623).
|
|
ResultLoc elem_rl
|
|
= { .tag = RL_COERCED_TY, .data = elem_ty, .src_node = 0 };
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
} else {
|
|
// Unknown elem type: use array_init_elem_type per element
|
|
// (AstGen.zig:1625-1637).
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t this_elem_ty
|
|
= addBin(gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, ty_inst, i);
|
|
ResultLoc elem_rl = {
|
|
.tag = RL_COERCED_TY, .data = this_elem_ty, .src_node = 0
|
|
};
|
|
uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
}
|
|
|
|
ZirInstTag init_tag
|
|
= is_ref ? ZIR_INST_ARRAY_INIT_REF : ZIR_INST_ARRAY_INIT;
|
|
return addPlNodePayloadIndex(gz, init_tag, node, payload_index);
|
|
}
|
|
|
|
// --- simpleBinOp (AstGen.zig:2204) ---
|
|
|
|
static uint32_t simpleBinOp(
|
|
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag op_tag) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
AstData nd = ag->tree->nodes.datas[node];
|
|
uint32_t lhs = exprRl(gz, scope, RL_NONE_VAL, nd.lhs);
|
|
// For arithmetic ops, advance cursor before RHS (AstGen.zig:6245-6256).
|
|
uint32_t saved_line = 0, saved_col = 0;
|
|
bool need_dbg = false;
|
|
if (op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB
|
|
|| op_tag == ZIR_INST_MUL || op_tag == ZIR_INST_DIV
|
|
|| op_tag == ZIR_INST_MOD_REM) {
|
|
if (!gz->is_comptime) {
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
}
|
|
saved_line = ag->source_line - gz->decl_line;
|
|
saved_col = ag->source_column;
|
|
need_dbg = true;
|
|
}
|
|
uint32_t rhs = exprRl(gz, scope, RL_NONE_VAL, nd.rhs);
|
|
if (need_dbg) {
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
}
|
|
return addPlNodeBin(gz, op_tag, node, lhs, rhs);
|
|
}
// --- shiftOp (AstGen.zig:9978) ---
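// The shift amount is evaluated with a result type of
// typeof_log2_int_type(lhs), so e.g. shifting a u32 steers the RHS
// toward u5.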
static uint32_t shiftOp(
|
|
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag tag) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
AstData nd = ag->tree->nodes.datas[node];
|
|
uint32_t lhs = exprRl(gz, scope, RL_NONE_VAL, nd.lhs);
|
|
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
|
|
uint32_t log2_int_type
|
|
= addUnNode(gz, ZIR_INST_TYPEOF_LOG2_INT_TYPE, lhs, nd.lhs);
|
|
ResultLoc rhs_rl = { .tag = RL_TY,
|
|
.data = log2_int_type,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_SHIFT_OP };
|
|
uint32_t rhs = exprRl(gz, scope, rhs_rl, nd.rhs);
|
|
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
|
|
return addPlNodeBin(gz, tag, node, lhs, rhs);
|
|
}
|
|
|
|
// --- multilineStringLiteral (AstGen.zig:8645) ---
|
|
// Port of strLitNodeAsString for multiline strings.
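// For example:
//     \\first
//     \\second
// is stored in string_bytes as "first\nsecond" followed by a null
// terminator: a '\n' is prepended to every line after the first.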
|
|
static uint32_t multilineStringLiteral(
|
|
GenZir* gz, Scope* scope, uint32_t node) {
|
|
(void)scope;
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t start_tok = nd.lhs;
|
|
uint32_t end_tok = nd.rhs;
|
|
|
|
uint32_t str_index = ag->string_bytes_len;
|
|
|
|
// Copy each line's contents; lines after the first get a leading '\n'.
|
|
for (uint32_t tok_i = start_tok; tok_i <= end_tok; tok_i++) {
|
|
uint32_t tok_start = tree->tokens.starts[tok_i];
|
|
const char* source = tree->source;
|
|
// Skip leading `\\` (2 chars).
|
|
uint32_t content_start = tok_start + 2;
|
|
// Find end of line.
|
|
uint32_t content_end = content_start;
|
|
while (content_end < tree->source_len && source[content_end] != '\n')
|
|
content_end++;
|
|
uint32_t line_len = content_end - content_start;
|
|
|
|
if (tok_i > start_tok) {
|
|
// Prepend newline for lines after the first.
|
|
ensureStringBytesCapacity(ag, line_len + 1);
|
|
ag->string_bytes[ag->string_bytes_len++] = '\n';
|
|
} else {
|
|
ensureStringBytesCapacity(ag, line_len);
|
|
}
|
|
memcpy(ag->string_bytes + ag->string_bytes_len, source + content_start,
|
|
line_len);
|
|
ag->string_bytes_len += line_len;
|
|
}
|
|
|
|
uint32_t len = ag->string_bytes_len - str_index;
|
|
ensureStringBytesCapacity(ag, 1);
|
|
ag->string_bytes[ag->string_bytes_len++] = 0; // null terminator
|
|
|
|
ZirInstData data;
|
|
data.str.start = str_index;
|
|
data.str.len = len;
|
|
return addInstruction(gz, ZIR_INST_STR, data);
|
|
}
|
|
|
|
// --- ret (AstGen.zig:8119) ---
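// The error-return-trace handling below depends on nodeMayEvalToError:
// .never restores the trace index unconditionally, .always would also run
// error defers (TODO here), and .maybe without error defers emits
// restore_err_ret_index_fn_entry with the returned value as its operand.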
|
|
static uint32_t retExpr(GenZir* gz, Scope* scope, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
// AstGen.zig:8123: return outside function is an error.
|
|
if (ag->fn_block == NULL) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
|
|
// AstGen.zig:8127-8135: cannot return from defer expression.
|
|
if (gz->any_defer_node != UINT32_MAX) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
|
|
// Ensure debug line/column information is emitted for this return
|
|
// expression (AstGen.zig:8141-8144).
|
|
if (!gz->is_comptime) {
|
|
emitDbgNode(gz, node);
|
|
}
|
|
uint32_t ret_lc_line = ag->source_line - gz->decl_line;
|
|
uint32_t ret_lc_column = ag->source_column;
|
|
|
|
const Scope* defer_outer = &((GenZir*)ag->fn_block)->base;
|
|
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t operand_node = nd.lhs; // optional
|
|
|
|
if (operand_node == 0) {
|
|
// Void return (AstGen.zig:8148-8156).
|
|
genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
|
|
// Restore error trace unconditionally (AstGen.zig:8153).
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = ZIR_REF_NONE;
|
|
rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(
|
|
gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
|
|
addUnNode(gz, ZIR_INST_RET_NODE, ZIR_REF_VOID_VALUE, node);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
|
|
// Fast path: return error.Foo (AstGen.zig:8159-8175).
|
|
if (tree->nodes.tags[operand_node] == AST_NODE_ERROR_VALUE) {
|
|
uint32_t error_token = tree->nodes.main_tokens[operand_node] + 2;
|
|
uint32_t err_name_str = identAsString(ag, error_token);
|
|
DeferCounts dc = countDefers(defer_outer, scope);
|
|
if (!dc.need_err_code) {
|
|
genDefers(gz, defer_outer, scope, DEFER_BOTH_SANS_ERR);
|
|
emitDbgStmt(gz, ret_lc_line, ret_lc_column);
|
|
addStrTok(gz, ZIR_INST_RET_ERR_VALUE, err_name_str, error_token);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
// need_err_code path: not implemented yet, fall through to general.
|
|
}
|
|
|
|
// Evaluate operand with result location (AstGen.zig:8178-8186).
|
|
// If nodes_need_rl contains this return node, use ptr-based RL;
|
|
// otherwise use coerced_ty.
|
|
ResultLoc ret_rl = RL_NONE_VAL;
|
|
bool use_ptr = nodesNeedRlContains(ag, node);
|
|
uint32_t ret_ptr_inst = 0;
|
|
if (use_ptr) {
|
|
// Create ret_ptr instruction (AstGen.zig:8179).
|
|
ZirInstData rpdata;
|
|
rpdata.node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
ret_ptr_inst = addInstruction(gz, ZIR_INST_RET_PTR, rpdata);
|
|
ret_rl.tag = RL_PTR;
|
|
ret_rl.data = ret_ptr_inst;
|
|
} else if (ag->fn_ret_ty != 0) {
|
|
ret_rl.tag = RL_COERCED_TY;
|
|
ret_rl.data = ag->fn_ret_ty;
|
|
}
|
|
ret_rl.ctx = RI_CTX_RETURN;
|
|
// nameStratExpr with .func name strategy (AstGen.zig:8185).
|
|
uint32_t operand;
|
|
if (!nameStratExpr(
|
|
gz, scope, ret_rl, operand_node, 1 /* func */, &operand)) {
|
|
operand = reachableExpr(gz, scope, ret_rl, operand_node, node);
|
|
}
|
|
|
|
// Emit RESTORE_ERR_RET_INDEX based on nodeMayEvalToError
|
|
// (AstGen.zig:8188-8253).
|
|
int eval_to_err = nodeMayEvalToError(tree, operand_node);
|
|
if (eval_to_err == EVAL_TO_ERROR_NEVER) {
|
|
// Returning non-error: pop error trace unconditionally
|
|
// (AstGen.zig:8190-8198).
|
|
genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = ZIR_REF_NONE;
|
|
rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(
|
|
gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
|
|
emitDbgStmt(gz, ret_lc_line, ret_lc_column);
|
|
// addRet (AstGen.zig:13188-13194).
|
|
if (use_ptr) {
|
|
addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
|
|
} else {
|
|
addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
|
|
}
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
} else if (eval_to_err == EVAL_TO_ERROR_ALWAYS) {
|
|
// .always: emit both error defers and regular defers
|
|
// (AstGen.zig:8200-8206).
|
|
uint32_t err_code = use_ptr
|
|
? addUnNode(gz, ZIR_INST_LOAD, ret_ptr_inst, node)
|
|
: operand;
|
|
(void)err_code;
|
|
// TODO: genDefers with .both = err_code when errdefer is implemented.
|
|
genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
|
|
emitDbgStmt(gz, ret_lc_line, ret_lc_column);
|
|
if (use_ptr) {
|
|
addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
|
|
} else {
|
|
addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
|
|
}
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
} else {
|
|
// .maybe (AstGen.zig:8208-8252).
|
|
DeferCounts dc = countDefers(defer_outer, scope);
|
|
if (!dc.have_err) {
|
|
// Only regular defers; no branch needed (AstGen.zig:8210-8220).
|
|
genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
|
|
emitDbgStmt(gz, ret_lc_line, ret_lc_column);
|
|
uint32_t result = use_ptr
|
|
? addUnNode(gz, ZIR_INST_LOAD, ret_ptr_inst, node)
|
|
: operand;
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = result;
|
|
rdata.un_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(gz, ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY, rdata);
|
|
if (use_ptr) {
|
|
addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
|
|
} else {
|
|
addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
|
|
}
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
// have_err path: emit conditional branch (not yet implemented).
|
|
// Fall through to simplified path.
|
|
genDefers(gz, defer_outer, scope, DEFER_NORMAL_ONLY);
|
|
emitDbgStmt(gz, ret_lc_line, ret_lc_column);
|
|
if (use_ptr) {
|
|
addUnNode(gz, ZIR_INST_RET_LOAD, ret_ptr_inst, node);
|
|
} else {
|
|
addUnNode(gz, ZIR_INST_RET_NODE, operand, node);
|
|
}
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
}
|
|
|
|
// --- calleeExpr (AstGen.zig:10183) ---
|
|
// Returns a Callee describing either a direct callee ref or, for field
// access syntax, the object pointer plus the field name string index.
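// E.g. `obj.method(x)` evaluates `obj` with a .ref result location and
// becomes a field call, while `f(x)` simply evaluates `f` as the callee.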
|
|
|
|
typedef struct {
|
|
bool is_field;
|
|
uint32_t obj_ptr; // for field calls: ref to object
|
|
uint32_t field_name_start; // for field calls: string index
|
|
uint32_t direct; // for direct calls: ref to callee
|
|
} Callee;
|
|
|
|
static Callee calleeExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t fn_expr_node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[fn_expr_node];
|
|
|
|
if (tag == AST_NODE_FIELD_ACCESS) {
|
|
AstData nd = tree->nodes.datas[fn_expr_node];
|
|
uint32_t object_node = nd.lhs;
|
|
uint32_t field_ident = nd.rhs;
|
|
uint32_t str_index = identAsString(ag, field_ident);
|
|
// Evaluate object with .ref rl (AstGen.zig:10207).
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, object_node);
|
|
|
|
// Advance to main token (the `.` dot) — not first token
|
|
// (AstGen.zig:10209).
|
|
advanceSourceCursorToMainToken(ag, gz, fn_expr_node);
|
|
{
|
|
uint32_t line = ag->source_line - gz->decl_line;
|
|
uint32_t column = ag->source_column;
|
|
emitDbgStmt(gz, line, column);
|
|
}
|
|
|
|
Callee c;
|
|
c.is_field = true;
|
|
c.obj_ptr = lhs;
|
|
c.field_name_start = str_index;
|
|
c.direct = 0;
|
|
return c;
|
|
}
|
|
|
|
// enum_literal callee: decl literal call syntax (AstGen.zig:10217-10233).
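// E.g. `const s: S = .init(args);` with a known result type: the callee
// `.init` is resolved against that type via decl_literal_no_coerce
// instead of being treated as a plain enum literal.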
|
|
if (tag == AST_NODE_ENUM_LITERAL) {
|
|
uint32_t res_ty = rlResultType(gz, rl, fn_expr_node);
|
|
if (res_ty != 0) {
|
|
uint32_t str_index
|
|
= identAsString(ag, tree->nodes.main_tokens[fn_expr_node]);
|
|
uint32_t callee = addPlNodeBin(gz, ZIR_INST_DECL_LITERAL_NO_COERCE,
|
|
fn_expr_node, res_ty, str_index);
|
|
Callee c;
|
|
c.is_field = false;
|
|
c.direct = callee;
|
|
c.obj_ptr = 0;
|
|
c.field_name_start = 0;
|
|
return c;
|
|
}
|
|
// No result type: fall through to expr with rl=none.
|
|
}
|
|
|
|
// Default: direct call (AstGen.zig:10235).
|
|
Callee c;
|
|
c.is_field = false;
|
|
c.direct = expr(gz, scope, fn_expr_node);
|
|
c.obj_ptr = 0;
|
|
c.field_name_start = 0;
|
|
return c;
|
|
}
|
|
|
|
// --- callExpr (AstGen.zig:10058) ---
|
|
static uint32_t callExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Extract callee and args from AST.
|
|
uint32_t fn_expr_node;
|
|
uint32_t arg_buf[2];
|
|
const uint32_t* args = NULL;
|
|
uint32_t args_len = 0;
|
|
uint32_t lparen_tok;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_CALL_ONE:
|
|
case AST_NODE_CALL_ONE_COMMA: {
|
|
fn_expr_node = nd.lhs;
|
|
lparen_tok = tree->nodes.main_tokens[node];
|
|
if (nd.rhs != 0) {
|
|
arg_buf[0] = nd.rhs;
|
|
args = arg_buf;
|
|
args_len = 1;
|
|
}
|
|
break;
|
|
}
|
|
case AST_NODE_CALL:
|
|
case AST_NODE_CALL_COMMA: {
|
|
fn_expr_node = nd.lhs;
|
|
lparen_tok = tree->nodes.main_tokens[node];
|
|
uint32_t extra_idx = nd.rhs;
|
|
uint32_t range_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
|
|
args = tree->extra_data.arr + range_start;
|
|
args_len = range_end - range_start;
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
Callee callee = calleeExpr(gz, scope, rl, fn_expr_node);
|
|
|
|
// dbg_stmt before call (AstGen.zig:10078-10083).
|
|
{
|
|
advanceSourceCursor(ag, tree->tokens.starts[lparen_tok]);
|
|
uint32_t line = ag->source_line - gz->decl_line;
|
|
uint32_t column = ag->source_column;
|
|
emitDbgStmtForceCurrentIndex(gz, line, column);
|
|
}
|
|
|
|
// Reserve instruction slot for call (AstGen.zig:10093).
|
|
uint32_t call_index = ag->inst_len;
|
|
ensureInstCapacity(ag, 1);
|
|
memset(&ag->inst_datas[call_index], 0, sizeof(ZirInstData));
|
|
ag->inst_tags[call_index] = (ZirInstTag)0;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, call_index);
|
|
|
|
// Process arguments in sub-blocks (AstGen.zig:10096-10116).
|
|
// As upstream does, body lengths and body instructions are collected in
// a scratch array (scratch_extra) and copied into extra after the call
// payload header has been written.
|
|
uint32_t call_inst = call_index + ZIR_REF_START_INDEX;
|
|
ResultLoc arg_rl = { .tag = RL_COERCED_TY,
|
|
.data = call_inst,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_FN_ARG };
|
|
|
|
// Use scratch_extra to collect body lengths + body instructions,
|
|
// mirroring upstream's scratch array (AstGen.zig:10096-10116).
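// Scratch region layout relative to scratch_top: first args_len slots,
// where slot i records the total u32 count written for args 0..i
// (including these length slots), then each argument's fixed-up body
// instructions in order. The whole region is copied into extra verbatim.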
|
|
uint32_t scratch_top = ag->scratch_extra_len;
|
|
// Reserve space for cumulative body lengths (one per arg).
|
|
ensureScratchExtraCapacity(ag, args_len);
|
|
ag->scratch_extra_len += args_len;
|
|
|
|
for (uint32_t i = 0; i < args_len; i++) {
|
|
GenZir arg_block = makeSubBlock(gz, scope);
|
|
uint32_t arg_ref
|
|
= exprRl(&arg_block, &arg_block.base, arg_rl, args[i]);
|
|
|
|
// break_inline with param_node src (AstGen.zig:10108).
|
|
int32_t param_src
|
|
= (int32_t)args[i] - (int32_t)arg_block.decl_node_index;
|
|
makeBreakInline(&arg_block, call_index, arg_ref, param_src);
|
|
|
|
// Append arg_block body to scratch_extra (with ref_table fixups).
|
|
uint32_t raw_body_len = gzInstructionsLen(&arg_block);
|
|
const uint32_t* body = gzInstructionsSlice(&arg_block);
|
|
uint32_t fixup_len = countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
ensureScratchExtraCapacity(ag, fixup_len);
|
|
for (uint32_t j = 0; j < raw_body_len; j++) {
|
|
appendPossiblyRefdBodyInstScratch(ag, body[j]);
|
|
}
|
|
// Record cumulative body length (AstGen.zig:10114).
|
|
ag->scratch_extra[scratch_top + i]
|
|
= ag->scratch_extra_len - scratch_top;
|
|
gzUnstack(&arg_block);
|
|
}
|
|
|
|
// Build call payload (AstGen.zig:10118-10168).
|
|
// Upstream layout: [flags, callee/obj_ptr, field_name_start], then
|
|
// body_lengths + body_instructions from scratch.
|
|
// Flags layout (packed): modifier:u3, ensure_result_used:bool,
|
|
// pop_error_return_trace:bool, args_len:u27.
|
|
// pop_error_return_trace = !propagate_error_trace
|
|
// (AstGen.zig:10121-10124).
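// E.g. a plain two-argument call in a return/fn-arg context encodes
// flags as (2 << 5): modifier 0, ensure_result_used and
// pop_error_return_trace both clear.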
|
|
bool propagate_error_trace
|
|
= (rl.ctx == RI_CTX_ERROR_HANDLING_EXPR || rl.ctx == RI_CTX_RETURN
|
|
|| rl.ctx == RI_CTX_FN_ARG || rl.ctx == RI_CTX_CONST_INIT);
|
|
uint32_t flags = (propagate_error_trace ? 0u : (1u << 4))
|
|
| ((args_len & 0x7FFFFFFu) << 5); // args_len
|
|
|
|
if (callee.is_field) {
|
|
// FieldCall: {flags, obj_ptr, field_name_start} (AstGen.zig:10148).
|
|
ensureExtraCapacity(ag, 3 + (ag->scratch_extra_len - scratch_top));
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = flags;
|
|
ag->extra[ag->extra_len++] = callee.obj_ptr;
|
|
ag->extra[ag->extra_len++] = callee.field_name_start;
|
|
// Append scratch data (body lengths + body instructions).
|
|
if (args_len != 0) {
|
|
memcpy(ag->extra + ag->extra_len, ag->scratch_extra + scratch_top,
|
|
(ag->scratch_extra_len - scratch_top) * sizeof(uint32_t));
|
|
ag->extra_len += ag->scratch_extra_len - scratch_top;
|
|
}
|
|
ag->inst_tags[call_index] = ZIR_INST_FIELD_CALL;
|
|
ag->inst_datas[call_index].pl_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
ag->inst_datas[call_index].pl_node.payload_index = payload_index;
|
|
} else {
|
|
// Call: {flags, callee} (AstGen.zig:10128).
|
|
ensureExtraCapacity(ag, 2 + (ag->scratch_extra_len - scratch_top));
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = flags;
|
|
ag->extra[ag->extra_len++] = callee.direct;
|
|
// Append scratch data (body lengths + body instructions).
|
|
if (args_len != 0) {
|
|
memcpy(ag->extra + ag->extra_len, ag->scratch_extra + scratch_top,
|
|
(ag->scratch_extra_len - scratch_top) * sizeof(uint32_t));
|
|
ag->extra_len += ag->scratch_extra_len - scratch_top;
|
|
}
|
|
ag->inst_tags[call_index] = ZIR_INST_CALL;
|
|
ag->inst_datas[call_index].pl_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
ag->inst_datas[call_index].pl_node.payload_index = payload_index;
|
|
}
|
|
|
|
// Restore scratch (AstGen.zig:10097 defer).
|
|
ag->scratch_extra_len = scratch_top;
|
|
|
|
return call_index + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// structInitExprAnon (AstGen.zig:1865-1893).
|
|
// Anonymous struct init using struct_init_anon instruction.
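// Extra payload written below: { abs_node, abs_line, fields_len }
// followed by fields_len pairs of { field_name_string_index, init_ref }.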
|
|
static uint32_t structInitExprAnon(GenZir* gz, Scope* scope, uint32_t node,
|
|
const uint32_t* fields, uint32_t fields_len) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
ensureExtraCapacity(ag, 3 + fields_len * 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = node; // abs_node
|
|
ag->extra[ag->extra_len++] = ag->source_line; // abs_line
|
|
ag->extra[ag->extra_len++] = fields_len;
|
|
uint32_t items_start = ag->extra_len;
|
|
ag->extra_len += fields_len * 2;
|
|
|
|
for (uint32_t i = 0; i < fields_len; i++) {
|
|
uint32_t field_init = fields[i];
|
|
uint32_t name_token = firstToken(tree, field_init) - 2;
|
|
uint32_t str_index = identAsString(ag, name_token);
|
|
uint32_t init_ref = expr(gz, scope, field_init);
|
|
ag->extra[items_start + i * 2] = str_index;
|
|
ag->extra[items_start + i * 2 + 1] = init_ref;
|
|
}
|
|
|
|
return addPlNodePayloadIndex(
|
|
gz, ZIR_INST_STRUCT_INIT_ANON, node, payload_index);
|
|
}
|
|
|
|
// structInitExprTyped (AstGen.zig:1896-1931).
|
|
// Typed struct init using struct_init or struct_init_ref instruction.
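// Each field emits a struct_init_field_type instruction; the payload
// item stores that instruction's index (toIndex, not a ref) together
// with the coerced init value.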
|
|
static uint32_t structInitExprTyped(GenZir* gz, Scope* scope, uint32_t node,
|
|
const uint32_t* fields, uint32_t fields_len, uint32_t ty_inst,
|
|
bool is_ref) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
ensureExtraCapacity(ag, 3 + fields_len * 2);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = node; // abs_node
|
|
ag->extra[ag->extra_len++] = ag->source_line; // abs_line
|
|
ag->extra[ag->extra_len++] = fields_len;
|
|
uint32_t items_start = ag->extra_len;
|
|
ag->extra_len += fields_len * 2;
|
|
|
|
for (uint32_t i = 0; i < fields_len; i++) {
|
|
uint32_t field_init = fields[i];
|
|
uint32_t name_token = firstToken(tree, field_init) - 2;
|
|
uint32_t str_index = identAsString(ag, name_token);
|
|
uint32_t field_ty_inst = addPlNodeBin(gz,
|
|
ZIR_INST_STRUCT_INIT_FIELD_TYPE, field_init, ty_inst, str_index);
|
|
ResultLoc elem_rl = { .tag = RL_COERCED_TY,
|
|
.data = field_ty_inst,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t init_ref = exprRl(gz, scope, elem_rl, field_init);
|
|
ag->extra[items_start + i * 2]
|
|
= field_ty_inst - ZIR_REF_START_INDEX; // .toIndex()
|
|
ag->extra[items_start + i * 2 + 1] = init_ref;
|
|
}
|
|
|
|
ZirInstTag init_tag
|
|
= is_ref ? ZIR_INST_STRUCT_INIT_REF : ZIR_INST_STRUCT_INIT;
|
|
return addPlNodePayloadIndex(gz, init_tag, node, payload_index);
|
|
}
|
|
|
|
// --- structInitExpr (AstGen.zig:1674) ---
|
|
static uint32_t structInitExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Extract type_expr and fields.
|
|
uint32_t type_expr_node = 0; // 0 = anonymous (.{...})
|
|
uint32_t field_buf[2];
|
|
const uint32_t* fields = NULL;
|
|
uint32_t fields_len = 0;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: {
|
|
// .{.a = lhs, .b = rhs}
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
field_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
field_buf[idx++] = nd.rhs;
|
|
fields = field_buf;
|
|
fields_len = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_STRUCT_INIT_DOT:
|
|
case AST_NODE_STRUCT_INIT_DOT_COMMA: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
fields = tree->extra_data.arr + start;
|
|
fields_len = end - start;
|
|
break;
|
|
}
|
|
case AST_NODE_STRUCT_INIT_ONE:
|
|
case AST_NODE_STRUCT_INIT_ONE_COMMA: {
|
|
type_expr_node = nd.lhs;
|
|
if (nd.rhs != 0) {
|
|
field_buf[0] = nd.rhs;
|
|
fields = field_buf;
|
|
fields_len = 1;
|
|
}
|
|
break;
|
|
}
|
|
case AST_NODE_STRUCT_INIT:
|
|
case AST_NODE_STRUCT_INIT_COMMA: {
|
|
type_expr_node = nd.lhs;
|
|
uint32_t extra_idx = nd.rhs;
|
|
uint32_t range_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
|
|
fields = tree->extra_data.arr + range_start;
|
|
fields_len = range_end - range_start;
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
if (type_expr_node == 0 && fields_len == 0) {
|
|
// .{} — depends on result location (AstGen.zig:1687-1698).
|
|
if (rl.tag == RL_REF_COERCED_TY) {
|
|
return addUnNode(
|
|
gz, ZIR_INST_STRUCT_INIT_EMPTY_REF_RESULT, rl.data, node);
|
|
}
|
|
if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY) {
|
|
return addUnNode(
|
|
gz, ZIR_INST_STRUCT_INIT_EMPTY_RESULT, rl.data, node);
|
|
}
|
|
if (rl.tag == RL_DISCARD) {
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
if (rl.tag == RL_PTR) {
|
|
// AstGen.zig:1691-1696.
|
|
uint32_t ty_inst = rlResultType(gz, rl, node);
|
|
uint32_t val = addUnNode(
|
|
gz, ZIR_INST_STRUCT_INIT_EMPTY_RESULT, ty_inst, node);
|
|
return rvalue(gz, rl, val, node);
|
|
}
|
|
// RL_NONE, RL_REF, RL_INFERRED_PTR (AstGen.zig:1697-1699).
|
|
return rvalue(gz, rl, ZIR_REF_EMPTY_TUPLE, node);
|
|
}
|
|
|
|
// Pre-register all field names to match upstream string ordering.
|
|
// Upstream has a duplicate name check (AstGen.zig:1756-1806) that
|
|
// adds all field names to string_bytes before evaluating values.
|
|
for (uint32_t i = 0; i < fields_len; i++) {
|
|
uint32_t name_token = firstToken(tree, fields[i]) - 2;
|
|
identAsString(ag, name_token);
|
|
}
|
|
|
|
if (type_expr_node == 0 && fields_len > 0) {
|
|
// Anonymous struct init with fields (AstGen.zig:1821-1861).
|
|
switch (rl.tag) {
|
|
case RL_NONE:
|
|
// structInitExprAnon (AstGen.zig:1822, 1865-1893).
|
|
return structInitExprAnon(gz, scope, node, fields, fields_len);
|
|
case RL_DISCARD: {
|
|
// Even if discarding we must perform side-effects
|
|
// (AstGen.zig:1823-1828).
|
|
for (uint32_t i = 0; i < fields_len; i++)
|
|
exprRl(gz, scope, RL_DISCARD_VAL, fields[i]);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
case RL_REF: {
|
|
// structInitExprAnon + ref (AstGen.zig:1830-1833).
|
|
uint32_t result
|
|
= structInitExprAnon(gz, scope, node, fields, fields_len);
|
|
return addUnTok(gz, ZIR_INST_REF, result, firstToken(tree, node));
|
|
}
|
|
case RL_REF_COERCED_TY: {
|
|
// Get elem type, validate, structInitExprTyped(is_ref=true)
|
|
// (AstGen.zig:1834-1837).
|
|
uint32_t result_ty_inst
|
|
= addUnNode(gz, ZIR_INST_ELEM_TYPE, rl.data, node);
|
|
addUnNode(gz, ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY,
|
|
result_ty_inst, node);
|
|
return structInitExprTyped(
|
|
gz, scope, node, fields, fields_len, result_ty_inst, true);
|
|
}
|
|
case RL_TY:
|
|
case RL_COERCED_TY: {
|
|
// validate_struct_init_result_ty +
|
|
// structInitExprTyped(is_ref=false) (AstGen.zig:1839-1841).
|
|
uint32_t ty_inst = rl.data;
|
|
addUnNode(
|
|
gz, ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY, ty_inst, node);
|
|
return structInitExprTyped(
|
|
gz, scope, node, fields, fields_len, ty_inst, false);
|
|
}
|
|
case RL_PTR: {
|
|
// structInitExprPtr (AstGen.zig:1843-1846, 1934-1964).
|
|
uint32_t struct_ptr_inst
|
|
= addUnNode(gz, ZIR_INST_OPT_EU_BASE_PTR_INIT, rl.data, node);
|
|
// Block payload: body_len = fields_len.
|
|
ensureExtraCapacity(ag, 1 + fields_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = fields_len;
|
|
uint32_t items_start = ag->extra_len;
|
|
ag->extra_len += fields_len;
|
|
|
|
for (uint32_t i = 0; i < fields_len; i++) {
|
|
uint32_t field_init = fields[i];
|
|
uint32_t name_token = firstToken(tree, field_init) - 2;
|
|
uint32_t str_index = identAsString(ag, name_token);
|
|
// struct_init_field_ptr (AstGen.zig:1954-1957).
|
|
uint32_t field_ptr
|
|
= addPlNodeBin(gz, ZIR_INST_STRUCT_INIT_FIELD_PTR,
|
|
field_init, struct_ptr_inst, str_index);
|
|
ag->extra[items_start + i]
|
|
= field_ptr - ZIR_REF_START_INDEX; // .toIndex()
|
|
// Evaluate init with ptr RL (AstGen.zig:1960).
|
|
ResultLoc ptr_rl = { .tag = RL_PTR,
|
|
.data = field_ptr,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
exprRl(gz, scope, ptr_rl, field_init);
|
|
}
|
|
addPlNodePayloadIndex(
|
|
gz, ZIR_INST_VALIDATE_PTR_STRUCT_INIT, node, payload_index);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
case RL_INFERRED_PTR: {
|
|
// Standard anon init + rvalue store (AstGen.zig:1847-1852).
|
|
uint32_t struct_inst
|
|
= structInitExprAnon(gz, scope, node, fields, fields_len);
|
|
return rvalue(gz, rl, struct_inst, node);
|
|
}
|
|
case RL_DESTRUCTURE:
|
|
// Struct value cannot be destructured (AstGen.zig:1854-1859).
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Typed init: evaluate type, emit struct_init_empty or struct_init.
|
|
if (type_expr_node != 0 && fields_len == 0) {
|
|
// Check for [_]T{} pattern (AstGen.zig:1707-1753).
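// E.g. `[_]u8{}`: the length is inferred from zero elements, so this
// lowers to an array type of length zero plus struct_init_empty.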
|
|
AstNodeTag type_tag = tree->nodes.tags[type_expr_node];
|
|
if (type_tag == AST_NODE_ARRAY_TYPE
|
|
|| type_tag == AST_NODE_ARRAY_TYPE_SENTINEL) {
|
|
AstData type_nd = tree->nodes.datas[type_expr_node];
|
|
uint32_t elem_count_node = type_nd.lhs;
|
|
if (tree->nodes.tags[elem_count_node] == AST_NODE_IDENTIFIER
|
|
&& isUnderscoreIdent(tree, elem_count_node)) {
|
|
// Inferred length with 0 fields → length 0.
|
|
if (type_tag == AST_NODE_ARRAY_TYPE) {
|
|
uint32_t elem_type = typeExpr(gz, scope, type_nd.rhs);
|
|
uint32_t array_type_inst
|
|
= addPlNodeBin(gz, ZIR_INST_ARRAY_TYPE, type_expr_node,
|
|
ZIR_REF_ZERO_USIZE, elem_type);
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_STRUCT_INIT_EMPTY,
|
|
array_type_inst, node),
|
|
node);
|
|
}
|
|
// ARRAY_TYPE_SENTINEL: extra[rhs] = sentinel, extra[rhs+1]
|
|
// = elem_type
|
|
uint32_t sentinel_node = tree->extra_data.arr[type_nd.rhs];
|
|
uint32_t elem_type_node
|
|
= tree->extra_data.arr[type_nd.rhs + 1];
|
|
uint32_t elem_type = typeExpr(gz, scope, elem_type_node);
|
|
ResultLoc sent_rl = { .tag = RL_TY,
|
|
.data = elem_type,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t sentinel = comptimeExpr(gz, scope, sent_rl,
|
|
sentinel_node, COMPTIME_REASON_ARRAY_SENTINEL);
|
|
uint32_t array_type_inst = addPlNodeTriple(gz,
|
|
ZIR_INST_ARRAY_TYPE_SENTINEL, type_expr_node,
|
|
ZIR_REF_ZERO_USIZE, elem_type, sentinel);
|
|
return rvalue(gz, rl,
|
|
addUnNode(
|
|
gz, ZIR_INST_STRUCT_INIT_EMPTY, array_type_inst, node),
|
|
node);
|
|
}
|
|
}
|
|
uint32_t ty_inst = typeExpr(gz, scope, type_expr_node);
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_STRUCT_INIT_EMPTY, ty_inst, node), node);
|
|
}
|
|
|
|
// Typed struct init with fields (AstGen.zig:1808-1818).
|
|
if (type_expr_node != 0 && fields_len > 0) {
|
|
uint32_t ty_inst = typeExpr(gz, scope, type_expr_node);
|
|
addUnNode(gz, ZIR_INST_VALIDATE_STRUCT_INIT_TY, ty_inst, node);
|
|
// Upstream: .ref => structInitExprTyped(is_ref=true)
|
|
// else => rvalue(structInitExprTyped(is_ref=false))
|
|
if (rl.tag == RL_REF) {
|
|
return structInitExprTyped(
|
|
gz, scope, node, fields, fields_len, ty_inst, true);
|
|
}
|
|
uint32_t struct_inst = structInitExprTyped(
|
|
gz, scope, node, fields, fields_len, ty_inst, false);
|
|
return rvalue(gz, rl, struct_inst, node);
|
|
}
|
|
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// --- tryExpr (AstGen.zig:5957) ---
|
|
static uint32_t tryExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
AstData nd = ag->tree->nodes.datas[node];
|
|
uint32_t operand_node = nd.lhs;
|
|
|
|
if (!gz->is_comptime) {
|
|
emitDbgNode(gz, node);
|
|
}
|
|
uint32_t try_lc_line = ag->source_line - gz->decl_line;
|
|
uint32_t try_lc_column = ag->source_column;
|
|
|
|
// Determine operand rl and block tag based on result location
|
|
// (AstGen.zig:5989-5992).
|
|
ResultLoc operand_rl;
|
|
ZirInstTag block_tag;
|
|
ZirInstTag err_tag;
|
|
if (RL_IS_REF(rl)) {
|
|
operand_rl = RL_REF_VAL;
|
|
block_tag = ZIR_INST_TRY_PTR;
|
|
err_tag = ZIR_INST_ERR_UNION_CODE_PTR;
|
|
} else {
|
|
operand_rl = RL_NONE_VAL;
|
|
block_tag = ZIR_INST_TRY;
|
|
err_tag = ZIR_INST_ERR_UNION_CODE;
|
|
}
|
|
operand_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
|
|
|
|
// Evaluate operand (AstGen.zig:5993-6006).
|
|
uint32_t operand = exprRl(gz, scope, operand_rl, operand_node);
|
|
|
|
// Create try block instruction (AstGen.zig:6008).
|
|
uint32_t try_inst = makeBlockInst(ag, block_tag, gz, node);
|
|
gzAppendInstruction(gz, try_inst);
|
|
|
|
// Else scope: extract error code, return it (AstGen.zig:6011-6025).
|
|
GenZir else_scope = makeSubBlock(gz, scope);
|
|
|
|
uint32_t err_code = addUnNode(&else_scope, err_tag, operand, node);
|
|
|
|
// Emit defers for error path (AstGen.zig:6019).
|
|
if (ag->fn_block != NULL) {
|
|
const Scope* fn_block_scope = &((GenZir*)ag->fn_block)->base;
|
|
genDefers(&else_scope, fn_block_scope, scope, DEFER_BOTH_SANS_ERR);
|
|
}
|
|
|
|
// Emit dbg_stmt at try keyword for error return tracing (AstGen.zig:6020).
|
|
emitDbgStmt(&else_scope, try_lc_line, try_lc_column);
|
|
|
|
// ret_node with error code (AstGen.zig:6021).
|
|
addUnNode(&else_scope, ZIR_INST_RET_NODE, err_code, node);
|
|
|
|
setTryBody(ag, &else_scope, try_inst, operand);
|
|
// else_scope unstacked by setTryBody.
|
|
|
|
// For ref/ref_coerced_ty, return directly; otherwise rvalue
|
|
// (AstGen.zig:6025-6028).
|
|
uint32_t result = try_inst + ZIR_REF_START_INDEX; // toRef()
|
|
if (RL_IS_REF(rl))
|
|
return result;
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
|
|
// --- boolBinOp (AstGen.zig:6274) ---
|
|
// Short-circuiting boolean and/or.
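// `a and b` lowers to: evaluate `a`, then a bool_br_and whose trailing
// body evaluates `b` and break_inlines its value back to the bool_br;
// `or` is identical with bool_br_or.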
|
|
|
|
static uint32_t boolBinOp(
|
|
GenZir* gz, Scope* scope, uint32_t node, ZirInstTag zir_tag) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
AstData nd = ag->tree->nodes.datas[node];
|
|
uint32_t lhs_node = nd.lhs;
|
|
uint32_t rhs_node = nd.rhs;
|
|
|
|
// Evaluate LHS (AstGen.zig:6285).
|
|
uint32_t lhs = expr(gz, scope, lhs_node);
|
|
|
|
// Reserve the bool_br instruction (payload set later)
|
|
// (AstGen.zig:6286).
|
|
uint32_t bool_br = reserveInstructionIndex(ag);
|
|
gzAppendInstruction(gz, bool_br);
|
|
|
|
// Evaluate RHS in sub-block (AstGen.zig:6288-6293).
|
|
GenZir rhs_scope = makeSubBlock(gz, scope);
|
|
uint32_t rhs = expr(&rhs_scope, &rhs_scope.base, rhs_node);
|
|
|
|
if (!ag->has_compile_errors) {
|
|
// break_inline from rhs to bool_br (AstGen.zig:6292).
|
|
makeBreakInline(&rhs_scope, bool_br, rhs,
|
|
(int32_t)rhs_node - (int32_t)rhs_scope.decl_node_index);
|
|
}
|
|
|
|
// setBoolBrBody (AstGen.zig:6294, 11929-11944).
|
|
uint32_t raw_body_len = gzInstructionsLen(&rhs_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&rhs_scope);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
ensureExtraCapacity(ag, 2 + body_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs; // BoolBr.lhs
|
|
ag->extra[ag->extra_len++] = body_len; // BoolBr.body_len
|
|
for (uint32_t i = 0; i < raw_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
gzUnstack(&rhs_scope);
|
|
|
|
// Fill in the bool_br instruction.
|
|
ag->inst_tags[bool_br] = zir_tag;
|
|
ag->inst_datas[bool_br].pl_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
ag->inst_datas[bool_br].pl_node.payload_index = payload_index;
|
|
|
|
return bool_br + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// Mirrors nodeIsTriviallyZero (AstGen.zig:10299-10313).
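// True only when the literal token is exactly "0" (e.g. the start index
// in `a[0..n]`); "0.5", "0x0", and "0_0" all return false.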
|
|
static bool nodeIsTriviallyZero(const Ast* tree, uint32_t node) {
|
|
if (tree->nodes.tags[node] != AST_NODE_NUMBER_LITERAL)
|
|
return false;
|
|
uint32_t tok = tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = tree->tokens.starts[tok];
|
|
const char* source = (const char*)tree->source;
|
|
if (source[tok_start] != '0')
|
|
return false;
|
|
// Distinguish "0.." (range, token is "0") from "0.5" (float literal).
|
|
char c = source[tok_start + 1];
|
|
if (c == '.')
|
|
return source[tok_start + 2] == '.';
|
|
// Any alphanumeric or underscore means the token is longer than "0".
|
|
if (c >= '0' && c <= '9')
|
|
return false;
|
|
if (c >= 'a' && c <= 'z')
|
|
return false;
|
|
if (c >= 'A' && c <= 'Z')
|
|
return false;
|
|
if (c == '_')
|
|
return false;
|
|
return true;
|
|
}
|
|
|
|
// Mirrors expr (AstGen.zig:634) — main expression dispatcher.
|
|
static uint32_t exprRl(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
if (node == 0) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
AstNodeTag tag = ag->tree->nodes.tags[node];
|
|
AstData nd = ag->tree->nodes.datas[node];
|
|
|
|
switch (tag) {
|
|
case AST_NODE_NUMBER_LITERAL:
|
|
return rvalue(
|
|
gz, rl, numberLiteral(gz, node, node, NUM_SIGN_POSITIVE), node);
|
|
case AST_NODE_BUILTIN_CALL_TWO:
|
|
case AST_NODE_BUILTIN_CALL_TWO_COMMA:
|
|
return builtinCall(gz, scope, rl, node);
|
|
case AST_NODE_BUILTIN_CALL:
|
|
case AST_NODE_BUILTIN_CALL_COMMA:
|
|
return builtinCallMultiArg(gz, scope, rl, node, nd.lhs, nd.rhs);
|
|
case AST_NODE_FIELD_ACCESS:
|
|
return fieldAccessExpr(gz, scope, rl, node);
|
|
case AST_NODE_IDENTIFIER:
|
|
return identifierExpr(gz, scope, rl, node);
|
|
case AST_NODE_STRING_LITERAL: {
|
|
// Mirrors stringLiteral (AstGen.zig:8626).
|
|
uint32_t str_lit_token = ag->tree->nodes.main_tokens[node];
|
|
uint32_t str_index, str_len;
|
|
strLitAsString(ag, str_lit_token, &str_index, &str_len);
|
|
ZirInstData data;
|
|
data.str.start = str_index;
|
|
data.str.len = str_len;
|
|
uint32_t str_result = addInstruction(gz, ZIR_INST_STR, data);
|
|
return rvalue(gz, rl, str_result, node);
|
|
}
|
|
// address_of (AstGen.zig:953-960): evaluate operand with .ref rl.
|
|
case AST_NODE_ADDRESS_OF: {
|
|
uint32_t operand_node = ag->tree->nodes.datas[node].lhs;
|
|
// Check for result type to emit validate_ref_ty (AstGen.zig:954-956).
|
|
uint32_t res_ty = rlResultType(gz, rl, node);
|
|
ResultLoc operand_rl;
|
|
if (res_ty != 0) {
|
|
addUnTok(gz, ZIR_INST_VALIDATE_REF_TY, res_ty,
|
|
firstToken(ag->tree, node));
|
|
// Pass ref_coerced_ty so init expressions can use the type
|
|
// (AstGen.zig:958).
|
|
operand_rl = (ResultLoc) {
|
|
.tag = RL_REF_COERCED_TY, .data = res_ty, .src_node = 0
|
|
};
|
|
} else {
|
|
operand_rl = RL_REF_VAL;
|
|
}
|
|
uint32_t result = exprRl(gz, scope, operand_rl, operand_node);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// ptr_type (AstGen.zig:1077-1081).
|
|
case AST_NODE_PTR_TYPE_ALIGNED:
|
|
case AST_NODE_PTR_TYPE_SENTINEL:
|
|
case AST_NODE_PTR_TYPE:
|
|
case AST_NODE_PTR_TYPE_BIT_RANGE:
|
|
return rvalue(gz, rl, ptrTypeExpr(gz, scope, node), node);
|
|
// array_type (AstGen.zig:940).
|
|
case AST_NODE_ARRAY_TYPE:
|
|
return rvalue(gz, rl, arrayTypeExpr(gz, scope, node), node);
|
|
// array_init variants (AstGen.zig:836-856).
|
|
case AST_NODE_ARRAY_INIT:
|
|
case AST_NODE_ARRAY_INIT_COMMA:
|
|
case AST_NODE_ARRAY_INIT_ONE:
|
|
case AST_NODE_ARRAY_INIT_ONE_COMMA:
|
|
return arrayInitExpr(gz, scope, rl, node);
|
|
// array_cat (AstGen.zig:772): ++ binary operator.
|
|
case AST_NODE_ARRAY_CAT:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ARRAY_CAT), node);
|
|
// array_mult (AstGen.zig:774-785): ** binary operator.
|
|
case AST_NODE_ARRAY_MULT: {
|
|
uint32_t res_ty = rlResultType(gz, rl, node);
|
|
uint32_t lhs = exprRl(gz, scope, RL_NONE_VAL, nd.lhs);
|
|
ResultLoc rhs_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t rhs = comptimeExpr(
|
|
gz, scope, rhs_rl, nd.rhs, COMPTIME_REASON_ARRAY_MUL_FACTOR);
|
|
uint32_t result = addPlNodeTriple(gz, ZIR_INST_ARRAY_MUL, node,
|
|
res_ty != 0 ? res_ty : ZIR_REF_NONE, lhs, rhs);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
// grouped_expression (AstGen.zig:1100): passthrough.
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
return exprRl(gz, scope, rl, ag->tree->nodes.datas[node].lhs);
|
|
// unreachable_literal (AstGen.zig:846-854).
|
|
case AST_NODE_UNREACHABLE_LITERAL: {
|
|
emitDbgNode(gz, node);
|
|
ZirInstData udata;
|
|
memset(&udata, 0, sizeof(udata));
|
|
udata.unreachable_data.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(gz, ZIR_INST_UNREACHABLE, udata);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
// enum_literal (AstGen.zig:993).
|
|
case AST_NODE_ENUM_LITERAL: {
|
|
uint32_t ident_token = ag->tree->nodes.main_tokens[node];
|
|
uint32_t str_index = identAsString(ag, ident_token);
|
|
// If result type available, emit decl_literal (AstGen.zig:993-1003).
|
|
uint32_t res_ty = rlResultType(gz, rl, node);
|
|
if (res_ty != 0) {
|
|
uint32_t res = addPlNodeBin(
|
|
gz, ZIR_INST_DECL_LITERAL, node, res_ty, str_index);
|
|
// decl_literal does the coercion for us (AstGen.zig:1001).
|
|
// Only need rvalue for ptr/inferred_ptr/ref_coerced_ty.
|
|
if (rl.tag == RL_TY || rl.tag == RL_COERCED_TY)
|
|
return res;
|
|
return rvalue(gz, rl, res, node);
|
|
}
|
|
return rvalue(gz, rl,
|
|
addStrTok(gz, ZIR_INST_ENUM_LITERAL, str_index, ident_token),
|
|
node);
|
|
}
|
|
// multiline_string_literal (AstGen.zig:8645).
|
|
case AST_NODE_MULTILINE_STRING_LITERAL:
|
|
return rvalue(gz, rl, multilineStringLiteral(gz, scope, node), node);
|
|
// return (AstGen.zig:856).
|
|
case AST_NODE_RETURN:
|
|
return retExpr(gz, scope, node);
|
|
// call (AstGen.zig:783-790).
|
|
case AST_NODE_CALL_ONE:
|
|
case AST_NODE_CALL_ONE_COMMA:
|
|
case AST_NODE_CALL:
|
|
case AST_NODE_CALL_COMMA:
|
|
return rvalue(gz, rl, callExpr(gz, scope, rl, node), node);
|
|
// struct_init (AstGen.zig:836-839).
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
|
|
case AST_NODE_STRUCT_INIT_DOT:
|
|
case AST_NODE_STRUCT_INIT_DOT_COMMA:
|
|
case AST_NODE_STRUCT_INIT_ONE:
|
|
case AST_NODE_STRUCT_INIT_ONE_COMMA:
|
|
case AST_NODE_STRUCT_INIT:
|
|
case AST_NODE_STRUCT_INIT_COMMA:
|
|
return structInitExpr(gz, scope, rl, node);
|
|
// container_decl (AstGen.zig:1083-1098).
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
|
|
return rvalue(
|
|
gz, rl, containerDecl(gz, scope, node, 2 /* anon */), node);
|
|
// try (AstGen.zig:1115).
|
|
case AST_NODE_TRY:
|
|
return tryExpr(gz, scope, rl, node);
|
|
// Comparison operators (AstGen.zig:714-726).
|
|
case AST_NODE_EQUAL_EQUAL:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_EQ), node);
|
|
case AST_NODE_BANG_EQUAL:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_NEQ), node);
|
|
case AST_NODE_LESS_THAN:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_LT), node);
|
|
case AST_NODE_GREATER_THAN:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_GT), node);
|
|
case AST_NODE_LESS_OR_EQUAL:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_LTE), node);
|
|
case AST_NODE_GREATER_OR_EQUAL:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_CMP_GTE), node);
|
|
// Arithmetic (AstGen.zig:656-698).
|
|
case AST_NODE_ADD:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADD), node);
|
|
case AST_NODE_SUB:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUB), node);
|
|
case AST_NODE_MUL:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MUL), node);
|
|
case AST_NODE_DIV:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_DIV), node);
|
|
case AST_NODE_MOD:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MOD), node);
|
|
// Bitwise (AstGen.zig:700-712).
|
|
case AST_NODE_BIT_AND:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_BIT_AND), node);
|
|
case AST_NODE_BIT_OR:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_BIT_OR), node);
|
|
case AST_NODE_BIT_XOR:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_XOR), node);
|
|
case AST_NODE_SHL:
|
|
return rvalue(gz, rl, shiftOp(gz, scope, node, ZIR_INST_SHL), node);
|
|
case AST_NODE_SHR:
|
|
return rvalue(gz, rl, shiftOp(gz, scope, node, ZIR_INST_SHR), node);
|
|
// Boolean operators (AstGen.zig:728-731) — special: boolBinOp.
|
|
case AST_NODE_BOOL_AND:
|
|
return rvalue(
|
|
gz, rl, boolBinOp(gz, scope, node, ZIR_INST_BOOL_BR_AND), node);
|
|
case AST_NODE_BOOL_OR:
|
|
return rvalue(
|
|
gz, rl, boolBinOp(gz, scope, node, ZIR_INST_BOOL_BR_OR), node);
|
|
// Unary operators (AstGen.zig:919-938).
|
|
case AST_NODE_BOOL_NOT:
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_BOOL_NOT, expr(gz, scope, nd.lhs), node),
|
|
node);
|
|
case AST_NODE_BIT_NOT:
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_BIT_NOT, expr(gz, scope, nd.lhs), node),
|
|
node);
|
|
// negation (AstGen.zig:9863-9882).
|
|
case AST_NODE_NEGATION: {
|
|
// Check for number_literal as sub-expression to preserve negativity
|
|
// (AstGen.zig:9875-9877).
|
|
if (ag->tree->nodes.tags[nd.lhs] == AST_NODE_NUMBER_LITERAL) {
|
|
return rvalue(gz, rl,
|
|
numberLiteral(gz, nd.lhs, node, NUM_SIGN_NEGATIVE), node);
|
|
}
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_NEGATE, expr(gz, scope, nd.lhs), node),
|
|
node);
|
|
}
|
|
case AST_NODE_NEGATION_WRAP:
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_NEGATE_WRAP, expr(gz, scope, nd.lhs), node),
|
|
node);
|
|
// deref (AstGen.zig:942-951).
|
|
case AST_NODE_DEREF: {
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
addUnNode(gz, ZIR_INST_VALIDATE_DEREF, lhs, node);
|
|
if (RL_IS_REF(rl))
|
|
return lhs;
|
|
return rvalue(gz, rl, addUnNode(gz, ZIR_INST_LOAD, lhs, node), node);
|
|
}
|
|
// optional_type (AstGen.zig:961-964).
|
|
case AST_NODE_OPTIONAL_TYPE:
|
|
return rvalue(gz, rl,
|
|
addUnNode(
|
|
gz, ZIR_INST_OPTIONAL_TYPE, typeExpr(gz, scope, nd.lhs), node),
|
|
node);
|
|
// unwrap_optional (AstGen.zig:966-983).
|
|
case AST_NODE_UNWRAP_OPTIONAL: {
|
|
if (RL_IS_REF(rl)) {
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return addUnNode(
|
|
gz, ZIR_INST_OPTIONAL_PAYLOAD_SAFE_PTR, lhs, node);
|
|
} else {
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl,
|
|
addUnNode(gz, ZIR_INST_OPTIONAL_PAYLOAD_SAFE, lhs, node),
|
|
node);
|
|
}
|
|
}
|
|
// error_union type (AstGen.zig:788-797).
|
|
case AST_NODE_ERROR_UNION: {
|
|
uint32_t lhs = typeExpr(gz, scope, nd.lhs);
|
|
uint32_t rhs = typeExpr(gz, scope, nd.rhs);
|
|
return rvalue(gz, rl,
|
|
addPlNodeBin(gz, ZIR_INST_ERROR_UNION_TYPE, node, lhs, rhs), node);
|
|
}
|
|
// char_literal (AstGen.zig:8662-8675).
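// E.g. 'A' yields 65; escapes such as '\n', '\x41', and '\u{1F600}' are
// decoded inline below into a single integer value.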
|
|
case AST_NODE_CHAR_LITERAL: {
|
|
uint32_t main_tok = ag->tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = ag->tree->tokens.starts[main_tok];
|
|
const char* src = ag->tree->source;
|
|
uint32_t ci = tok_start + 1; // skip opening quote
|
|
uint64_t char_val;
|
|
if (src[ci] == '\\') {
|
|
// Escape sequence (AstGen.zig:8668-8675).
|
|
ci++;
|
|
switch (src[ci]) {
|
|
case 'n':
|
|
char_val = '\n';
|
|
break;
|
|
case 'r':
|
|
char_val = '\r';
|
|
break;
|
|
case 't':
|
|
char_val = '\t';
|
|
break;
|
|
case '\\':
|
|
char_val = '\\';
|
|
break;
|
|
case '\'':
|
|
char_val = '\'';
|
|
break;
|
|
case '"':
|
|
char_val = '"';
|
|
break;
|
|
case 'x': {
|
|
// \xNN hex escape.
|
|
uint8_t val = 0;
|
|
for (int k = 0; k < 2; k++) {
|
|
ci++;
|
|
char c = src[ci];
|
|
if (c >= '0' && c <= '9')
|
|
val = (uint8_t)(val * 16 + (uint8_t)(c - '0'));
|
|
else if (c >= 'a' && c <= 'f')
|
|
val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'a'));
|
|
else if (c >= 'A' && c <= 'F')
|
|
val = (uint8_t)(val * 16 + 10 + (uint8_t)(c - 'A'));
|
|
}
|
|
char_val = val;
|
|
break;
|
|
}
|
|
case 'u': {
|
|
// \u{NNNNNN} unicode escape (string_literal.zig:194-231).
|
|
// Advance onto '{'; the loop's leading increment then steps past it
// to the first hex digit.
|
|
ci++;
|
|
uint32_t codepoint = 0;
|
|
while (true) {
|
|
ci++;
|
|
char c = src[ci];
|
|
if (c >= '0' && c <= '9')
|
|
codepoint = codepoint * 16 + (uint32_t)(c - '0');
|
|
else if (c >= 'a' && c <= 'f')
|
|
codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'a');
|
|
else if (c >= 'A' && c <= 'F')
|
|
codepoint = codepoint * 16 + 10 + (uint32_t)(c - 'A');
|
|
else
|
|
break; // Must be '}'.
|
|
}
|
|
char_val = codepoint;
|
|
break;
|
|
}
|
|
default:
|
|
char_val = (uint8_t)src[ci];
|
|
break;
|
|
}
|
|
} else {
|
|
char_val = (uint64_t)(uint8_t)src[ci];
|
|
}
|
|
return rvalue(gz, rl, addInt(gz, char_val), node);
|
|
}
|
|
// arrayAccess (AstGen.zig:6192-6221).
|
|
case AST_NODE_ARRAY_ACCESS: {
|
|
if (RL_IS_REF(rl)) {
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t rhs = expr(gz, scope, nd.rhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return addPlNodeBin(gz, ZIR_INST_ELEM_PTR_NODE, node, lhs, rhs);
|
|
}
|
|
uint32_t lhs = expr(gz, scope, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t rhs = expr(gz, scope, nd.rhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl,
|
|
addPlNodeBin(gz, ZIR_INST_ELEM_VAL_NODE, node, lhs, rhs), node);
|
|
}
|
|
// slice (AstGen.zig:882-939).
|
|
case AST_NODE_SLICE_OPEN: {
|
|
// (AstGen.zig:908-937).
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t start = exprRl(gz, scope, usize_rl, nd.rhs);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
return rvalue(gz, rl,
|
|
addPlNodeBin(gz, ZIR_INST_SLICE_START, node, lhs, start), node);
|
|
}
|
|
case AST_NODE_SLICE: {
|
|
// Slice[rhs]: { start, end } (AstGen.zig:882-937).
|
|
const Ast* stree = ag->tree;
|
|
uint32_t start_node = stree->extra_data.arr[nd.rhs];
|
|
uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
|
|
// slice_length optimization (AstGen.zig:887-906).
|
|
if (stree->nodes.tags[nd.lhs] == AST_NODE_SLICE_OPEN
|
|
&& nodeIsTriviallyZero(stree, start_node)) {
|
|
AstData inner_nd = stree->nodes.datas[nd.lhs];
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, inner_nd.lhs);
|
|
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
.components = NULL,
|
|
.components_len = 0 };
|
|
uint32_t start_ref = exprRl(gz, scope, usize_rl, inner_nd.rhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t len_ref = exprRl(gz, scope, usize_rl, end_node);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
ensureExtraCapacity(ag, 5);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = start_ref;
|
|
ag->extra[ag->extra_len++] = len_ref;
|
|
ag->extra[ag->extra_len++] = ZIR_REF_NONE; // no sentinel
|
|
int32_t src_off = (int32_t)nd.lhs - (int32_t)gz->decl_node_index;
|
|
memcpy(&ag->extra[ag->extra_len], &src_off, sizeof(uint32_t));
|
|
ag->extra_len++;
|
|
ZirInstData data;
|
|
data.pl_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return rvalue(
|
|
gz, rl, addInstruction(gz, ZIR_INST_SLICE_LENGTH, data), node);
|
|
}
|
|
// Normal path.
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
.components = NULL,
|
|
.components_len = 0 };
|
|
uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
|
|
uint32_t end_ref = exprRl(gz, scope, usize_rl, end_node);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
ensureExtraCapacity(ag, 3);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = start_ref;
|
|
ag->extra[ag->extra_len++] = end_ref;
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return rvalue(
|
|
gz, rl, addInstruction(gz, ZIR_INST_SLICE_END, data), node);
|
|
}
|
|
case AST_NODE_SLICE_SENTINEL: {
|
|
// SliceSentinel[rhs]: { start, end, sentinel }
|
|
// (AstGen.zig:882-937).
|
|
const Ast* stree = ag->tree;
|
|
uint32_t start_node = stree->extra_data.arr[nd.rhs];
|
|
uint32_t end_node = stree->extra_data.arr[nd.rhs + 1];
|
|
uint32_t sentinel_node = stree->extra_data.arr[nd.rhs + 2];
|
|
// slice_length optimization (AstGen.zig:887-906).
|
|
if (end_node != 0 && stree->nodes.tags[nd.lhs] == AST_NODE_SLICE_OPEN
|
|
&& nodeIsTriviallyZero(stree, start_node)) {
|
|
AstData inner_nd = stree->nodes.datas[nd.lhs];
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, inner_nd.lhs);
|
|
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
.components = NULL,
|
|
.components_len = 0 };
|
|
uint32_t start_ref = exprRl(gz, scope, usize_rl, inner_nd.rhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
uint32_t len_ref = exprRl(gz, scope, usize_rl, end_node);
|
|
uint32_t sentinel_ref
|
|
= exprRl(gz, scope, RL_NONE_VAL, sentinel_node);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
ensureExtraCapacity(ag, 5);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = start_ref;
|
|
ag->extra[ag->extra_len++] = len_ref;
|
|
ag->extra[ag->extra_len++] = sentinel_ref;
|
|
int32_t src_off = (int32_t)nd.lhs - (int32_t)gz->decl_node_index;
|
|
memcpy(&ag->extra[ag->extra_len], &src_off, sizeof(uint32_t));
|
|
ag->extra_len++;
|
|
ZirInstData data;
|
|
data.pl_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return rvalue(
|
|
gz, rl, addInstruction(gz, ZIR_INST_SLICE_LENGTH, data), node);
|
|
}
|
|
// Normal path.
|
|
uint32_t lhs = exprRl(gz, scope, RL_REF_VAL, nd.lhs);
|
|
advanceSourceCursorToMainToken(ag, gz, node);
|
|
uint32_t saved_line = ag->source_line - gz->decl_line;
|
|
uint32_t saved_col = ag->source_column;
|
|
// start/end coerced to usize (AstGen.zig:911-912).
|
|
ResultLoc usize_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
.components = NULL,
|
|
.components_len = 0 };
|
|
uint32_t start_ref = exprRl(gz, scope, usize_rl, start_node);
|
|
uint32_t end_ref = (end_node != 0)
|
|
? exprRl(gz, scope, usize_rl, end_node)
|
|
: ZIR_REF_NONE;
|
|
// sentinel: create slice_sentinel_ty and coerce (AstGen.zig:913-916).
|
|
uint32_t sentinel_ty
|
|
= addUnNode(gz, ZIR_INST_SLICE_SENTINEL_TY, lhs, node);
|
|
ResultLoc sent_rl = { .tag = RL_COERCED_TY,
|
|
.data = sentinel_ty,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
.components = NULL,
|
|
.components_len = 0 };
|
|
uint32_t sentinel_ref = exprRl(gz, scope, sent_rl, sentinel_node);
|
|
emitDbgStmt(gz, saved_line, saved_col);
|
|
ensureExtraCapacity(ag, 4);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = lhs;
|
|
ag->extra[ag->extra_len++] = start_ref;
|
|
ag->extra[ag->extra_len++] = end_ref;
|
|
ag->extra[ag->extra_len++] = sentinel_ref;
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return rvalue(
|
|
gz, rl, addInstruction(gz, ZIR_INST_SLICE_SENTINEL, data), node);
|
|
}
|
|
// orelse (AstGen.zig:1054-1075).
|
|
case AST_NODE_ORELSE:
|
|
if (RL_IS_REF(rl)) {
|
|
return orelseCatchExpr(gz, scope, rl, node,
|
|
ZIR_INST_IS_NON_NULL_PTR, ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE_PTR,
|
|
(ZirInstTag)0, UINT32_MAX);
|
|
} else {
|
|
return orelseCatchExpr(gz, scope, rl, node, ZIR_INST_IS_NON_NULL,
|
|
ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE, (ZirInstTag)0, UINT32_MAX);
|
|
}
|
|
// catch (AstGen.zig:1017-1052).
|
|
case AST_NODE_CATCH: {
|
|
uint32_t catch_token = ag->tree->nodes.main_tokens[node];
|
|
uint32_t pt = (ag->tree->tokens.tags[catch_token + 1] == TOKEN_PIPE)
|
|
? catch_token + 2
|
|
: UINT32_MAX;
|
|
// Detect catch |err| switch(err) { ... } pattern
|
|
// (AstGen.zig:1023-1029).
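// E.g. `foo() catch |err| switch (err) { ... }` where the switch operand
// is the capture itself is lowered through switchExprErrUnion rather
// than the generic orelse/catch path.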
|
|
if (pt != UINT32_MAX) {
|
|
AstData catch_data = ag->tree->nodes.datas[node];
|
|
uint32_t rhs = catch_data.rhs;
|
|
AstNodeTag rhs_tag = ag->tree->nodes.tags[rhs];
|
|
if ((rhs_tag == AST_NODE_SWITCH
|
|
|| rhs_tag == AST_NODE_SWITCH_COMMA)
|
|
&& ag->tree->tokens.tags[ag->tree->nodes.main_tokens[rhs]]
|
|
== TOKEN_KEYWORD_SWITCH) {
|
|
// Check switch condition is identifier matching capture.
|
|
uint32_t cond = ag->tree->nodes.datas[rhs].lhs;
|
|
if (ag->tree->nodes.tags[cond] == AST_NODE_IDENTIFIER
|
|
&& tokenIdentEql(
|
|
ag->tree, pt, ag->tree->nodes.main_tokens[cond])) {
|
|
// Apply ri.br(): convert coerced_ty to ty.
|
|
ResultLoc brl = rl;
|
|
if (brl.tag == RL_COERCED_TY)
|
|
brl.tag = RL_TY;
|
|
return switchExprErrUnion(gz, scope, brl, node, 0);
|
|
}
|
|
}
|
|
}
|
|
if (RL_IS_REF(rl)) {
|
|
return orelseCatchExpr(gz, scope, rl, node,
|
|
ZIR_INST_IS_NON_ERR_PTR, ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE_PTR,
|
|
ZIR_INST_ERR_UNION_CODE_PTR, pt);
|
|
} else {
|
|
return orelseCatchExpr(gz, scope, rl, node, ZIR_INST_IS_NON_ERR,
|
|
ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE, ZIR_INST_ERR_UNION_CODE,
|
|
pt);
|
|
}
|
|
}
|
|
// Block expressions (AstGen.zig:984-992).
|
|
case AST_NODE_BLOCK_TWO:
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON:
|
|
case AST_NODE_BLOCK:
|
|
case AST_NODE_BLOCK_SEMICOLON:
|
|
return blockExprExpr(gz, scope, rl, node);
|
|
// Anonymous array init (AstGen.zig:1119-1127).
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
|
|
case AST_NODE_ARRAY_INIT_DOT:
|
|
case AST_NODE_ARRAY_INIT_DOT_COMMA:
|
|
return arrayInitDotExpr(gz, scope, rl, node);
|
|
// if (AstGen.zig:1013-1024).
|
|
case AST_NODE_IF_SIMPLE:
|
|
case AST_NODE_IF: {
|
|
// Detect switchExprErrUnion pattern: if (x) |v| { ... } else |err|
|
|
// switch (err) { ... } (AstGen.zig:862-871).
|
|
if (tag == AST_NODE_IF) {
|
|
uint32_t then_node_if = ag->tree->extra_data.arr[nd.rhs];
|
|
uint32_t else_node_if = ag->tree->extra_data.arr[nd.rhs + 1];
|
|
if (else_node_if != 0) {
|
|
uint32_t else_tok = lastToken(ag->tree, then_node_if) + 1;
|
|
uint32_t err_tok = 0;
|
|
if (else_tok + 1 < ag->tree->tokens.len
|
|
&& ag->tree->tokens.tags[else_tok + 1] == TOKEN_PIPE) {
|
|
err_tok = else_tok + 2;
|
|
}
|
|
if (err_tok != 0) {
|
|
AstNodeTag else_tag = ag->tree->nodes.tags[else_node_if];
|
|
if ((else_tag == AST_NODE_SWITCH
|
|
|| else_tag == AST_NODE_SWITCH_COMMA)
|
|
&& ag->tree->tokens.tags[ag->tree->nodes
|
|
.main_tokens[else_node_if]]
|
|
== TOKEN_KEYWORD_SWITCH) {
|
|
uint32_t sw_cond
|
|
= ag->tree->nodes.datas[else_node_if].lhs;
|
|
if (ag->tree->nodes.tags[sw_cond]
|
|
== AST_NODE_IDENTIFIER
|
|
&& tokenIdentEql(ag->tree, err_tok,
|
|
ag->tree->nodes.main_tokens[sw_cond])) {
|
|
return switchExprErrUnion(
|
|
gz, scope, rlBr(rl), node, 1);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
return ifExpr(gz, scope, rlBr(rl), node);
|
|
}
|
|
// for (AstGen.zig:1043-1060).
|
|
case AST_NODE_FOR_SIMPLE:
|
|
case AST_NODE_FOR:
|
|
return forExpr(gz, scope, rlBr(rl), node, false);
|
|
// Merge error sets (AstGen.zig:788-797).
|
|
case AST_NODE_MERGE_ERROR_SETS: {
|
|
uint32_t lhs = typeExpr(gz, scope, nd.lhs);
|
|
uint32_t rhs = typeExpr(gz, scope, nd.rhs);
|
|
return rvalue(gz, rl,
|
|
addPlNodeBin(gz, ZIR_INST_MERGE_ERROR_SETS, node, lhs, rhs), node);
|
|
}
|
|
// Wrapping arithmetic (AstGen.zig:751-758).
|
|
case AST_NODE_ADD_WRAP:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADDWRAP), node);
|
|
case AST_NODE_SUB_WRAP:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUBWRAP), node);
|
|
case AST_NODE_MUL_WRAP:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MULWRAP), node);
|
|
// Saturating arithmetic (AstGen.zig:752-761).
|
|
case AST_NODE_ADD_SAT:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_ADD_SAT), node);
|
|
case AST_NODE_SUB_SAT:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SUB_SAT), node);
|
|
case AST_NODE_MUL_SAT:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_MUL_SAT), node);
|
|
case AST_NODE_SHL_SAT:
|
|
return rvalue(
|
|
gz, rl, simpleBinOp(gz, scope, node, ZIR_INST_SHL_SAT), node);
|
|
// break (AstGen.zig:2150-2237).
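// Covers labeled and unlabeled breaks, e.g. (illustrative):
//     const v = blk: { break :blk 42; };
//     outer: while (true) { if (done) break :outer; }
// The scope chain is walked outward until a GenZir with a matching
// label (or an unlabeled break target) is found; normal defers between
// the break and that block are emitted along the way.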
case AST_NODE_BREAK: {
|
|
uint32_t opt_break_label = nd.lhs; // UINT32_MAX = none
|
|
uint32_t opt_rhs = nd.rhs; // 0 = none
|
|
|
|
// Walk scope chain to find target block (AstGen.zig:2157-2187).
|
|
for (Scope* s = scope; s != NULL;) {
|
|
if (s->tag == SCOPE_GEN_ZIR) {
|
|
GenZir* block_gz = (GenZir*)s;
|
|
uint32_t block_inst = UINT32_MAX;
|
|
if (opt_break_label != UINT32_MAX) {
|
|
// Labeled break: check label on GenZir.
|
|
// Use direct source text comparison, not identAsString,
|
|
// to avoid adding label names to string_bytes
|
|
// (AstGen.zig:2176 uses tokenIdentEql).
|
|
if (block_gz->label_token != UINT32_MAX
|
|
&& tokenIdentEql(ag->tree, opt_break_label,
|
|
block_gz->label_token)) {
|
|
block_inst = block_gz->label_block_inst;
|
|
}
|
|
} else {
|
|
// Unlabeled break: check break_block.
|
|
if (block_gz->break_block != UINT32_MAX)
|
|
block_inst = block_gz->break_block;
|
|
}
|
|
if (block_inst != UINT32_MAX) {
|
|
// Found target (AstGen.zig:2188-2228).
|
|
ZirInstTag break_tag = block_gz->is_inline
|
|
? ZIR_INST_BREAK_INLINE
|
|
: ZIR_INST_BREAK;
|
|
if (opt_rhs == 0) {
|
|
// Void break (AstGen.zig:2195-2206).
|
|
rvalue(gz, block_gz->break_result_info,
|
|
ZIR_REF_VOID_VALUE, node);
|
|
genDefers(gz, s, scope, DEFER_NORMAL_ONLY);
|
|
if (!block_gz->is_comptime) {
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand
|
|
= block_inst + ZIR_REF_START_INDEX;
|
|
rdata.un_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(gz,
|
|
ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL,
|
|
rdata);
|
|
}
|
|
addBreak(gz, break_tag, block_inst, ZIR_REF_VOID_VALUE,
|
|
AST_NODE_OFFSET_NONE);
|
|
} else {
|
|
// Value break (AstGen.zig:2208-2228).
|
|
uint32_t operand = exprRl(
|
|
gz, scope, block_gz->break_result_info, opt_rhs);
|
|
genDefers(gz, s, scope, DEFER_NORMAL_ONLY);
|
|
if (!block_gz->is_comptime)
|
|
restoreErrRetIndex(gz, block_inst,
|
|
block_gz->break_result_info, opt_rhs, operand);
|
|
switch (block_gz->break_result_info.tag) {
|
|
case RL_PTR:
|
|
case RL_DISCARD:
|
|
addBreak(gz, break_tag, block_inst,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
break;
|
|
default:
|
|
addBreak(gz, break_tag, block_inst, operand,
|
|
(int32_t)opt_rhs
|
|
- (int32_t)gz->decl_node_index);
|
|
break;
|
|
}
|
|
}
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
s = block_gz->parent;
|
|
} else if (s->tag == SCOPE_LOCAL_VAL) {
|
|
s = ((ScopeLocalVal*)s)->parent;
|
|
} else if (s->tag == SCOPE_LOCAL_PTR) {
|
|
s = ((ScopeLocalPtr*)s)->parent;
|
|
} else if (s->tag == SCOPE_DEFER_NORMAL
|
|
|| s->tag == SCOPE_DEFER_ERROR) {
|
|
s = ((ScopeDefer*)s)->parent;
|
|
} else if (s->tag == SCOPE_LABEL) {
|
|
s = ((ScopeLabel*)s)->parent;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
// continue (AstGen.zig:2246-2340).
|
|
case AST_NODE_CONTINUE: {
|
|
// Walk scope chain to find GenZir with continue_block.
|
|
for (Scope* s = scope; s != NULL;) {
|
|
if (s->tag == SCOPE_GEN_ZIR) {
|
|
GenZir* gz2 = (GenZir*)s;
|
|
if (gz2->continue_block != UINT32_MAX) {
|
|
genDefers(gz, s, scope, DEFER_NORMAL_ONLY);
|
|
ZirInstTag break_tag = gz2->is_inline
|
|
? ZIR_INST_BREAK_INLINE
|
|
: ZIR_INST_BREAK;
|
|
if (break_tag == ZIR_INST_BREAK_INLINE) {
|
|
// AstGen.zig:2328-2330.
|
|
addUnNode(gz, ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW,
|
|
gz2->continue_block + ZIR_REF_START_INDEX, node);
|
|
}
|
|
// Restore error return index (AstGen.zig:2333-2334).
|
|
if (!gz2->is_comptime) {
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand
|
|
= gz2->continue_block + ZIR_REF_START_INDEX;
|
|
rdata.un_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(gz,
|
|
ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL,
|
|
rdata);
|
|
}
|
|
addBreak(gz, break_tag, gz2->continue_block,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
s = gz2->parent;
|
|
} else if (s->tag == SCOPE_LOCAL_VAL) {
|
|
s = ((ScopeLocalVal*)s)->parent;
|
|
} else if (s->tag == SCOPE_LOCAL_PTR) {
|
|
s = ((ScopeLocalPtr*)s)->parent;
|
|
} else if (s->tag == SCOPE_DEFER_NORMAL
|
|
|| s->tag == SCOPE_DEFER_ERROR) {
|
|
s = ((ScopeDefer*)s)->parent;
|
|
} else if (s->tag == SCOPE_LABEL) {
|
|
s = ((ScopeLabel*)s)->parent;
|
|
} else {
|
|
break;
|
|
}
|
|
}
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_UNREACHABLE_VALUE;
|
|
}
|
|
// comptime (AstGen.zig:1104-1105).
|
|
case AST_NODE_COMPTIME: {
|
|
// comptimeExprAst / comptimeExpr2 (AstGen.zig:2104, 1982).
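// e.g. (illustrative): `const n = comptime fib(10);`
// If the enclosing scope is already comptime the keyword is a no-op
// and the body is lowered directly; otherwise the body is wrapped in
// a block_comptime instruction with a type-only result location.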
uint32_t body_node = nd.lhs;
|
|
|
|
// If already comptime, just pass through (AstGen.zig:1990-1992).
|
|
if (gz->is_comptime)
|
|
return exprRl(gz, scope, rl, body_node);
|
|
|
|
// Create comptime block (AstGen.zig:2078-2098).
|
|
uint32_t block_inst
|
|
= makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
block_scope.is_comptime = true;
|
|
|
|
// Transform RL to type-only (AstGen.zig:2084-2090).
|
|
// Runtime-to-comptime boundary: can't pass runtime pointers.
|
|
ResultLoc ty_only_rl;
|
|
uint32_t res_ty = rlResultType(gz, rl, node);
|
|
if (res_ty != 0)
|
|
ty_only_rl = (ResultLoc) { .tag = RL_COERCED_TY,
|
|
.data = res_ty,
|
|
.src_node = 0,
|
|
.ctx = rl.ctx };
|
|
else
|
|
ty_only_rl = (ResultLoc) {
|
|
.tag = RL_NONE, .data = 0, .src_node = 0, .ctx = rl.ctx
|
|
};
|
|
|
|
uint32_t result = exprRl(&block_scope, scope, ty_only_rl, body_node);
|
|
addBreak(&block_scope, ZIR_INST_BREAK_INLINE, block_inst, result,
|
|
AST_NODE_OFFSET_NONE);
|
|
setBlockComptimeBody(
|
|
ag, &block_scope, block_inst, COMPTIME_REASON_COMPTIME_KEYWORD);
|
|
gzAppendInstruction(gz, block_inst);
|
|
|
|
// Apply rvalue to handle RL_PTR etc (AstGen.zig:2098).
|
|
return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
|
|
}
|
|
// switch (AstGen.zig:1072-1078).
|
|
case AST_NODE_SWITCH:
|
|
case AST_NODE_SWITCH_COMMA:
|
|
return switchExpr(gz, scope, rlBr(rl), node);
|
|
// while (AstGen.zig:1037-1042).
|
|
case AST_NODE_WHILE_SIMPLE:
|
|
case AST_NODE_WHILE_CONT:
|
|
case AST_NODE_WHILE:
|
|
return whileExpr(gz, scope, rlBr(rl), node, false);
|
|
// error_value (AstGen.zig:1005).
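// e.g. (illustrative): `error.OutOfMemory` — the name token sits two
// tokens after the `error` keyword (past the dot), hence the +2 below.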
case AST_NODE_ERROR_VALUE: {
|
|
uint32_t error_token = ag->tree->nodes.main_tokens[node] + 2;
|
|
uint32_t str = identAsString(ag, error_token);
|
|
return rvalue(gz, rl,
|
|
addStrTok(gz, ZIR_INST_ERROR_VALUE, str, error_token), node);
|
|
}
|
|
// error_set_decl (AstGen.zig:5905-5955).
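// e.g. (illustrative): `const E = error{ OutOfMemory, NotFound };`
// The payload written below is a fields_len word followed by one
// interned name index per identifier between the braces.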
case AST_NODE_ERROR_SET_DECL: {
|
|
AstData esd = ag->tree->nodes.datas[node];
|
|
uint32_t lbrace = esd.lhs;
|
|
uint32_t rbrace = esd.rhs;
|
|
// Reserve 1 extra word for ErrorSetDecl.fields_len.
|
|
ensureExtraCapacity(ag, 1 + (rbrace - lbrace));
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra_len++; // placeholder for fields_len
|
|
uint32_t fields_len = 0;
|
|
for (uint32_t tok = lbrace + 1; tok < rbrace; tok++) {
|
|
TokenizerTag ttag = ag->tree->tokens.tags[tok];
|
|
if (ttag == TOKEN_DOC_COMMENT || ttag == TOKEN_COMMA)
|
|
continue;
|
|
if (ttag == TOKEN_IDENTIFIER) {
|
|
uint32_t str_index = identAsString(ag, tok);
|
|
ensureExtraCapacity(ag, 1);
|
|
ag->extra[ag->extra_len++] = str_index;
|
|
fields_len++;
|
|
}
|
|
}
|
|
ag->extra[payload_index] = fields_len;
|
|
return rvalue(gz, rl,
|
|
addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ERROR_SET_DECL, node, payload_index),
|
|
node);
|
|
}
|
|
// assign in expr context (AstGen.zig:1011-1014).
|
|
case AST_NODE_ASSIGN:
|
|
assignStmt(gz, scope, node);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
// Compound assignment operators (AstGen.zig:689-744).
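// e.g. (illustrative): `x += y;` lowers via assignOp() (roughly: load
// the lhs, apply the paired binary op — ZIR_INST_ADD here — and store
// back through the lhs pointer); the expression itself evaluates to void.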
case AST_NODE_ASSIGN_ADD:
|
|
assignOp(gz, scope, node, ZIR_INST_ADD);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_SUB:
|
|
assignOp(gz, scope, node, ZIR_INST_SUB);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_MUL:
|
|
assignOp(gz, scope, node, ZIR_INST_MUL);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_DIV:
|
|
assignOp(gz, scope, node, ZIR_INST_DIV);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_MOD:
|
|
assignOp(gz, scope, node, ZIR_INST_MOD_REM);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_BIT_AND:
|
|
assignOp(gz, scope, node, ZIR_INST_BIT_AND);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_BIT_OR:
|
|
assignOp(gz, scope, node, ZIR_INST_BIT_OR);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_BIT_XOR:
|
|
assignOp(gz, scope, node, ZIR_INST_XOR);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_ADD_WRAP:
|
|
assignOp(gz, scope, node, ZIR_INST_ADDWRAP);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_SUB_WRAP:
|
|
assignOp(gz, scope, node, ZIR_INST_SUBWRAP);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_MUL_WRAP:
|
|
assignOp(gz, scope, node, ZIR_INST_MULWRAP);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_ADD_SAT:
|
|
assignOp(gz, scope, node, ZIR_INST_ADD_SAT);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_SUB_SAT:
|
|
assignOp(gz, scope, node, ZIR_INST_SUB_SAT);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_MUL_SAT:
|
|
assignOp(gz, scope, node, ZIR_INST_MUL_SAT);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
// Shift assignment operators (AstGen.zig:676-687).
|
|
case AST_NODE_ASSIGN_SHL:
|
|
assignShift(gz, scope, node, ZIR_INST_SHL);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_SHR:
|
|
assignShift(gz, scope, node, ZIR_INST_SHR);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
case AST_NODE_ASSIGN_SHL_SAT:
|
|
assignShiftSat(gz, scope, node);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
// assign_destructure (AstGen.zig:669-674).
|
|
case AST_NODE_ASSIGN_DESTRUCTURE:
|
|
assignDestructure(gz, scope, node);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
}
|
|
|
|
static uint32_t expr(GenZir* gz, Scope* scope, uint32_t node) {
|
|
return exprRl(gz, scope, RL_NONE_VAL, node);
|
|
}
|
|
|
|
// --- blockExprExpr (AstGen.zig:2388-2536) ---
// Handles block expressions (labeled and unlabeled).
// Unlabeled blocks just execute statements and return void.
// Labeled blocks (blk: { ... break :blk val; }) need a block instruction.
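// e.g. (illustrative):
//     const v = blk: {
//         const t = compute();
//         break :blk t + 1;
//     };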
static uint32_t blockExprExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Extract statements.
|
|
uint32_t stmt_buf[2];
|
|
const uint32_t* statements = NULL;
|
|
uint32_t stmt_count = 0;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_BLOCK_TWO:
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON: {
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
stmt_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
stmt_buf[idx++] = nd.rhs;
|
|
statements = stmt_buf;
|
|
stmt_count = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_BLOCK:
|
|
case AST_NODE_BLOCK_SEMICOLON: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
statements = tree->extra_data.arr + start;
|
|
stmt_count = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Check if labeled (AstGen.zig:2397-2402).
|
|
// A labeled block has: identifier colon before the lbrace.
|
|
uint32_t lbrace = tree->nodes.main_tokens[node];
|
|
bool is_labeled
|
|
= (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER);
|
|
|
|
if (!is_labeled) {
|
|
if (!gz->is_comptime) {
|
|
// Non-comptime unlabeled block (AstGen.zig:2404-2425).
|
|
// Create block_inst FIRST, add to gz, then process body.
|
|
uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
|
|
gzAppendInstruction(gz, block_inst);
|
|
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
blockExprStmts(
|
|
&block_scope, &block_scope.base, statements, stmt_count);
|
|
|
|
if (!endsWithNoReturn(&block_scope)) {
|
|
// restore_err_ret_index on gz (AstGen.zig:2420).
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = block_inst + ZIR_REF_START_INDEX;
|
|
rdata.un_node.src_node
|
|
= (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(
|
|
gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
|
|
// break on block_scope (AstGen.zig:2422).
|
|
addBreak(&block_scope, ZIR_INST_BREAK, block_inst,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
}
|
|
setBlockBody(ag, &block_scope, block_inst);
|
|
} else {
|
|
// Comptime unlabeled block: inline statements
|
|
// (AstGen.zig:2426-2429).
|
|
GenZir sub_gz = makeSubBlock(gz, scope);
|
|
blockExprStmts(&sub_gz, &sub_gz.base, statements, stmt_count);
|
|
}
|
|
// AstGen.zig:2431
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
}
|
|
|
|
// Labeled block (AstGen.zig:2466-2536).
|
|
// Note: upstream blockExpr always passes force_comptime=false.
|
|
uint32_t label_token = lbrace - 2;
|
|
|
|
// Compute break result info (AstGen.zig:2484-2492).
|
|
bool need_rl = nodesNeedRlContains(ag, node);
|
|
ResultLoc break_ri = breakResultInfo(gz, rl, node, need_rl);
|
|
bool need_result_rvalue = (break_ri.tag != rl.tag);
|
|
|
|
// Reserve the block instruction (AstGen.zig:2500-2501).
|
|
uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
|
|
gzAppendInstruction(gz, block_inst);
|
|
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
// Set label on block_scope (AstGen.zig:2504-2508).
|
|
block_scope.label_token = label_token;
|
|
block_scope.label_block_inst = block_inst;
|
|
block_scope.break_result_info = break_ri;
|
|
|
|
// Process statements (AstGen.zig:2512).
|
|
blockExprStmts(&block_scope, &block_scope.base, statements, stmt_count);
|
|
|
|
if (!endsWithNoReturn(&block_scope)) {
|
|
// Emit restore_err_ret_index (AstGen.zig:2515).
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = block_inst + ZIR_REF_START_INDEX;
|
|
rdata.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
addInstruction(
|
|
gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
|
|
// rvalue + break (AstGen.zig:2516-2518).
|
|
uint32_t result = rvalue(
|
|
gz, block_scope.break_result_info, ZIR_REF_VOID_VALUE, node);
|
|
addBreak(&block_scope, ZIR_INST_BREAK, block_inst, result,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
|
|
setBlockBody(ag, &block_scope, block_inst);
|
|
|
|
// AstGen.zig:2531-2534.
|
|
if (need_result_rvalue)
|
|
return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
|
|
return block_inst + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// --- arrayInitDotExpr (AstGen.zig:1576-1595) ---
// Handles anonymous array init: `.{a, b, c}`.
// Dispatches on the result location: plain array_init_anon, typed
// array_init/array_init_ref, pointer-result init, or destructure.
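// e.g. (illustrative): `const a: [3]u8 = .{ 1, 2, 3 };` takes the
// RL_TY/RL_COERCED_TY path, while a bare `.{ 1, 2, 3 }` with no result
// type goes through the RL_NONE (array_init_anon) path below.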
static uint32_t arrayInitDotExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Extract elements.
|
|
uint32_t elem_buf[2];
|
|
const uint32_t* elements = NULL;
|
|
uint32_t elem_count = 0;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: {
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
elem_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
elem_buf[idx++] = nd.rhs;
|
|
elements = elem_buf;
|
|
elem_count = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_ARRAY_INIT_DOT:
|
|
case AST_NODE_ARRAY_INIT_DOT_COMMA: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
elements = tree->extra_data.arr + start;
|
|
elem_count = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Dispatch based on RL (AstGen.zig:1515-1572).
|
|
switch (rl.tag) {
|
|
case RL_NONE: {
|
|
// arrayInitExprAnon (AstGen.zig:1576-1595).
|
|
ensureExtraCapacity(ag, 1 + elem_count);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
uint32_t extra_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t elem_ref = expr(gz, scope, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
return addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
|
|
}
|
|
case RL_TY:
|
|
case RL_COERCED_TY: {
|
|
// validate_array_init_result_ty + arrayInitExprTyped
|
|
// (AstGen.zig:1534-1539).
|
|
uint32_t result_ty = rl.data;
|
|
// Emit ArrayInit { ty, init_count } payload for
|
|
// validate_array_init_result_ty.
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t val_payload = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = result_ty;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
addPlNodePayloadIndex(
|
|
gz, ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY, node, val_payload);
|
|
|
|
// arrayInitExprTyped (AstGen.zig:1598-1642) with elem_ty=none.
|
|
uint32_t operands_len = elem_count + 1; // +1 for type
|
|
ensureExtraCapacity(ag, 1 + operands_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = operands_len;
|
|
ag->extra[ag->extra_len++] = result_ty;
|
|
uint32_t extra_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
// array_init_elem_type uses bin data (AstGen.zig:1626-1632).
|
|
uint32_t elem_ty
|
|
= addBin(gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, result_ty, i);
|
|
ResultLoc elem_rl = { .tag = RL_COERCED_TY,
    .data = elem_ty, .src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
return addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ARRAY_INIT, node, payload_index);
|
|
}
|
|
case RL_INFERRED_PTR: {
|
|
// arrayInitExprAnon + rvalue (AstGen.zig:1545-1551).
|
|
ensureExtraCapacity(ag, 1 + elem_count);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
uint32_t extra_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t elem_ref = expr(gz, scope, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
uint32_t result = addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
case RL_DISCARD: {
|
|
// Evaluate and discard each element (AstGen.zig:1517-1522).
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
exprRl(gz, scope, RL_DISCARD_VAL, elements[i]);
|
|
}
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
case RL_REF: {
|
|
// arrayInitExprAnon + ref (AstGen.zig:1523-1526).
|
|
ensureExtraCapacity(ag, 1 + elem_count);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
uint32_t extra_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t elem_ref = expr(gz, scope, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
uint32_t result = addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
case RL_REF_COERCED_TY: {
|
|
// validate_array_init_ref_ty + arrayInitExprTyped
|
|
// (AstGen.zig:1527-1532).
|
|
uint32_t ptr_ty_inst = rl.data;
|
|
ensureExtraCapacity(ag, 2);
|
|
uint32_t val_payload = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = ptr_ty_inst;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
uint32_t dest_arr_ty_inst = addPlNodePayloadIndex(
|
|
gz, ZIR_INST_VALIDATE_ARRAY_INIT_REF_TY, node, val_payload);
|
|
|
|
// arrayInitExprTyped with elem_ty=none, is_ref=true.
|
|
uint32_t operands_len = elem_count + 1;
|
|
ensureExtraCapacity(ag, 1 + operands_len);
|
|
uint32_t ai_payload = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = operands_len;
|
|
ag->extra[ag->extra_len++] = dest_arr_ty_inst;
|
|
uint32_t extra_start2 = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
// array_init_elem_type uses bin data (AstGen.zig:1626-1632).
|
|
uint32_t elem_ty = addBin(
|
|
gz, ZIR_INST_ARRAY_INIT_ELEM_TYPE, dest_arr_ty_inst, i);
|
|
ResultLoc elem_rl = { .tag = RL_COERCED_TY,
    .data = elem_ty, .src_node = 0, .ctx = RI_CTX_NONE };
|
|
uint32_t elem_ref = exprRl(gz, scope, elem_rl, elements[i]);
|
|
ag->extra[extra_start2 + i] = elem_ref;
|
|
}
|
|
return addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ARRAY_INIT_REF, node, ai_payload);
|
|
}
|
|
case RL_PTR: {
|
|
// arrayInitExprPtr (AstGen.zig:1541-1543, 1645-1672).
|
|
uint32_t array_ptr_inst
|
|
= addUnNode(gz, ZIR_INST_OPT_EU_BASE_PTR_INIT, rl.data, node);
|
|
// Block payload: body_len = elem_count.
|
|
ensureExtraCapacity(ag, 1 + elem_count);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
uint32_t items_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
// array_init_elem_ptr: ElemPtrImm{ptr, index}.
|
|
uint32_t elem_ptr_inst = addPlNodeBin(gz,
|
|
ZIR_INST_ARRAY_INIT_ELEM_PTR, elements[i], array_ptr_inst, i);
|
|
ag->extra[items_start + i]
|
|
= elem_ptr_inst - ZIR_REF_START_INDEX; // .toIndex()
|
|
// Evaluate element with ptr RL (AstGen.zig:1668).
|
|
// Upstream creates fresh ResultInfo with default ctx (.none).
|
|
ResultLoc ptr_rl = { .tag = RL_PTR,
|
|
.data = elem_ptr_inst,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
exprRl(gz, scope, ptr_rl, elements[i]);
|
|
}
|
|
addPlNodePayloadIndex(
|
|
gz, ZIR_INST_VALIDATE_PTR_ARRAY_INIT, node, payload_index);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
case RL_DESTRUCTURE: {
|
|
// Destructure directly into result pointers (AstGen.zig:1552-1569).
|
|
if (elem_count != rl.components_len) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
const DestructureComponent* comp = &rl.components[i];
|
|
// Initialize to a safe default so elem_rl is defined on every path,
// even if a new DestructureComponent tag is ever added.
ResultLoc elem_rl = RL_DISCARD_VAL;
switch (comp->tag) {
|
|
case DC_TYPED_PTR:
|
|
elem_rl = (ResultLoc) { .tag = RL_PTR,
|
|
.data = comp->inst,
|
|
.src_node = comp->src_node,
|
|
.ctx = RI_CTX_NONE };
|
|
break;
|
|
case DC_INFERRED_PTR:
|
|
elem_rl = (ResultLoc) { .tag = RL_INFERRED_PTR,
|
|
.data = comp->inst,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
break;
|
|
case DC_DISCARD:
|
|
elem_rl = RL_DISCARD_VAL;
|
|
break;
|
|
}
|
|
exprRl(gz, scope, elem_rl, elements[i]);
|
|
}
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
}
|
|
|
|
// Fallback: anon init + rvalue.
|
|
ensureExtraCapacity(ag, 1 + elem_count);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = elem_count;
|
|
uint32_t extra_start = ag->extra_len;
|
|
ag->extra_len += elem_count;
|
|
for (uint32_t i = 0; i < elem_count; i++) {
|
|
uint32_t elem_ref = expr(gz, scope, elements[i]);
|
|
ag->extra[extra_start + i] = elem_ref;
|
|
}
|
|
uint32_t result = addPlNodePayloadIndex(
|
|
gz, ZIR_INST_ARRAY_INIT_ANON, node, payload_index);
|
|
return rvalue(gz, rl, result, node);
|
|
}
|
|
|
|
// --- ifExpr (AstGen.zig:6300-6528) ---
// Handles if and if_simple expressions.
// Pattern: block_scope with condbr → then/else branches → setCondBrPayload.
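// e.g. (illustrative):
//     const a = if (opt) |x| x + 1 else 0;              // optional
//     const b = if (res) |x| x else |err| handle(err);  // error union
//     const c = if (flag) 1 else 2;                     // bool
// Which condition kind applies is inferred from the capture tokens
// detected below.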
static uint32_t ifExpr(GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
bool need_rl = nodesNeedRlContains(ag, node);
|
|
ResultLoc break_rl = breakResultInfo(gz, rl, node, need_rl);
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
uint32_t cond_node = nd.lhs;
|
|
uint32_t then_node, else_node;
|
|
|
|
if (tag == AST_NODE_IF_SIMPLE) {
|
|
then_node = nd.rhs;
|
|
else_node = 0;
|
|
} else {
|
|
// AST_NODE_IF: rhs is index into extra -> If{then_expr, else_expr}
|
|
then_node = tree->extra_data.arr[nd.rhs];
|
|
else_node = tree->extra_data.arr[nd.rhs + 1];
|
|
}
|
|
|
|
// Detect payload capture: if (cond) |x| (AstGen.zig Ast.fullIf).
|
|
// payload_pipe = lastToken(cond_expr) + 2; if pipe -> payload_token + 1.
|
|
uint32_t payload_token = 0; // 0 = no payload
|
|
uint32_t last_cond_tok = lastToken(tree, cond_node);
|
|
uint32_t pipe_tok = last_cond_tok + 2;
|
|
if (pipe_tok < tree->tokens.len
|
|
&& tree->tokens.tags[pipe_tok] == TOKEN_PIPE) {
|
|
payload_token = pipe_tok + 1; // identifier or * token
|
|
}
|
|
|
|
// Detect error token: then_expr else |e| (AstGen.zig Ast.fullIf).
|
|
uint32_t error_token = 0;
|
|
if (else_node != 0) {
|
|
uint32_t else_tok = lastToken(tree, then_node) + 1; // "else" keyword
|
|
if (else_tok + 1 < tree->tokens.len
|
|
&& tree->tokens.tags[else_tok + 1] == TOKEN_PIPE) {
|
|
error_token = else_tok + 2;
|
|
}
|
|
}
|
|
|
|
// Detect payload_is_ref: if payload_token is '*' (AstGen.zig:6330-6333).
|
|
bool payload_is_ref = (payload_token != 0
|
|
&& tree->tokens.tags[payload_token] == TOKEN_ASTERISK);
|
|
|
|
// Create block_scope (AstGen.zig:6326-6328).
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
|
|
// Emit DBG_STMT for condition (AstGen.zig:6335).
|
|
// NOTE: upstream emits into parent_gz AFTER block_scope is created,
|
|
// so the dbg_stmt ends up in block_scope's range (shared array).
|
|
emitDbgNode(gz, cond_node);
|
|
|
|
// Evaluate condition (AstGen.zig:6336-6363).
|
|
uint32_t cond_inst; // the value (optional/err-union/bool)
|
|
uint32_t bool_bit; // the boolean for condbr
|
|
if (error_token != 0) {
|
|
// Error union condition: if (err_union) |val| else |err|.
|
|
// (AstGen.zig:6340-6347).
|
|
ResultLoc cond_rl = payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
|
|
cond_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
|
|
cond_inst
|
|
= exprRl(&block_scope, &block_scope.base, cond_rl, cond_node);
|
|
ZirInstTag cond_tag
|
|
= payload_is_ref ? ZIR_INST_IS_NON_ERR_PTR : ZIR_INST_IS_NON_ERR;
|
|
bool_bit = addUnNode(&block_scope, cond_tag, cond_inst, cond_node);
|
|
} else if (payload_token != 0) {
|
|
// Optional condition: if (optional) |val| (AstGen.zig:6348-6355).
|
|
ResultLoc cond_rl = payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
|
|
cond_inst
|
|
= exprRl(&block_scope, &block_scope.base, cond_rl, cond_node);
|
|
ZirInstTag cond_tag
|
|
= payload_is_ref ? ZIR_INST_IS_NON_NULL_PTR : ZIR_INST_IS_NON_NULL;
|
|
bool_bit = addUnNode(&block_scope, cond_tag, cond_inst, cond_node);
|
|
} else {
|
|
// Bool condition (AstGen.zig:6356-6362).
|
|
ResultLoc coerced_bool_ri = {
|
|
.tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_BOOL_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
};
|
|
cond_inst = exprRl(
|
|
&block_scope, &block_scope.base, coerced_bool_ri, cond_node);
|
|
bool_bit = cond_inst;
|
|
}
|
|
|
|
uint32_t condbr = addCondBr(&block_scope, ZIR_INST_CONDBR, node);
|
|
uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
|
|
setBlockBody(ag, &block_scope, block_inst);
|
|
gzAppendInstruction(gz, block_inst);
|
|
|
|
// Then branch (AstGen.zig:6372-6441).
|
|
GenZir then_scope = makeSubBlock(gz, scope);
|
|
Scope* then_sub_scope = &then_scope.base;
|
|
ScopeLocalVal payload_val_scope;
|
|
memset(&payload_val_scope, 0, sizeof(payload_val_scope));
|
|
|
|
if (error_token != 0 && payload_token != 0) {
|
|
// Error union with payload: unwrap payload (AstGen.zig:6379-6403).
|
|
ZirInstTag unwrap_tag = payload_is_ref
|
|
? ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE_PTR
|
|
: ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE;
|
|
uint32_t payload_inst
|
|
= addUnNode(&then_scope, unwrap_tag, cond_inst, then_node);
|
|
uint32_t name_token = payload_token + (payload_is_ref ? 1u : 0u);
|
|
if (tokenIsUnderscore(tree, name_token)) {
|
|
// Discard (AstGen.zig:6389-6391).
|
|
if (payload_is_ref) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
// then_sub_scope stays as &then_scope.base
|
|
} else {
|
|
uint32_t ident_name = identAsString(ag, name_token);
|
|
payload_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &then_scope.base,
|
|
.gen_zir = &then_scope,
|
|
.inst = payload_inst,
|
|
.token_src = name_token,
|
|
.name = ident_name,
|
|
};
|
|
addDbgVar(
|
|
&then_scope, ZIR_INST_DBG_VAR_VAL, ident_name, payload_inst);
|
|
then_sub_scope = &payload_val_scope.base;
|
|
}
|
|
} else if (error_token != 0) {
|
|
// Error union without payload: ensure payload is void
|
|
// (AstGen.zig:6404-6406).
|
|
addUnNode(&then_scope, ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID,
|
|
cond_inst, node);
|
|
} else if (payload_token != 0) {
|
|
// Optional with payload: unwrap optional (AstGen.zig:6408-6431).
|
|
uint32_t ident_token = payload_token + (payload_is_ref ? 1u : 0u);
|
|
if (tokenIsUnderscore(tree, ident_token)) {
|
|
// Discard (AstGen.zig:6415-6417).
|
|
if (payload_is_ref) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
// then_sub_scope stays as &then_scope.base
|
|
} else {
|
|
ZirInstTag unwrap_tag = payload_is_ref
|
|
? ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE_PTR
|
|
: ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE;
|
|
uint32_t payload_inst
|
|
= addUnNode(&then_scope, unwrap_tag, cond_inst, then_node);
|
|
uint32_t ident_name = identAsString(ag, ident_token);
|
|
payload_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &then_scope.base,
|
|
.gen_zir = &then_scope,
|
|
.inst = payload_inst,
|
|
.token_src = ident_token,
|
|
.name = ident_name,
|
|
};
|
|
addDbgVar(
|
|
&then_scope, ZIR_INST_DBG_VAR_VAL, ident_name, payload_inst);
|
|
then_sub_scope = &payload_val_scope.base;
|
|
}
|
|
}
|
|
|
|
// Use fullBodyExpr for then body (AstGen.zig:6437).
|
|
uint32_t then_result
|
|
= fullBodyExpr(&then_scope, then_sub_scope, break_rl, then_node);
|
|
if (!endsWithNoReturn(&then_scope)) {
|
|
addBreak(&then_scope, ZIR_INST_BREAK, block_inst, then_result,
|
|
(int32_t)then_node - (int32_t)gz->decl_node_index);
|
|
}
|
|
|
|
// Else branch (AstGen.zig:6443-6489).
|
|
GenZir else_scope = makeSubBlock(gz, scope);
|
|
|
|
// save_err_ret_index (AstGen.zig:6448-6449).
|
|
bool do_err_trace = ag->fn_ret_ty != 0 && error_token != 0;
|
|
if (do_err_trace && nodeMayAppendToErrorTrace(tree, cond_node))
|
|
addSaveErrRetIndex(&else_scope, ZIR_REF_NONE);
|
|
|
|
if (else_node != 0) {
|
|
Scope* else_sub_scope = &else_scope.base;
|
|
ScopeLocalVal error_val_scope;
|
|
memset(&error_val_scope, 0, sizeof(error_val_scope));
|
|
|
|
if (error_token != 0) {
|
|
// Error capture: else |err| (AstGen.zig:6452-6475).
|
|
ZirInstTag err_tag = payload_is_ref ? ZIR_INST_ERR_UNION_CODE_PTR
|
|
: ZIR_INST_ERR_UNION_CODE;
|
|
uint32_t err_inst
|
|
= addUnNode(&else_scope, err_tag, cond_inst, cond_node);
|
|
// identAsString must be called before the underscore check,
|
|
// matching upstream order (AstGen.zig:6459-6462).
|
|
uint32_t err_name = identAsString(ag, error_token);
|
|
if (tokenIsUnderscore(tree, error_token)) {
|
|
// Discard |_| (AstGen.zig:6461-6462).
|
|
// else_sub_scope stays as &else_scope.base
|
|
} else {
|
|
error_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &else_scope.base,
|
|
.gen_zir = &else_scope,
|
|
.inst = err_inst,
|
|
.token_src = error_token,
|
|
.name = err_name,
|
|
};
|
|
addDbgVar(
|
|
&else_scope, ZIR_INST_DBG_VAR_VAL, err_name, err_inst);
|
|
else_sub_scope = &error_val_scope.base;
|
|
}
|
|
}
|
|
|
|
// Use fullBodyExpr for else body (AstGen.zig:6478).
|
|
uint32_t else_result
|
|
= fullBodyExpr(&else_scope, else_sub_scope, break_rl, else_node);
|
|
if (!endsWithNoReturn(&else_scope)) {
|
|
// Restore error return index (AstGen.zig:6480-6482).
|
|
if (do_err_trace)
|
|
restoreErrRetIndex(
|
|
&else_scope, block_inst, break_rl, else_node, else_result);
|
|
addBreak(&else_scope, ZIR_INST_BREAK, block_inst, else_result,
|
|
(int32_t)else_node - (int32_t)gz->decl_node_index);
|
|
}
|
|
} else {
|
|
// No else branch (AstGen.zig:6486-6488).
|
|
uint32_t else_result
|
|
= rvalue(&else_scope, rl, ZIR_REF_VOID_VALUE, node);
|
|
addBreak(&else_scope, ZIR_INST_BREAK, block_inst, else_result,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
|
|
// Wire up condbr (AstGen.zig:6491).
|
|
setCondBrPayload(ag, condbr, bool_bit, &then_scope, &else_scope);
|
|
|
|
// AstGen.zig:6493-6497.
|
|
bool need_result_rvalue = (break_rl.tag != rl.tag);
|
|
if (need_result_rvalue)
|
|
return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
|
|
return block_inst + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// --- forExpr (AstGen.zig:6819-7125) ---
// Handles for_simple and for (multi-input).
// Supports both indexable and for_range inputs.
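// e.g. (illustrative):
//     for (items, 0..) |item, i| { use(item, i); }
// A shared usize counter drives every input; each input is either an
// indexable (element-accessed by the counter) or a `start..end` range
// whose bounds feed the for_len length check.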
#define FOR_MAX_INPUTS 16
|
|
|
|
static uint32_t forExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_statement) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
AstNodeTag node_tag = tree->nodes.tags[node];
|
|
|
|
// Detect inline keyword (AstGen.zig:6847).
|
|
uint32_t main_token = tree->nodes.main_tokens[node];
|
|
bool is_inline = (main_token > 0
|
|
&& tree->tokens.tags[main_token - 1] == TOKEN_KEYWORD_INLINE);
|
|
|
|
// Compute label_token (AstGen.zig fullForComponents:2341-2348).
|
|
uint32_t label_token = UINT32_MAX;
|
|
{
|
|
uint32_t tok_i = main_token;
|
|
if (is_inline)
|
|
tok_i = main_token - 1;
|
|
if (tok_i >= 2 && tree->tokens.tags[tok_i - 2] == TOKEN_IDENTIFIER
|
|
&& tree->tokens.tags[tok_i - 1] == TOKEN_COLON)
|
|
label_token = tok_i - 2;
|
|
}
|
|
|
|
// Compute break_rl from rl (AstGen.zig:6833-6845).
|
|
bool need_rl = nodesNeedRlContains(ag, node);
|
|
ResultLoc break_rl = breakResultInfo(gz, rl, node, need_rl);
|
|
bool need_result_rvalue = (break_rl.tag != rl.tag);
|
|
|
|
// Extract input nodes and body/else nodes.
|
|
// FOR_SIMPLE: lhs = input node, rhs = body (Ast.zig:1960-1968).
|
|
// FOR: lhs = extra_data index, rhs = packed AstFor (Ast.zig:1970-1981).
|
|
uint32_t input_nodes[FOR_MAX_INPUTS];
|
|
uint32_t num_inputs;
|
|
uint32_t body_node;
|
|
uint32_t else_node = 0;
|
|
if (node_tag == AST_NODE_FOR_SIMPLE) {
|
|
input_nodes[0] = nd.lhs;
|
|
num_inputs = 1;
|
|
body_node = nd.rhs;
|
|
} else {
|
|
uint32_t extra_idx = nd.lhs;
|
|
AstFor for_data;
|
|
memcpy(&for_data, &nd.rhs, sizeof(AstFor));
|
|
num_inputs = for_data.inputs;
|
|
if (num_inputs == 0 || num_inputs > FOR_MAX_INPUTS) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
for (uint32_t i = 0; i < num_inputs; i++)
|
|
input_nodes[i] = tree->extra_data.arr[extra_idx + i];
|
|
body_node = tree->extra_data.arr[extra_idx + num_inputs];
|
|
if (for_data.has_else)
|
|
else_node = tree->extra_data.arr[extra_idx + num_inputs + 1];
|
|
}
|
|
|
|
// Per-input arrays (AstGen.zig:6858-6862).
|
|
uint32_t indexables[FOR_MAX_INPUTS];
|
|
uint32_t lens[FOR_MAX_INPUTS][2]; // [ref0, ref1] per input
|
|
|
|
// Allocate index counter (AstGen.zig:6865-6874).
|
|
ZirInstTag alloc_tag
|
|
= is_inline ? ZIR_INST_ALLOC_COMPTIME_MUT : ZIR_INST_ALLOC;
|
|
uint32_t index_ptr = addUnNode(gz, alloc_tag, ZIR_REF_USIZE_TYPE, node);
|
|
addPlNodeBin(gz, ZIR_INST_STORE_NODE, node, index_ptr, ZIR_REF_ZERO_USIZE);
|
|
|
|
// Compute payload_token (AstGen.zig fullForComponents:2349-2350).
|
|
// payload_token = lastToken(inputs[last]) + 3 + has_comma
|
|
uint32_t last_cond_tok = lastToken(tree, input_nodes[num_inputs - 1]);
|
|
bool has_comma = (last_cond_tok + 1 < tree->tokens.len
|
|
&& tree->tokens.tags[last_cond_tok + 1] == TOKEN_COMMA);
|
|
uint32_t payload_token = last_cond_tok + 3 + (has_comma ? 1 : 0);
|
|
|
|
bool any_len_checks = false;
|
|
|
|
// Process each input (AstGen.zig:6878-6925).
|
|
uint32_t capture_token = payload_token;
|
|
for (uint32_t i = 0; i < num_inputs; i++) {
|
|
uint32_t input = input_nodes[i];
|
|
// Advance capture_token past this capture's ident (+comma).
|
|
bool capture_is_ref
|
|
= (tree->tokens.tags[capture_token] == TOKEN_ASTERISK);
|
|
uint32_t ident_tok = capture_token + (capture_is_ref ? 1u : 0u);
|
|
bool is_discard = tokenIsUnderscore(tree, ident_tok);
|
|
capture_token = ident_tok + 2; // skip ident + comma/pipe
|
|
|
|
// Diagnostic: pointer modifier invalid on discard
|
|
// (AstGen.zig:6885-6887).
|
|
if (is_discard && capture_is_ref) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
emitDbgNode(gz, input);
|
|
|
|
if (tree->nodes.tags[input] == AST_NODE_FOR_RANGE) {
|
|
// Diagnostic: cannot capture reference to range
|
|
// (AstGen.zig:6893-6895).
|
|
if (capture_is_ref) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Range input (AstGen.zig:6896-6916).
|
|
AstData range_nd = tree->nodes.datas[input];
|
|
uint32_t start_node = range_nd.lhs;
|
|
uint32_t end_node = range_nd.rhs;
|
|
|
|
// AstGen.zig:6897-6902: expr with .rl = .{ .ty = .usize_type }
|
|
ResultLoc usize_rl = {
|
|
.tag = RL_TY,
|
|
.data = ZIR_REF_USIZE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
};
|
|
uint32_t start_val = exprRl(gz, scope, usize_rl, start_node);
|
|
|
|
uint32_t end_val = ZIR_REF_NONE;
|
|
if (end_node != 0) {
|
|
end_val = exprRl(gz, scope, usize_rl, end_node);
|
|
}
|
|
|
|
// Diagnostic: discard of unbounded counter
|
|
// (AstGen.zig:6904-6906).
|
|
if (end_val == ZIR_REF_NONE && is_discard) {
|
|
SET_ERROR(ag);
|
|
}
|
|
|
|
if (end_val == ZIR_REF_NONE) {
|
|
lens[i][0] = ZIR_REF_NONE;
|
|
lens[i][1] = ZIR_REF_NONE;
|
|
} else {
|
|
any_len_checks = true;
|
|
lens[i][0] = start_val;
|
|
lens[i][1] = end_val;
|
|
}
|
|
|
|
// Check if start is trivially zero.
|
|
bool start_is_zero = false;
|
|
if (tree->nodes.tags[start_node] == AST_NODE_NUMBER_LITERAL) {
|
|
uint32_t tok = tree->nodes.main_tokens[start_node];
|
|
uint32_t ts = tree->tokens.starts[tok];
|
|
if (tree->source[ts] == '0'
|
|
&& (ts + 1 >= tree->source_len
|
|
|| tree->source[ts + 1] < '0'
|
|
|| tree->source[ts + 1] > '9'))
|
|
start_is_zero = true;
|
|
}
|
|
indexables[i] = start_is_zero ? ZIR_REF_NONE : start_val;
|
|
} else {
|
|
// Regular indexable (AstGen.zig:6918-6923).
|
|
uint32_t indexable = expr(gz, scope, input);
|
|
any_len_checks = true;
|
|
indexables[i] = indexable;
|
|
lens[i][0] = indexable;
|
|
lens[i][1] = ZIR_REF_NONE;
|
|
}
|
|
}
|
|
|
|
// Error if no length checks exist (AstGen.zig:6927-6929).
|
|
if (!any_len_checks) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Emit for_len as MultiOp (AstGen.zig:6933-6942).
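// Operand pairs written below: (start, end) for bounded ranges,
// (indexable, NONE) for indexables, (NONE, NONE) for unbounded
// counters, all preceded by the operands_len word.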
uint32_t len;
|
|
{
|
|
uint32_t operands_len = num_inputs * 2;
|
|
ensureExtraCapacity(ag, 1 + operands_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = operands_len;
|
|
for (uint32_t i = 0; i < num_inputs; i++) {
|
|
ag->extra[ag->extra_len++] = lens[i][0];
|
|
ag->extra[ag->extra_len++] = lens[i][1];
|
|
}
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
len = addInstruction(gz, ZIR_INST_FOR_LEN, data);
|
|
}
|
|
|
|
// Create loop (AstGen.zig:6944-6946).
|
|
ZirInstTag loop_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_LOOP;
|
|
uint32_t loop_inst = makeBlockInst(ag, loop_tag, gz, node);
|
|
// Issue 3: append loop_inst to parent_gz immediately (AstGen.zig:6946).
|
|
gzAppendInstruction(gz, loop_inst);
|
|
|
|
GenZir loop_scope = makeSubBlock(gz, scope);
|
|
loop_scope.is_inline = is_inline;
|
|
// Issue 1: set break_result_info (AstGen.zig:6950).
|
|
loop_scope.break_result_info = break_rl;
|
|
|
|
// Load index (AstGen.zig:6955-6956).
|
|
// We need to finish loop_scope later once we have the deferred refs from
|
|
// then_scope. However, the load must be removed from instructions in the
|
|
// meantime or it appears to be part of parent_gz.
|
|
uint32_t index = addUnNode(&loop_scope, ZIR_INST_LOAD, index_ptr, node);
|
|
ag->scratch_inst_len--; // pop from loop_scope (AstGen.zig:6956)
|
|
|
|
// Condition: added to cond_scope (AstGen.zig:6958-6962).
|
|
GenZir cond_scope = makeSubBlock(gz, &loop_scope.base);
|
|
uint32_t cond
|
|
= addPlNodeBin(&cond_scope, ZIR_INST_CMP_LT, node, index, len);
|
|
|
|
// Create condbr + block (AstGen.zig:6967-6974).
|
|
ZirInstTag condbr_tag
|
|
= is_inline ? ZIR_INST_CONDBR_INLINE : ZIR_INST_CONDBR;
|
|
uint32_t condbr = addCondBr(&cond_scope, condbr_tag, node);
|
|
ZirInstTag block_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_BLOCK;
|
|
uint32_t cond_block = makeBlockInst(ag, block_tag, &loop_scope, node);
|
|
setBlockBody(ag, &cond_scope, cond_block);
|
|
|
|
loop_scope.break_block = loop_inst;
|
|
loop_scope.continue_block = cond_block; // AstGen.zig:6974
|
|
// Issue 4: set label on loop_scope (AstGen.zig:6975-6980).
|
|
if (label_token != UINT32_MAX) {
|
|
loop_scope.label_token = label_token;
|
|
loop_scope.label_block_inst = loop_inst;
|
|
}
|
|
|
|
// Then branch: loop body (AstGen.zig:6982-7065).
|
|
GenZir then_scope = makeSubBlock(gz, &cond_scope.base);
|
|
|
|
// Set up capture scopes for all inputs (AstGen.zig:6986-7045).
|
|
ScopeLocalVal capture_scopes[FOR_MAX_INPUTS];
|
|
Scope* body_scope_parent = &then_scope.base;
|
|
{
|
|
capture_token = payload_token;
|
|
for (uint32_t i = 0; i < num_inputs; i++) {
|
|
uint32_t input = input_nodes[i];
|
|
bool capture_is_ref
|
|
= (tree->tokens.tags[capture_token] == TOKEN_ASTERISK);
|
|
uint32_t ident_tok = capture_token + (capture_is_ref ? 1u : 0u);
|
|
capture_token = ident_tok + 2;
|
|
|
|
// Check if discard (AstGen.zig:6999).
|
|
bool is_discard = tokenIsUnderscore(tree, ident_tok);
|
|
if (is_discard)
|
|
continue;
|
|
|
|
// Compute capture inst (AstGen.zig:7004-7028).
|
|
uint32_t capture_inst;
|
|
bool is_counter = (tree->nodes.tags[input] == AST_NODE_FOR_RANGE);
|
|
|
|
if (indexables[i] == ZIR_REF_NONE) {
|
|
// Start=0 counter: use index directly.
|
|
capture_inst = index;
|
|
} else if (is_counter) {
|
|
// Counter with nonzero start: add.
|
|
capture_inst = addPlNodeBin(
|
|
&then_scope, ZIR_INST_ADD, input, indexables[i], index);
|
|
} else if (capture_is_ref) {
|
|
// Indexable by ref: elem_ptr.
|
|
capture_inst = addPlNodeBin(&then_scope, ZIR_INST_ELEM_PTR,
|
|
input, indexables[i], index);
|
|
} else {
|
|
// Indexable by val: elem_val.
|
|
capture_inst = addPlNodeBin(&then_scope, ZIR_INST_ELEM_VAL,
|
|
input, indexables[i], index);
|
|
}
|
|
|
|
uint32_t name_str = identAsString(ag, ident_tok);
|
|
capture_scopes[i] = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = body_scope_parent,
|
|
.gen_zir = &then_scope,
|
|
.inst = capture_inst,
|
|
.token_src = ident_tok,
|
|
.name = name_str,
|
|
};
|
|
// AstGen.zig:7040.
|
|
addDbgVar(
|
|
&then_scope, ZIR_INST_DBG_VAR_VAL, name_str, capture_inst);
|
|
body_scope_parent = &capture_scopes[i].base;
|
|
}
|
|
}
|
|
|
|
// Execute body (AstGen.zig:7047-7048).
|
|
uint32_t then_result
|
|
= fullBodyExpr(&then_scope, body_scope_parent, RL_NONE_VAL, body_node);
|
|
addEnsureResult(&then_scope, then_result, body_node);
|
|
|
|
// dbg_stmt + dbg_empty_stmt (AstGen.zig:7052-7061).
|
|
advanceSourceCursor(ag, tree->tokens.starts[lastToken(tree, body_node)]);
|
|
emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column);
|
|
{
|
|
ZirInstData ext_data;
|
|
ext_data.extended.opcode = (uint16_t)ZIR_EXT_DBG_EMPTY_STMT;
|
|
ext_data.extended.small = 0;
|
|
ext_data.extended.operand = 0;
|
|
addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
|
|
}
|
|
|
|
ZirInstTag break_tag = is_inline ? ZIR_INST_BREAK_INLINE : ZIR_INST_BREAK;
|
|
addBreak(&then_scope, break_tag, cond_block, ZIR_REF_VOID_VALUE,
|
|
AST_NODE_OFFSET_NONE);
|
|
|
|
// Else branch (AstGen.zig:7066-7091).
|
|
GenZir else_scope = makeSubBlock(gz, &cond_scope.base);
|
|
|
|
if (else_node != 0) {
|
|
// Issue 2: evaluate else expression (AstGen.zig:7069-7081).
|
|
// Remove continue/break blocks so that control flow applies
|
|
// to outer loops (AstGen.zig:7073-7074).
|
|
loop_scope.continue_block = UINT32_MAX;
|
|
loop_scope.break_block = UINT32_MAX;
|
|
uint32_t else_result = fullBodyExpr(&else_scope, &else_scope.base,
|
|
loop_scope.break_result_info, else_node);
|
|
if (is_statement) {
|
|
addEnsureResult(&else_scope, else_result, else_node);
|
|
}
|
|
if (!endsWithNoReturn(&else_scope)) {
|
|
addBreak(&else_scope, break_tag, loop_inst, else_result,
|
|
(int32_t)else_node - (int32_t)gz->decl_node_index);
|
|
}
|
|
} else {
|
|
// No else: break with void (AstGen.zig:7082-7085).
|
|
uint32_t void_result
|
|
= rvalue(&else_scope, rl, ZIR_REF_VOID_VALUE, node);
|
|
addBreak(&else_scope, break_tag, loop_inst, void_result,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
|
|
// Issue 4: check unused label (AstGen.zig:7087-7091).
|
|
if (label_token != UINT32_MAX) {
|
|
// Note: upstream checks loop_scope.label.used; we don't track usage
|
|
// yet, so skip the "unused for loop label" error for now.
|
|
}
|
|
|
|
setCondBrPayload(ag, condbr, cond, &then_scope, &else_scope);
|
|
|
|
// then_scope and else_scope unstacked now. Resurrect loop_scope to
|
|
// finally finish it (AstGen.zig:7095-7113).
|
|
{
|
|
// Reset loop_scope instructions and re-add index + cond_block.
|
|
loop_scope.instructions_top = ag->scratch_inst_len;
|
|
gzAppendInstruction(&loop_scope, index - ZIR_REF_START_INDEX);
|
|
gzAppendInstruction(&loop_scope, cond_block);
|
|
|
|
// Increment the index variable (AstGen.zig:7100-7108).
|
|
uint32_t index_plus_one = addPlNodeBin(
|
|
&loop_scope, ZIR_INST_ADD_UNSAFE, node, index, ZIR_REF_ONE_USIZE);
|
|
addPlNodeBin(
|
|
&loop_scope, ZIR_INST_STORE_NODE, node, index_ptr, index_plus_one);
|
|
|
|
// Repeat (AstGen.zig:7110-7111).
|
|
ZirInstTag repeat_tag
|
|
= is_inline ? ZIR_INST_REPEAT_INLINE : ZIR_INST_REPEAT;
|
|
ZirInstData repeat_data;
|
|
memset(&repeat_data, 0, sizeof(repeat_data));
|
|
repeat_data.node = (int32_t)node - (int32_t)loop_scope.decl_node_index;
|
|
addInstruction(&loop_scope, repeat_tag, repeat_data);
|
|
|
|
setBlockBody(ag, &loop_scope, loop_inst);
|
|
}
|
|
|
|
// Issue 1: apply rvalue if needed (AstGen.zig:7116-7119).
|
|
uint32_t result;
|
|
if (need_result_rvalue)
|
|
result = rvalue(gz, rl, loop_inst + ZIR_REF_START_INDEX, node);
|
|
else
|
|
result = loop_inst + ZIR_REF_START_INDEX;
|
|
|
|
// Emit ensure_result_used when used as statement (AstGen.zig:7121-7123).
|
|
if (is_statement) {
|
|
addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, result, node);
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
// --- orelseCatchExpr (AstGen.zig:6031-6142) ---
// Handles `lhs orelse rhs` and `lhs catch rhs`.
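// e.g. (illustrative):
//     const v = map.get(key) orelse default;
//     const n = readFile(path) catch |err| return err;
// cond_op/unwrap_op/unwrap_code_op select the optional vs. error-union
// flavor; payload_token is UINT32_MAX when there is no |err| capture.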
static uint32_t orelseCatchExpr(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, ZirInstTag cond_op, ZirInstTag unwrap_op,
|
|
ZirInstTag unwrap_code_op, uint32_t payload_token) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
bool do_err_trace = ag->fn_ret_ty != 0
|
|
&& (cond_op == ZIR_INST_IS_NON_ERR
|
|
|| cond_op == ZIR_INST_IS_NON_ERR_PTR);
|
|
|
|
// breakResultInfo (AstGen.zig:6046-6058).
|
|
bool need_rl = nodesNeedRlContains(ag, node);
|
|
ResultLoc break_rl = breakResultInfo(gz, rl, node, need_rl);
|
|
bool need_result_rvalue = (break_rl.tag != rl.tag);
|
|
|
|
// Create block_scope (AstGen.zig:6062-6063).
|
|
GenZir block_scope = makeSubBlock(gz, scope);
|
|
|
|
// Evaluate operand in block_scope (AstGen.zig:6066-6074).
|
|
ResultLoc operand_rl;
|
|
if (RL_IS_REF(break_rl)) {
|
|
operand_rl = RL_REF_VAL;
|
|
} else {
|
|
operand_rl = RL_NONE_VAL;
|
|
}
|
|
if (do_err_trace) {
|
|
operand_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
|
|
}
|
|
uint32_t operand
|
|
= exprRl(&block_scope, &block_scope.base, operand_rl, nd.lhs);
|
|
|
|
// Check condition in block_scope (AstGen.zig:6075).
|
|
uint32_t condition = addUnNode(&block_scope, cond_op, operand, node);
|
|
|
|
// condbr in block_scope (AstGen.zig:6076).
|
|
uint32_t condbr = addCondBr(&block_scope, ZIR_INST_CONDBR, node);
|
|
|
|
// Create block in parent gz (AstGen.zig:6078-6081).
|
|
uint32_t block_inst = makeBlockInst(ag, ZIR_INST_BLOCK, gz, node);
|
|
setBlockBody(ag, &block_scope, block_inst);
|
|
// block_scope unstacked now.
|
|
gzAppendInstruction(gz, block_inst);
|
|
|
|
// Then branch: unwrap payload (AstGen.zig:6083-6092).
|
|
GenZir then_scope = makeSubBlock(&block_scope, scope);
|
|
uint32_t unwrapped = addUnNode(&then_scope, unwrap_op, operand, node);
|
|
// Apply rvalue coercion unless rl is ref/ref_coerced_ty
|
|
// (AstGen.zig:6088-6091).
|
|
uint32_t then_result = (rl.tag == RL_REF || rl.tag == RL_REF_COERCED_TY)
|
|
? unwrapped
|
|
: rvalue(&then_scope, break_rl, unwrapped, node);
|
|
addBreak(&then_scope, ZIR_INST_BREAK, block_inst, then_result,
|
|
(int32_t)node - (int32_t)gz->decl_node_index);
|
|
|
|
// Else branch: evaluate RHS (AstGen.zig:6094-6131).
|
|
GenZir else_scope = makeSubBlock(&block_scope, scope);
|
|
|
|
// save_err_ret_index (AstGen.zig:6099-6100).
|
|
if (do_err_trace && nodeMayAppendToErrorTrace(tree, nd.lhs))
|
|
addSaveErrRetIndex(&else_scope, ZIR_REF_NONE);
|
|
|
|
// Error capture scope (AstGen.zig:6102-6123).
|
|
ScopeLocalVal err_val_scope;
|
|
memset(&err_val_scope, 0, sizeof(err_val_scope));
|
|
Scope* else_sub_scope = &else_scope.base;
|
|
if (payload_token != UINT32_MAX) {
|
|
if (tokenIsUnderscore(tree, payload_token)) {
|
|
// Discard |_| — else_sub_scope stays as &else_scope.base.
|
|
} else {
|
|
uint32_t err_name = identAsString(ag, payload_token);
|
|
uint32_t err_inst
|
|
= addUnNode(&else_scope, unwrap_code_op, operand, node);
|
|
err_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &else_scope.base,
|
|
.gen_zir = &else_scope,
|
|
.inst = err_inst,
|
|
.token_src = payload_token,
|
|
.name = err_name,
|
|
};
|
|
else_sub_scope = &err_val_scope.base;
|
|
}
|
|
}
|
|
|
|
// Use fullBodyExpr (not expr) to inline unlabeled blocks
|
|
// (AstGen.zig:6125).
|
|
uint32_t else_result
|
|
= fullBodyExpr(&else_scope, else_sub_scope, break_rl, nd.rhs);
|
|
if (!endsWithNoReturn(&else_scope)) {
|
|
// restoreErrRetIndex (AstGen.zig:6128-6129).
|
|
if (do_err_trace)
|
|
restoreErrRetIndex(
|
|
&else_scope, block_inst, break_rl, nd.rhs, else_result);
|
|
addBreak(&else_scope, ZIR_INST_BREAK, block_inst, else_result,
|
|
(int32_t)nd.rhs - (int32_t)gz->decl_node_index);
|
|
}
|
|
|
|
setCondBrPayload(ag, condbr, condition, &then_scope, &else_scope);
|
|
|
|
// AstGen.zig:6137-6141.
|
|
if (need_result_rvalue)
|
|
return rvalue(gz, rl, block_inst + ZIR_REF_START_INDEX, node);
|
|
return block_inst + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// --- whileExpr (AstGen.zig:6529-6817) ---
// Handles while_simple, while_cont, while.
// Structure: loop { cond_block { cond, condbr }, repeat }
//   condbr → then { [payload], continue_block { [cont_expr], body,
//                   break continue }, break cond }
//          → else { [err_capture], else_body / break loop }
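// e.g. (illustrative):
//     while (it.next()) |item| { use(item); }
//     outer: while (i < n) : (i += 1) { if (done) break :outer; }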
static uint32_t whileExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node, bool is_statement) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag node_tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Compute break_rl from rl (AstGen.zig:6540-6548).
|
|
bool need_rl = nodesNeedRlContains(ag, node);
|
|
ResultLoc break_rl = breakResultInfo(gz, rl, node, need_rl);
|
|
bool need_result_rvalue = (break_rl.tag != rl.tag);
|
|
|
|
// Detect inline keyword (AstGen.zig:6558).
|
|
uint32_t main_token = tree->nodes.main_tokens[node];
|
|
bool is_inline = (main_token > 0
|
|
&& tree->tokens.tags[main_token - 1] == TOKEN_KEYWORD_INLINE);
|
|
|
|
// Compute label_token (AstGen.zig fullWhileComponents:2310-2317).
|
|
uint32_t label_token = UINT32_MAX;
|
|
{
|
|
uint32_t tok_i = main_token;
|
|
if (is_inline)
|
|
tok_i = main_token - 1;
|
|
if (tok_i >= 2 && tree->tokens.tags[tok_i - 2] == TOKEN_IDENTIFIER
|
|
&& tree->tokens.tags[tok_i - 1] == TOKEN_COLON)
|
|
label_token = tok_i - 2;
|
|
}
|
|
|
|
// Extract AST nodes depending on node type (Ast.zig:1925-1958).
|
|
uint32_t cond_node = nd.lhs;
|
|
uint32_t body_node;
|
|
uint32_t else_node = 0;
|
|
uint32_t cont_expr = 0; // 0 = none
|
|
if (node_tag == AST_NODE_WHILE_SIMPLE) {
|
|
body_node = nd.rhs;
|
|
} else if (node_tag == AST_NODE_WHILE_CONT) {
|
|
// WhileCont[rhs]: { cont_expr, then_expr }
|
|
cont_expr = tree->extra_data.arr[nd.rhs];
|
|
body_node = tree->extra_data.arr[nd.rhs + 1];
|
|
} else {
|
|
// AST_NODE_WHILE: While[rhs]: { cont_expr, then_expr, else_expr }
|
|
// cont_expr is stored via OPT(): UINT32_MAX means none.
|
|
uint32_t raw_cont = tree->extra_data.arr[nd.rhs];
|
|
cont_expr = (raw_cont == UINT32_MAX) ? 0 : raw_cont;
|
|
body_node = tree->extra_data.arr[nd.rhs + 1];
|
|
else_node = tree->extra_data.arr[nd.rhs + 2];
|
|
}
|
|
|
|
// Detect payload capture (Ast.zig fullWhileComponents:2318-2321).
|
|
uint32_t payload_token = 0;
|
|
{
|
|
uint32_t last_cond_tok = lastToken(tree, cond_node);
|
|
if (last_cond_tok + 2 < tree->tokens.len
|
|
&& tree->tokens.tags[last_cond_tok + 2] == TOKEN_PIPE) {
|
|
payload_token = last_cond_tok + 3;
|
|
}
|
|
}
|
|
|
|
// Detect error token (Ast.zig fullWhileComponents:2322-2329).
|
|
uint32_t error_token = 0;
|
|
if (else_node != 0) {
|
|
uint32_t else_tok = lastToken(tree, body_node) + 1;
|
|
if (else_tok + 1 < tree->tokens.len
|
|
&& tree->tokens.tags[else_tok + 1] == TOKEN_PIPE) {
|
|
error_token = else_tok + 2;
|
|
}
|
|
}
|
|
|
|
// Detect payload_is_ref (AstGen.zig:6574-6577).
|
|
bool payload_is_ref = (payload_token != 0
|
|
&& tree->tokens.tags[payload_token] == TOKEN_ASTERISK);
|
|
|
|
// Create loop instruction (AstGen.zig:6562-6564).
|
|
ZirInstTag loop_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_LOOP;
|
|
uint32_t loop_inst = makeBlockInst(ag, loop_tag, gz, node);
|
|
gzAppendInstruction(gz, loop_inst);
|
|
|
|
GenZir loop_scope = makeSubBlock(gz, scope);
|
|
loop_scope.is_inline = is_inline;
|
|
loop_scope.break_result_info = break_rl;
|
|
|
|
// Evaluate condition in cond_scope (AstGen.zig:6571-6607).
|
|
GenZir cond_scope = makeSubBlock(gz, &loop_scope.base);
|
|
|
|
// Emit debug node for the condition expression (AstGen.zig:6579).
|
|
emitDbgNode(gz, cond_node);
|
|
|
|
uint32_t cond_inst; // the value (optional/err-union/bool)
|
|
uint32_t bool_bit; // the boolean for condbr
|
|
if (error_token != 0) {
|
|
// Error union condition (AstGen.zig:6584-6591).
|
|
ResultLoc cond_rl = payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
|
|
cond_inst
|
|
= fullBodyExpr(&cond_scope, &cond_scope.base, cond_rl, cond_node);
|
|
ZirInstTag cond_tag
|
|
= payload_is_ref ? ZIR_INST_IS_NON_ERR_PTR : ZIR_INST_IS_NON_ERR;
|
|
bool_bit = addUnNode(&cond_scope, cond_tag, cond_inst, cond_node);
|
|
} else if (payload_token != 0) {
|
|
// Optional condition (AstGen.zig:6592-6599).
|
|
ResultLoc cond_rl = payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
|
|
cond_inst
|
|
= fullBodyExpr(&cond_scope, &cond_scope.base, cond_rl, cond_node);
|
|
ZirInstTag cond_tag
|
|
= payload_is_ref ? ZIR_INST_IS_NON_NULL_PTR : ZIR_INST_IS_NON_NULL;
|
|
bool_bit = addUnNode(&cond_scope, cond_tag, cond_inst, cond_node);
|
|
} else {
|
|
// Bool condition (AstGen.zig:6600-6606).
|
|
ResultLoc coerced_bool_ri = {
|
|
.tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_BOOL_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
};
|
|
cond_inst = fullBodyExpr(
|
|
&cond_scope, &cond_scope.base, coerced_bool_ri, cond_node);
|
|
bool_bit = cond_inst;
|
|
}
|
|
|
|
// Create condbr + cond_block (AstGen.zig:6609-6615).
|
|
ZirInstTag condbr_tag
|
|
= is_inline ? ZIR_INST_CONDBR_INLINE : ZIR_INST_CONDBR;
|
|
uint32_t condbr = addCondBr(&cond_scope, condbr_tag, node);
|
|
ZirInstTag block_tag = is_inline ? ZIR_INST_BLOCK_INLINE : ZIR_INST_BLOCK;
|
|
uint32_t cond_block = makeBlockInst(ag, block_tag, &loop_scope, node);
|
|
setBlockBody(ag, &cond_scope, cond_block); // unstacks cond_scope
|
|
gzAppendInstruction(&loop_scope, cond_block);
|
|
|
|
// Payload handling (AstGen.zig:6623-6690).
|
|
// Create payload instructions in the global inst array but don't add to
|
|
// any scope's scratch area yet. then_scope and continue_scope are created
|
|
// after loop_scope is finalized (to avoid scratch array overlap).
|
|
uint32_t dbg_var_name = 0;
|
|
uint32_t dbg_var_inst = 0;
|
|
uint32_t opt_payload_raw_inst = UINT32_MAX; // raw inst index, not ref
|
|
uint32_t payload_ref = 0;
|
|
uint32_t payload_ident_token = 0;
|
|
uint32_t payload_ident_name = 0;
|
|
bool payload_has_scope = false; // true if we need a ScopeLocalVal
|
|
|
|
if (error_token != 0 && payload_token != 0) {
|
|
// Error union with payload: makeUnNode (AstGen.zig:6628-6655).
|
|
ZirInstTag unwrap_tag = payload_is_ref
|
|
? ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE_PTR
|
|
: ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t raw_idx = ag->inst_len;
|
|
ag->inst_tags[raw_idx] = unwrap_tag;
|
|
ZirInstData d;
|
|
d.un_node.src_node = (int32_t)cond_node - (int32_t)gz->decl_node_index;
|
|
d.un_node.operand = cond_inst;
|
|
ag->inst_datas[raw_idx] = d;
|
|
ag->inst_len++;
|
|
payload_ref = raw_idx + ZIR_REF_START_INDEX;
|
|
opt_payload_raw_inst = raw_idx;
|
|
payload_ident_token = payload_token + (payload_is_ref ? 1u : 0u);
|
|
if (tokenIsUnderscore(tree, payload_ident_token)) {
|
|
if (payload_is_ref) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
} else {
|
|
payload_ident_name = identAsString(ag, payload_ident_token);
|
|
payload_has_scope = true;
|
|
}
|
|
} else if (error_token != 0) {
|
|
// Error union without payload (AstGen.zig:6656-6658).
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t raw_idx = ag->inst_len;
|
|
ag->inst_tags[raw_idx] = ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID;
|
|
ZirInstData d;
|
|
d.un_node.src_node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
d.un_node.operand = cond_inst;
|
|
ag->inst_datas[raw_idx] = d;
|
|
ag->inst_len++;
|
|
opt_payload_raw_inst = raw_idx;
|
|
} else if (payload_token != 0) {
|
|
// Optional with payload: makeUnNode (AstGen.zig:6660-6686).
|
|
ZirInstTag unwrap_tag = payload_is_ref
|
|
? ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE_PTR
|
|
: ZIR_INST_OPTIONAL_PAYLOAD_UNSAFE;
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t raw_idx = ag->inst_len;
|
|
ag->inst_tags[raw_idx] = unwrap_tag;
|
|
ZirInstData d;
|
|
d.un_node.src_node = (int32_t)cond_node - (int32_t)gz->decl_node_index;
|
|
d.un_node.operand = cond_inst;
|
|
ag->inst_datas[raw_idx] = d;
|
|
ag->inst_len++;
|
|
payload_ref = raw_idx + ZIR_REF_START_INDEX;
|
|
opt_payload_raw_inst = raw_idx;
|
|
payload_ident_token = payload_token + (payload_is_ref ? 1u : 0u);
|
|
if (tokenIsUnderscore(tree, payload_ident_token)) {
|
|
if (payload_is_ref) {
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
} else {
|
|
payload_ident_name = identAsString(ag, payload_ident_token);
|
|
payload_has_scope = true;
|
|
}
|
|
}
|
|
|
|
// Create continue_block (AstGen.zig:6695).
|
|
// makeBlockInst doesn't add to scratch.
|
|
uint32_t continue_block_inst = makeBlockInst(ag, block_tag, gz, node);
|
|
|
|
// Add repeat to loop_scope (AstGen.zig:6697-6698).
|
|
{
|
|
ZirInstTag repeat_tag
|
|
= is_inline ? ZIR_INST_REPEAT_INLINE : ZIR_INST_REPEAT;
|
|
ZirInstData repeat_data;
|
|
memset(&repeat_data, 0, sizeof(repeat_data));
|
|
repeat_data.node = (int32_t)node - (int32_t)loop_scope.decl_node_index;
|
|
addInstruction(&loop_scope, repeat_tag, repeat_data);
|
|
}
|
|
|
|
// Set loop body and configure break/continue (AstGen.zig:6700-6708).
|
|
setBlockBody(ag, &loop_scope, loop_inst); // unstacks loop_scope
|
|
loop_scope.break_block = loop_inst;
|
|
loop_scope.continue_block = continue_block_inst;
|
|
if (label_token != UINT32_MAX) {
|
|
loop_scope.label_token = label_token;
|
|
loop_scope.label_block_inst = loop_inst;
|
|
}
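    // Rough ZIR shape built by this function (sketch):
    //   loop {                         // loop_inst
    //     cond_block { cond; condbr }  // cond_block
    //     repeat
    //   }
    // with condbr's then body holding the payload unwrap, the
    // continue_block wrapping the loop body, the continue expression,
    // and a break back to cond_block; the else body breaks loop_inst.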
|
|
|
|
// Now create then_scope (AstGen.zig:6617-6621, 6711).
|
|
// loop_scope is unstacked, scratch is clean.
|
|
GenZir then_scope = makeSubBlock(gz, &cond_scope.base);
|
|
Scope* then_sub_scope = &then_scope.base;
|
|
ScopeLocalVal payload_val_scope;
|
|
memset(&payload_val_scope, 0, sizeof(payload_val_scope));
|
|
|
|
if (payload_has_scope) {
|
|
payload_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &then_scope.base,
|
|
.gen_zir = &then_scope,
|
|
.inst = payload_ref,
|
|
.token_src = payload_ident_token,
|
|
.name = payload_ident_name,
|
|
};
|
|
dbg_var_name = payload_ident_name;
|
|
dbg_var_inst = payload_ref;
|
|
then_sub_scope = &payload_val_scope.base;
|
|
}
|
|
|
|
// Add payload instruction to then_scope (AstGen.zig:6714-6716).
|
|
if (opt_payload_raw_inst != UINT32_MAX)
|
|
gzAppendInstruction(&then_scope, opt_payload_raw_inst);
|
|
// Add dbg_var_val for payload (AstGen.zig:6717).
|
|
if (dbg_var_name != 0)
|
|
addDbgVar(
|
|
&then_scope, ZIR_INST_DBG_VAR_VAL, dbg_var_name, dbg_var_inst);
|
|
// Add continue_block to then_scope (AstGen.zig:6718).
|
|
gzAppendInstruction(&then_scope, continue_block_inst);
|
|
|
|
// Emit continue expression if present (AstGen.zig:6723-6725).
|
|
// Upstream: unusedResultExpr = emitDbgNode + expr + addEnsureResult.
|
|
if (cont_expr != 0) {
|
|
emitDbgNode(&then_scope, cont_expr);
|
|
uint32_t cont_result = expr(&then_scope, then_sub_scope, cont_expr);
|
|
addEnsureResult(&then_scope, cont_result, cont_expr);
|
|
}
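    // e.g. `while (i < n) : (i += 1) { ... }` — the continue expression
    // is emitted here, after the body's continue_block, so it runs once
    // the body finishes (or `continue`s) and before the condition is
    // re-evaluated on the next repeat.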
|
|
|
|
// Create continue_scope (AstGen.zig:6692-6694, 6727).
|
|
GenZir continue_scope = makeSubBlock(gz, then_sub_scope);
|
|
|
|
// Execute body (AstGen.zig:6728-6731).
|
|
uint32_t then_node = body_node;
|
|
emitDbgNode(&continue_scope, then_node);
|
|
uint32_t unused_result = fullBodyExpr(
|
|
&continue_scope, &continue_scope.base, RL_NONE_VAL, then_node);
|
|
addEnsureResult(&continue_scope, unused_result, then_node);
|
|
|
|
// Break continue_block if not noreturn (AstGen.zig:6735-6747).
|
|
ZirInstTag break_tag = is_inline ? ZIR_INST_BREAK_INLINE : ZIR_INST_BREAK;
|
|
if (!endsWithNoReturn(&continue_scope)) {
|
|
// dbg_stmt + dbg_empty_stmt (AstGen.zig:6736-6745).
|
|
advanceSourceCursor(
|
|
ag, tree->tokens.starts[lastToken(tree, then_node)]);
|
|
emitDbgStmt(gz, ag->source_line - gz->decl_line, ag->source_column);
|
|
{
|
|
ZirInstData ext_data;
|
|
ext_data.extended.opcode = (uint16_t)ZIR_EXT_DBG_EMPTY_STMT;
|
|
ext_data.extended.small = 0;
|
|
ext_data.extended.operand = 0;
|
|
addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
|
|
}
|
|
addBreak(&continue_scope, break_tag, continue_block_inst,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
}
|
|
setBlockBody(ag, &continue_scope, continue_block_inst);
|
|
// Break cond_block from then_scope (AstGen.zig:6749).
|
|
addBreak(&then_scope, break_tag, cond_block, ZIR_REF_VOID_VALUE,
|
|
AST_NODE_OFFSET_NONE);
|
|
|
|
// Else scope (AstGen.zig:6751-6797).
|
|
GenZir else_scope = makeSubBlock(gz, &cond_scope.base);
|
|
|
|
if (else_node != 0) {
|
|
Scope* else_sub_scope = &else_scope.base;
|
|
ScopeLocalVal error_val_scope;
|
|
memset(&error_val_scope, 0, sizeof(error_val_scope));
|
|
|
|
if (error_token != 0) {
|
|
// Error capture: else |err| (AstGen.zig:6756-6776).
|
|
ZirInstTag err_tag = payload_is_ref ? ZIR_INST_ERR_UNION_CODE_PTR
|
|
: ZIR_INST_ERR_UNION_CODE;
|
|
uint32_t err_inst
|
|
= addUnNode(&else_scope, err_tag, cond_inst, cond_node);
|
|
if (tokenIsUnderscore(tree, error_token)) {
|
|
// Discard |_| — else_sub_scope stays as &else_scope.base
|
|
} else {
|
|
uint32_t err_name = identAsString(ag, error_token);
|
|
error_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &else_scope.base,
|
|
.gen_zir = &else_scope,
|
|
.inst = err_inst,
|
|
.token_src = error_token,
|
|
.name = err_name,
|
|
};
|
|
addDbgVar(
|
|
&else_scope, ZIR_INST_DBG_VAR_VAL, err_name, err_inst);
|
|
else_sub_scope = &error_val_scope.base;
|
|
}
|
|
}
|
|
|
|
// Remove continue/break blocks so control flow applies to outer
|
|
// loops (AstGen.zig:6783-6784).
|
|
loop_scope.continue_block = UINT32_MAX;
|
|
loop_scope.break_block = UINT32_MAX;
|
|
uint32_t else_result = fullBodyExpr(&else_scope, else_sub_scope,
|
|
loop_scope.break_result_info, else_node);
|
|
if (is_statement) {
|
|
addEnsureResult(&else_scope, else_result, else_node);
|
|
}
|
|
if (!endsWithNoReturn(&else_scope)) {
|
|
addBreak(&else_scope, break_tag, loop_inst, else_result,
|
|
(int32_t)else_node - (int32_t)gz->decl_node_index);
|
|
}
|
|
} else {
|
|
// No else: break with void (AstGen.zig:6794-6796).
|
|
uint32_t void_result
|
|
= rvalue(&else_scope, rl, ZIR_REF_VOID_VALUE, node);
|
|
addBreak(&else_scope, break_tag, loop_inst, void_result,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
|
|
// Wire up condbr (AstGen.zig:6805).
|
|
setCondBrPayload(ag, condbr, bool_bit, &then_scope, &else_scope);
|
|
|
|
// Apply rvalue if needed (AstGen.zig:6807-6810).
|
|
uint32_t result;
|
|
if (need_result_rvalue)
|
|
result = rvalue(gz, rl, loop_inst + ZIR_REF_START_INDEX, node);
|
|
else
|
|
result = loop_inst + ZIR_REF_START_INDEX;
|
|
|
|
// Emit ensure_result_used when used as statement (AstGen.zig:6812-6814).
|
|
if (is_statement) {
|
|
addUnNode(gz, ZIR_INST_ENSURE_RESULT_USED, result, node);
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
// --- switchExpr (AstGen.zig:7625-8117) ---
// Handles switch and switch_comma expressions.
// Encoding: switch_block pl_node with SwitchBlock extra payload.

// Helper: append body instruction with ref_table fixups to pay buffer.
// Mirrors appendPossiblyRefdBodyInst (AstGen.zig:13675-13683) but writes
// to a dynamically-grown pay buffer instead of extra.
static void appendPossiblyRefdBodyInstPay(AstGenCtx* ag, uint32_t body_inst,
    uint32_t** pay, uint32_t* pay_len, uint32_t* pay_cap) {
    if (*pay_len >= *pay_cap) {
        *pay_cap *= 2;
        uint32_t* p = realloc(*pay, *pay_cap * sizeof(uint32_t));
        if (!p)
            abort();
        *pay = p;
    }
    (*pay)[(*pay_len)++] = body_inst;
    uint32_t ref_inst;
    if (refTableFetchRemove(ag, body_inst, &ref_inst)) {
        appendPossiblyRefdBodyInstPay(ag, ref_inst, pay, pay_len, pay_cap);
    }
}

// Helper: append body with fixups and extra refs to pay buffer.
// Mirrors appendBodyWithFixupsExtraRefsArrayList (AstGen.zig:13659-13673).
static void appendBodyWithFixupsExtraRefsPay(AstGenCtx* ag,
    const uint32_t* body, uint32_t body_len, const uint32_t* extra_refs,
    uint32_t extra_refs_len, uint32_t** pay, uint32_t* pay_len_p,
    uint32_t* pay_cap_p) {
    for (uint32_t i = 0; i < extra_refs_len; i++) {
        uint32_t ref_inst;
        if (refTableFetchRemove(ag, extra_refs[i], &ref_inst)) {
            appendPossiblyRefdBodyInstPay(
                ag, ref_inst, pay, pay_len_p, pay_cap_p);
        }
    }
    for (uint32_t i = 0; i < body_len; i++) {
        appendPossiblyRefdBodyInstPay(ag, body[i], pay, pay_len_p, pay_cap_p);
    }
}

// Helper: ensure pay buffer has capacity for `additional` more entries.
static void ensurePayCapacity(
    uint32_t** pay, uint32_t* pay_cap, uint32_t pay_len, uint32_t additional) {
    uint32_t needed = pay_len + additional;
    if (needed > *pay_cap) {
        while (*pay_cap < needed)
            *pay_cap *= 2;
        uint32_t* p = realloc(*pay, *pay_cap * sizeof(uint32_t));
        if (!p)
            abort();
        *pay = p;
    }
}

// Helper: get values for a switch case node. For SWITCH_CASE_ONE /
// SWITCH_CASE_INLINE_ONE, returns the single value (or NULL if else).
// For SWITCH_CASE / SWITCH_CASE_INLINE, returns the SubRange values.
// Returns values count and sets *values_arr to point to the values.
// For _ONE variants, writes to single_buf and returns pointer to it.
static uint32_t switchCaseValues(const Ast* tree, uint32_t case_node,
    uint32_t* single_buf, const uint32_t** values_arr) {
    AstNodeTag ct = tree->nodes.tags[case_node];
    AstData cd = tree->nodes.datas[case_node];
    switch (ct) {
    case AST_NODE_SWITCH_CASE_ONE:
    case AST_NODE_SWITCH_CASE_INLINE_ONE:
        if (cd.lhs == 0) {
            *values_arr = NULL;
            return 0; // else prong
        }
        *single_buf = cd.lhs;
        *values_arr = single_buf;
        return 1;
    case AST_NODE_SWITCH_CASE:
    case AST_NODE_SWITCH_CASE_INLINE: {
        uint32_t ist = tree->extra_data.arr[cd.lhs];
        uint32_t ien = tree->extra_data.arr[cd.lhs + 1];
        *values_arr = tree->extra_data.arr + ist;
        return ien - ist;
    }
    default:
        *values_arr = NULL;
        return 0;
    }
}
|
|
|
|
// --- switchExprErrUnion (AstGen.zig:7127-7623) ---
|
|
// Handles `catch |err| switch(err) { ... }` and
|
|
// `if (x) |v| { ... } else |err| switch (err) { ... }` optimization.
|
|
// Emits ZIR_INST_SWITCH_BLOCK_ERR_UNION instead of separate catch/if + switch.
|
|
// node_ty: 0=catch, 1=if.
|
|
static uint32_t switchExprErrUnion(GenZir* parent_gz, Scope* scope,
|
|
ResultLoc rl, uint32_t catch_or_if_node, int node_ty) {
|
|
AstGenCtx* ag = parent_gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstData cnd = tree->nodes.datas[catch_or_if_node];
|
|
|
|
// Extract switch_node, operand_node, error_payload based on node_ty
|
|
// (AstGen.zig:7138-7154).
|
|
uint32_t switch_node, operand_node, error_payload;
|
|
// For `if` case, we also need payload_token and then_node.
|
|
uint32_t if_payload_token = 0; // 0 = no payload
|
|
uint32_t if_then_node = 0;
|
|
if (node_ty == 0) {
|
|
// catch: rhs=switch, lhs=operand, main_token=catch keyword
|
|
switch_node = cnd.rhs;
|
|
operand_node = cnd.lhs;
|
|
uint32_t catch_token = tree->nodes.main_tokens[catch_or_if_node];
|
|
error_payload = catch_token + 2; // token after `catch |`
|
|
} else {
|
|
// if: parse fullIf structure (AstGen.zig:7138-7154).
|
|
operand_node = cnd.lhs;
|
|
if_then_node = tree->extra_data.arr[cnd.rhs];
|
|
uint32_t else_node = tree->extra_data.arr[cnd.rhs + 1];
|
|
switch_node = else_node;
|
|
// Compute error_token.
|
|
uint32_t else_tok = lastToken(tree, if_then_node) + 1;
|
|
error_payload = else_tok + 2; // else |err| => err is at else_tok+2
|
|
// Compute payload_token (if (cond) |val|).
|
|
uint32_t last_cond_tok = lastToken(tree, operand_node);
|
|
uint32_t pipe_tok = last_cond_tok + 2;
|
|
if (pipe_tok < tree->tokens.len
|
|
&& tree->tokens.tags[pipe_tok] == TOKEN_PIPE) {
|
|
if_payload_token = pipe_tok + 1;
|
|
}
|
|
}
|
|
|
|
// Parse switch: lhs=condition, rhs=extra index for SubRange.
|
|
AstData snd = tree->nodes.datas[switch_node];
|
|
uint32_t sw_extra = snd.rhs;
|
|
uint32_t cases_start = tree->extra_data.arr[sw_extra];
|
|
uint32_t cases_end = tree->extra_data.arr[sw_extra + 1];
|
|
const uint32_t* case_nodes_arr = tree->extra_data.arr + cases_start;
|
|
uint32_t case_count = cases_end - cases_start;
|
|
|
|
bool do_err_trace = (ag->fn_ret_ty != 0);
|
|
bool need_rl = nodesNeedRlContains(ag, catch_or_if_node);
|
|
ResultLoc break_rl
|
|
= breakResultInfo(parent_gz, rl, catch_or_if_node, need_rl);
|
|
bool need_result_rvalue = (break_rl.tag != rl.tag);
|
|
|
|
// payload_is_ref (AstGen.zig:7168-7171).
|
|
bool payload_is_ref;
|
|
if (node_ty == 1) {
|
|
payload_is_ref = (if_payload_token != 0
|
|
&& tree->tokens.tags[if_payload_token] == TOKEN_ASTERISK);
|
|
} else {
|
|
payload_is_ref = (rl.tag == RL_REF || rl.tag == RL_REF_COERCED_TY);
|
|
}
|
|
|
|
// First pass: categorize cases (AstGen.zig:7173-7229).
|
|
uint32_t scalar_cases_len = 0;
|
|
uint32_t multi_cases_len = 0;
|
|
bool has_else = false;
|
|
uint32_t else_ci = UINT32_MAX; // index into case_nodes_arr
|
|
|
|
for (uint32_t ci = 0; ci < case_count; ci++) {
|
|
uint32_t cn = case_nodes_arr[ci];
|
|
uint32_t single_buf;
|
|
const uint32_t* values;
|
|
uint32_t values_len = switchCaseValues(tree, cn, &single_buf, &values);
|
|
|
|
if (values_len == 0) {
|
|
has_else = true;
|
|
else_ci = ci;
|
|
continue;
|
|
}
|
|
|
|
if (values_len == 1
|
|
&& tree->nodes.tags[values[0]] != AST_NODE_SWITCH_RANGE) {
|
|
scalar_cases_len++;
|
|
} else {
|
|
multi_cases_len++;
|
|
}
|
|
}
|
|
|
|
// Operand rl (AstGen.zig:7231-7234).
|
|
ResultLoc operand_rl = payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
|
|
operand_rl.ctx = RI_CTX_ERROR_HANDLING_EXPR;
|
|
|
|
// Save operand source location (AstGen.zig:7236-7237).
|
|
advanceSourceCursorToNode(ag, operand_node);
|
|
uint32_t operand_lc_line = ag->source_line - parent_gz->decl_line;
|
|
uint32_t operand_lc_col = ag->source_column;
|
|
|
|
uint32_t raw_operand = reachableExpr(
|
|
parent_gz, scope, operand_rl, operand_node, switch_node);
|
|
|
|
// --- Payload buffer ---
|
|
// Table: [non_error_prong] [else?] [scalar_0..N] [multi_0..N]
|
|
uint32_t case_table_start = 0;
|
|
uint32_t scalar_case_table = case_table_start + 1 + (has_else ? 1u : 0u);
|
|
uint32_t multi_case_table = scalar_case_table + scalar_cases_len;
|
|
uint32_t case_table_end = multi_case_table + multi_cases_len;
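    // Worked example (sketch): with an else prong, two scalar error
    // prongs and one multi prong, the table indices come out as
    //   [0] non-error  [1] else  [2..3] scalars  [4] multi
    // so case_table_end == 5, and per-prong payload data follows it.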
|
|
|
|
uint32_t pay_cap = case_table_end + case_count * 16;
|
|
if (pay_cap < 64)
|
|
pay_cap = 64;
|
|
uint32_t* pay = malloc(pay_cap * sizeof(uint32_t));
|
|
if (!pay)
|
|
abort();
|
|
uint32_t pay_len = case_table_end;
|
|
|
|
GenZir block_scope = makeSubBlock(parent_gz, scope);
|
|
block_scope.instructions_top = UINT32_MAX; // unstacked
|
|
block_scope.break_result_info = break_rl;
|
|
|
|
// dbg_stmt before switch_block_err_union (AstGen.zig:7249).
|
|
emitDbgStmtForceCurrentIndex(parent_gz, operand_lc_line, operand_lc_col);
|
|
|
|
uint32_t switch_inst = makeBlockInst(
|
|
ag, ZIR_INST_SWITCH_BLOCK_ERR_UNION, parent_gz, switch_node);
|
|
|
|
GenZir case_scope = makeSubBlock(parent_gz, &block_scope.base);
|
|
|
|
// --- Non-error prong (AstGen.zig:7255-7391) ---
|
|
{
|
|
uint32_t body_len_index = pay_len;
|
|
pay[case_table_start] = body_len_index;
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 1);
|
|
pay_len = body_len_index + 1; // body_len slot
|
|
|
|
case_scope.instructions_top = ag->scratch_inst_len;
|
|
|
|
ZirInstTag unwrap_tag = payload_is_ref
|
|
? ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE_PTR
|
|
: ZIR_INST_ERR_UNION_PAYLOAD_UNSAFE;
|
|
uint32_t unwrapped = addUnNode(
|
|
&case_scope, unwrap_tag, raw_operand, catch_or_if_node);
|
|
|
|
if (node_ty == 0) {
|
|
// catch: rvalue the unwrapped payload and break
|
|
// (AstGen.zig:7299-7314).
|
|
uint32_t case_result
|
|
= (rl.tag == RL_REF || rl.tag == RL_REF_COERCED_TY)
|
|
? unwrapped
|
|
: rvalue(&case_scope, block_scope.break_result_info, unwrapped,
|
|
catch_or_if_node);
|
|
addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, case_result,
|
|
(int32_t)catch_or_if_node
|
|
- (int32_t)parent_gz->decl_node_index);
|
|
} else {
|
|
// if: evaluate then branch with payload capture
|
|
// (AstGen.zig:7316-7368).
|
|
ScopeLocalVal payload_val_scope;
|
|
memset(&payload_val_scope, 0, sizeof(payload_val_scope));
|
|
Scope* then_sub_scope = &case_scope.base;
|
|
|
|
if (if_payload_token != 0) {
|
|
uint32_t name_token
|
|
= if_payload_token + (payload_is_ref ? 1u : 0u);
|
|
uint32_t ident_name = identAsString(ag, name_token);
|
|
if (tokenIsUnderscore(tree, name_token)) {
|
|
// Discard: sub_scope stays as case_scope.
|
|
} else {
|
|
payload_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &case_scope.base,
|
|
.gen_zir = &case_scope,
|
|
.name = ident_name,
|
|
.inst = unwrapped,
|
|
.token_src = name_token,
|
|
};
|
|
addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, ident_name,
|
|
unwrapped);
|
|
then_sub_scope = &payload_val_scope.base;
|
|
}
|
|
} else {
|
|
// No payload: ensure payload is void
|
|
// (AstGen.zig:7346-7350).
|
|
addUnNode(&case_scope, ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID,
|
|
raw_operand, catch_or_if_node);
|
|
}
|
|
|
|
uint32_t then_result = exprRl(&case_scope, then_sub_scope,
|
|
block_scope.break_result_info, if_then_node);
|
|
if (!refIsNoReturn(parent_gz, then_result)) {
|
|
addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, then_result,
|
|
(int32_t)if_then_node
|
|
- (int32_t)parent_gz->decl_node_index);
|
|
}
|
|
}
|
|
|
|
uint32_t raw_body_len = gzInstructionsLen(&case_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&case_scope);
|
|
const uint32_t extra_refs[1] = { switch_inst };
|
|
uint32_t body_len = countBodyLenAfterFixupsExtraRefs(
|
|
ag, body, raw_body_len, extra_refs, 1);
|
|
|
|
// ProngInfo: body_len, capture, not inline, no tag
|
|
// (AstGen.zig:7375-7389).
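        // ProngInfo packing as encoded in this file: body_len in bits
        // 0..27, capture in bits 28..29, is_inline in bit 30,
        // has_tag_capture in bit 31. Only body_len and capture are
        // meaningful for the non-error prong; the rest stay zero.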
|
|
uint32_t capture = 0; // none
|
|
if (node_ty == 1 && if_payload_token != 0) {
|
|
capture = payload_is_ref ? 2u : 1u; // by_ref or by_val
|
|
}
|
|
pay[body_len_index]
|
|
= (body_len & 0x0FFFFFFFu) | ((capture & 3u) << 28);
|
|
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, body_len);
|
|
appendBodyWithFixupsExtraRefsPay(
|
|
ag, body, raw_body_len, extra_refs, 1, &pay, &pay_len, &pay_cap);
|
|
|
|
gzUnstack(&case_scope);
|
|
}
|
|
|
|
// --- Error capture name (AstGen.zig:7329-7342) ---
|
|
uint32_t err_name = identAsString(ag, error_payload);
|
|
|
|
// Allocate shared value_placeholder for error capture
|
|
// (AstGen.zig:7345-7353).
|
|
uint32_t err_inst = ag->inst_len;
|
|
ensureInstCapacity(ag, 1);
|
|
ag->inst_tags[err_inst] = ZIR_INST_EXTENDED;
|
|
ZirInstData edata;
|
|
memset(&edata, 0xaa, sizeof(edata));
|
|
edata.extended.opcode = (uint16_t)ZIR_EXT_VALUE_PLACEHOLDER;
|
|
ag->inst_datas[err_inst] = edata;
|
|
ag->inst_len++;
|
|
|
|
// --- Error cases (AstGen.zig:7356-7520) ---
|
|
uint32_t multi_case_index = 0;
|
|
uint32_t scalar_case_index = 0;
|
|
bool any_uses_err_capture = false;
|
|
|
|
for (uint32_t ci = 0; ci < case_count; ci++) {
|
|
uint32_t cn = case_nodes_arr[ci];
|
|
AstNodeTag ct = tree->nodes.tags[cn];
|
|
AstData cd = tree->nodes.datas[cn];
|
|
bool is_inline = (ct == AST_NODE_SWITCH_CASE_INLINE_ONE
|
|
|| ct == AST_NODE_SWITCH_CASE_INLINE);
|
|
|
|
uint32_t single_buf;
|
|
const uint32_t* values;
|
|
uint32_t values_len = switchCaseValues(tree, cn, &single_buf, &values);
|
|
|
|
bool is_multi_case = values_len > 1
|
|
|| (values_len == 1
|
|
&& tree->nodes.tags[values[0]] == AST_NODE_SWITCH_RANGE);
|
|
|
|
// Set up error capture scope (AstGen.zig:7366-7404).
|
|
uint32_t dbg_var_name = 0;
|
|
uint32_t dbg_var_inst = 0;
|
|
ScopeLocalVal err_scope;
|
|
ScopeLocalVal capture_scope_val;
|
|
memset(&err_scope, 0, sizeof(err_scope));
|
|
memset(&capture_scope_val, 0, sizeof(capture_scope_val));
|
|
|
|
err_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &case_scope.base,
|
|
.gen_zir = &case_scope,
|
|
.inst = err_inst + ZIR_REF_START_INDEX,
|
|
.token_src = error_payload,
|
|
.name = err_name,
|
|
};
|
|
|
|
Scope* sub_scope = &err_scope.base;
|
|
|
|
// Check for case payload capture (AstGen.zig:7381-7404).
|
|
uint32_t arrow_token = tree->nodes.main_tokens[cn];
|
|
bool has_payload = false;
|
|
if (tree->tokens.tags[arrow_token + 1] == TOKEN_PIPE) {
|
|
uint32_t capture_token = arrow_token + 2;
|
|
if (tree->tokens.tags[capture_token] == TOKEN_IDENTIFIER) {
|
|
has_payload = true;
|
|
if (!tokenIsUnderscore(tree, capture_token)) {
|
|
uint32_t tag_name = identAsString(ag, capture_token);
|
|
capture_scope_val = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &case_scope.base,
|
|
.gen_zir = &case_scope,
|
|
.inst = switch_inst + ZIR_REF_START_INDEX,
|
|
.token_src = capture_token,
|
|
.name = tag_name,
|
|
};
|
|
dbg_var_name = tag_name;
|
|
dbg_var_inst = switch_inst + ZIR_REF_START_INDEX;
|
|
// err_scope parent points to capture_scope_val
|
|
err_scope.parent = &capture_scope_val.base;
|
|
}
|
|
}
|
|
}
|
|
|
|
// Fill item data in pay buffer (AstGen.zig:7406-7462).
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 32);
|
|
uint32_t hdr = pay_len;
|
|
uint32_t prong_info_slot = 0;
|
|
|
|
if (is_multi_case) {
|
|
pay[multi_case_table + multi_case_index] = hdr;
|
|
multi_case_index++;
|
|
pay[pay_len++] = 0; // items_len placeholder
|
|
pay[pay_len++] = 0; // ranges_len placeholder
|
|
prong_info_slot = pay_len++;
|
|
|
|
uint32_t nitems = 0;
|
|
uint32_t nranges = 0;
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (tree->nodes.tags[values[vi]] == AST_NODE_SWITCH_RANGE) {
|
|
nranges++;
|
|
continue;
|
|
}
|
|
nitems++;
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 1);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
values[vi], COMPTIME_REASON_SWITCH_ITEM);
|
|
}
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (tree->nodes.tags[values[vi]] != AST_NODE_SWITCH_RANGE)
|
|
continue;
|
|
AstData rng = tree->nodes.datas[values[vi]];
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 2);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
rng.lhs, COMPTIME_REASON_SWITCH_ITEM);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
rng.rhs, COMPTIME_REASON_SWITCH_ITEM);
|
|
}
|
|
pay[hdr] = nitems;
|
|
pay[hdr + 1] = nranges;
|
|
} else if (ci == else_ci) {
|
|
pay[case_table_start + 1] = hdr;
|
|
prong_info_slot = pay_len++;
|
|
} else {
|
|
// Scalar case.
|
|
pay[scalar_case_table + scalar_case_index] = hdr;
|
|
scalar_case_index++;
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
values[0], COMPTIME_REASON_SWITCH_ITEM);
|
|
prong_info_slot = pay_len++;
|
|
}
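        // Per-prong pay layout at this point (sketch):
        //   scalar prong: [item_ref, prong_info]
        //   multi prong:  [items_len, ranges_len, prong_info,
        //                  item_refs..., range lo/hi pairs...]
        //   else prong:   [prong_info]
        // The prong body instructions are appended right after this.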
|
|
|
|
// Evaluate body (AstGen.zig:7464-7518).
|
|
{
|
|
case_scope.instructions_top = ag->scratch_inst_len;
|
|
|
|
if (do_err_trace && nodeMayAppendToErrorTrace(tree, operand_node))
|
|
addSaveErrRetIndex(&case_scope, ZIR_REF_NONE);
|
|
|
|
if (dbg_var_name != 0) {
|
|
addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, dbg_var_name,
|
|
dbg_var_inst);
|
|
}
|
|
|
|
uint32_t body_node = cd.rhs;
|
|
uint32_t result = fullBodyExpr(&case_scope, sub_scope,
|
|
block_scope.break_result_info, body_node);
|
|
|
|
            // Track err capture usage (AstGen.zig:7489-7494).
            // Upstream records usage via the scope's used/discarded
            // fields; we don't track scope usage in C, so approximate it
            // by scanning the prong body for any instruction whose
            // operand references the err_inst placeholder.
|
|
bool uses_err = false;
|
|
{
|
|
uint32_t rbl = gzInstructionsLen(&case_scope);
|
|
const uint32_t* rbody = gzInstructionsSlice(&case_scope);
|
|
for (uint32_t bi = 0; bi < rbl; bi++) {
|
|
uint32_t inst = rbody[bi];
|
|
// Check if any instruction data references err_inst.
|
|
ZirInstData d = ag->inst_datas[inst];
|
|
if (d.un_node.operand == err_inst + ZIR_REF_START_INDEX) {
|
|
uses_err = true;
|
|
break;
|
|
}
|
|
}
|
|
}
|
|
if (uses_err) {
|
|
addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, err_name,
|
|
err_inst + ZIR_REF_START_INDEX);
|
|
any_uses_err_capture = true;
|
|
}
|
|
|
|
if (!refIsNoReturn(parent_gz, result)) {
|
|
if (do_err_trace)
|
|
restoreErrRetIndex(&case_scope, switch_inst,
|
|
block_scope.break_result_info, body_node, result);
|
|
addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, result,
|
|
(int32_t)body_node - (int32_t)parent_gz->decl_node_index);
|
|
}
|
|
|
|
uint32_t raw_body_len = gzInstructionsLen(&case_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&case_scope);
|
|
|
|
uint32_t extra_refs[2];
|
|
uint32_t extra_refs_len = 0;
|
|
extra_refs[extra_refs_len++] = switch_inst;
|
|
if (uses_err)
|
|
extra_refs[extra_refs_len++] = err_inst;
|
|
|
|
uint32_t body_len = countBodyLenAfterFixupsExtraRefs(
|
|
ag, body, raw_body_len, extra_refs, extra_refs_len);
|
|
|
|
uint32_t capture = has_payload ? 1u : 0u; // by_val or none
|
|
uint32_t prong_info = (body_len & 0x0FFFFFFFu)
|
|
| ((capture & 3u) << 28) | ((is_inline ? 1u : 0u) << 30);
|
|
pay[prong_info_slot] = prong_info;
|
|
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, body_len);
|
|
appendBodyWithFixupsExtraRefsPay(ag, body, raw_body_len,
|
|
extra_refs, extra_refs_len, &pay, &pay_len, &pay_cap);
|
|
|
|
gzUnstack(&case_scope);
|
|
}
|
|
}
|
|
|
|
// Now add switch_inst to parent (AstGen.zig:7522).
|
|
gzAppendInstruction(parent_gz, switch_inst);
|
|
|
|
// --- Serialize to extra (AstGen.zig:7524-7615) ---
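    // Trailing extra layout produced here (sketch):
    //   [operand][bits][main_src_node_offset]
    //   [multi_cases_len (if any)][err_inst (if used)]
    //   then each prong's pay data, copied verbatim in table order.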
|
|
ensureExtraCapacity(ag,
|
|
3 + (multi_cases_len > 0 ? 1u : 0u) + (any_uses_err_capture ? 1u : 0u)
|
|
+ pay_len - case_table_end);
|
|
uint32_t payload_index = ag->extra_len;
|
|
|
|
// SwitchBlockErrUnion: operand (AstGen.zig:7529).
|
|
ag->extra[ag->extra_len++] = raw_operand;
|
|
|
|
// SwitchBlockErrUnion: bits (AstGen.zig:7530-7538).
|
|
{
|
|
uint32_t bits = 0;
|
|
if (multi_cases_len > 0)
|
|
bits |= 1u; // has_multi_cases (bit 0)
|
|
if (has_else)
|
|
bits |= (1u << 1); // has_else (bit 1)
|
|
if (any_uses_err_capture)
|
|
bits |= (1u << 2); // any_uses_err_capture (bit 2)
|
|
if (payload_is_ref)
|
|
bits |= (1u << 3); // payload_is_ref (bit 3)
|
|
bits |= (scalar_cases_len & 0x0FFFFFFFu) << 4; // scalar_cases_len
|
|
ag->extra[ag->extra_len++] = bits;
|
|
}
|
|
|
|
// SwitchBlockErrUnion: main_src_node_offset (AstGen.zig:7539).
|
|
ag->extra[ag->extra_len++] = (uint32_t)((int32_t)catch_or_if_node
|
|
- (int32_t)parent_gz->decl_node_index);
|
|
|
|
// multi_cases_len (AstGen.zig:7542-7544).
|
|
if (multi_cases_len > 0)
|
|
ag->extra[ag->extra_len++] = multi_cases_len;
|
|
|
|
// err_inst (AstGen.zig:7546-7548).
|
|
if (any_uses_err_capture)
|
|
ag->extra[ag->extra_len++] = err_inst;
|
|
|
|
ag->inst_datas[switch_inst].pl_node.payload_index = payload_index;
|
|
|
|
// Serialize case data from pay table (AstGen.zig:7552-7613).
|
|
for (uint32_t i = 0; i < case_table_end; i++) {
|
|
uint32_t si = pay[i];
|
|
uint32_t body_len_idx = si;
|
|
uint32_t end = si;
|
|
|
|
if (i < scalar_case_table) {
|
|
// Non-error or else prong: [prong_info, body...]
|
|
end += 1;
|
|
} else if (i < multi_case_table) {
|
|
// Scalar: [item, prong_info, body...]
|
|
body_len_idx = si + 1;
|
|
end += 2;
|
|
} else {
|
|
// Multi: [items_len, ranges_len, prong_info, items..., ranges...]
|
|
body_len_idx = si + 2;
|
|
uint32_t ni = pay[si];
|
|
uint32_t nr = pay[si + 1];
|
|
end += 3 + ni + nr * 2;
|
|
}
|
|
uint32_t prong_info = pay[body_len_idx];
|
|
uint32_t bl = prong_info & 0x0FFFFFFFu;
|
|
end += bl;
|
|
ensureExtraCapacity(ag, end - si);
|
|
for (uint32_t j = si; j < end; j++)
|
|
ag->extra[ag->extra_len++] = pay[j];
|
|
}
|
|
|
|
free(pay);
|
|
|
|
if (need_result_rvalue)
|
|
return rvalue(
|
|
parent_gz, rl, switch_inst + ZIR_REF_START_INDEX, switch_node);
|
|
return switch_inst + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
static uint32_t switchExpr(
|
|
GenZir* parent_gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
AstGenCtx* ag = parent_gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
bool need_rl = nodesNeedRlContains(ag, node);
|
|
ResultLoc break_rl = breakResultInfo(parent_gz, rl, node, need_rl);
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Detect label (AstGen.zig:7652-7654 / Ast.zig switchFull).
|
|
uint32_t main_token = tree->nodes.main_tokens[node];
|
|
uint32_t label_token = UINT32_MAX; // none
|
|
if (tree->tokens.tags[main_token] == TOKEN_IDENTIFIER) {
|
|
label_token = main_token;
|
|
// switch_token = main_token + 2 (skip label + colon)
|
|
}
|
|
|
|
// AST_NODE_SWITCH: lhs = condition node, rhs = extra index for SubRange.
|
|
// SubRange[rhs] = { cases_start, cases_end }.
|
|
// Case nodes are at extra_data[cases_start..cases_end].
|
|
uint32_t cond_node = nd.lhs;
|
|
uint32_t extra_idx = nd.rhs;
|
|
uint32_t cases_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t cases_end = tree->extra_data.arr[extra_idx + 1];
|
|
const uint32_t* case_nodes_arr = tree->extra_data.arr + cases_start;
|
|
uint32_t case_count = cases_end - cases_start;
|
|
|
|
// --- First pass: categorize cases (AstGen.zig:7659-7762) ---
|
|
bool any_payload_is_ref = false;
|
|
bool any_has_tag_capture = false;
|
|
bool any_non_inline_capture = false;
|
|
uint32_t scalar_cases_len = 0;
|
|
uint32_t multi_cases_len = 0;
|
|
bool has_else = false;
|
|
// Underscore prong tracking (AstGen.zig:7667-7670).
|
|
uint32_t underscore_case_idx = UINT32_MAX; // index into case_nodes_arr
|
|
uint32_t underscore_node = UINT32_MAX; // the `_` value node
|
|
// underscore_additional_items: 0=none, 2=under, 4=under_one_item,
|
|
// 6=under_many_items (matching SpecialProngs bit patterns).
|
|
uint32_t underscore_additional_items = 2; // .under
|
|
|
|
for (uint32_t ci = 0; ci < case_count; ci++) {
|
|
uint32_t cn = case_nodes_arr[ci];
|
|
AstNodeTag ct = tree->nodes.tags[cn];
|
|
AstData cd = tree->nodes.datas[cn];
|
|
bool is_inline = (ct == AST_NODE_SWITCH_CASE_INLINE_ONE
|
|
|| ct == AST_NODE_SWITCH_CASE_INLINE);
|
|
|
|
// Check payload token for ref/tag capture (AstGen.zig:7673-7689).
|
|
uint32_t arrow_token = tree->nodes.main_tokens[cn]; // =>
|
|
if (tree->tokens.tags[arrow_token + 1] == TOKEN_PIPE) {
|
|
uint32_t payload_token = arrow_token + 2;
|
|
uint32_t ident = payload_token;
|
|
if (tree->tokens.tags[payload_token] == TOKEN_ASTERISK) {
|
|
any_payload_is_ref = true;
|
|
ident = payload_token + 1;
|
|
}
|
|
if (tree->tokens.tags[ident + 1] == TOKEN_COMMA) {
|
|
any_has_tag_capture = true;
|
|
}
|
|
if (!tokenIsUnderscore(tree, ident)) {
|
|
any_non_inline_capture = true;
|
|
}
|
|
}
|
|
|
|
// Get values for this case.
|
|
uint32_t single_buf;
|
|
const uint32_t* values;
|
|
uint32_t values_len = switchCaseValues(tree, cn, &single_buf, &values);
|
|
|
|
// Check for else prong (values_len == 0) (AstGen.zig:7693-7711).
|
|
if (values_len == 0) {
|
|
has_else = true;
|
|
continue;
|
|
}
|
|
|
|
// Check for '_' prong (AstGen.zig:7714-7752).
|
|
bool case_has_underscore = false;
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
uint32_t val = values[vi];
|
|
if (tree->nodes.tags[val] == AST_NODE_IDENTIFIER
|
|
&& isUnderscoreIdent(tree, val)) {
|
|
underscore_case_idx = ci;
|
|
underscore_node = val;
|
|
switch (values_len) {
|
|
case 1:
|
|
underscore_additional_items = 2; // .under
|
|
break;
|
|
case 2:
|
|
underscore_additional_items = 4; // .under_one_item
|
|
break;
|
|
default:
|
|
underscore_additional_items = 6; // .under_many_items
|
|
break;
|
|
}
|
|
case_has_underscore = true;
|
|
}
|
|
}
|
|
if (case_has_underscore)
|
|
continue;
|
|
|
|
// Categorize as scalar or multi (AstGen.zig:7754-7758).
|
|
if (values_len == 1
|
|
&& tree->nodes.tags[values[0]] != AST_NODE_SWITCH_RANGE) {
|
|
scalar_cases_len++;
|
|
} else {
|
|
multi_cases_len++;
|
|
}
|
|
(void)is_inline; // inline_cases_len tracking skipped (issue 13)
|
|
(void)cd;
|
|
}
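    // Example of the categorization above (sketch):
    //   switch (x) { 1 => a, 2...4, 6 => b, _ => c }
    // yields scalar_cases_len=1, multi_cases_len=1, has_else=false, and
    // an underscore prong with no additional items (.under).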
|
|
|
|
// Compute special_prongs (AstGen.zig:7764-7770).
|
|
// SpecialProngs is a 3-bit field:
|
|
// bit 0: has_else
|
|
// bits 1-2: underscore variant (0=none, 1=under, 2=under_one_item,
|
|
// 3=under_many_items)
|
|
bool has_under = (underscore_case_idx != UINT32_MAX);
|
|
uint32_t special_prongs; // 3-bit SpecialProngs enum value
|
|
{
|
|
uint32_t else_bit = has_else ? 1u : 0u;
|
|
uint32_t under_bits = has_under ? underscore_additional_items : 0u;
|
|
special_prongs = else_bit | under_bits;
|
|
}
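    // Example values of the 3-bit field (sketch): else only -> 0b001,
    // `_` only -> 0b010, else plus `_, x` (one extra item) -> 0b101.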
|
|
|
|
// Operand result info (AstGen.zig:7772).
|
|
ResultLoc operand_ri = any_payload_is_ref ? RL_REF_VAL : RL_NONE_VAL;
|
|
|
|
// Save operand source location before evaluating (AstGen.zig:7774-7775).
|
|
advanceSourceCursorToNode(ag, cond_node);
|
|
uint32_t operand_lc_line = ag->source_line - parent_gz->decl_line;
|
|
uint32_t operand_lc_col = ag->source_column;
|
|
|
|
// Evaluate switch operand (AstGen.zig:7777).
|
|
uint32_t raw_operand = exprRl(parent_gz, scope, operand_ri, cond_node);
|
|
|
|
// Compute typeof for labeled switch continue support
|
|
// (AstGen.zig:7782-7784).
|
|
uint32_t raw_operand_ty_ref = 0;
|
|
if (label_token != UINT32_MAX) {
|
|
raw_operand_ty_ref
|
|
= addUnNode(parent_gz, ZIR_INST_TYPEOF, raw_operand, cond_node);
|
|
}
|
|
|
|
// Sema expects a dbg_stmt immediately before switch_block(_ref)
|
|
// (AstGen.zig:7806).
|
|
emitDbgStmtForceCurrentIndex(parent_gz, operand_lc_line, operand_lc_col);
|
|
|
|
// Create switch_block instruction (AstGen.zig:7808-7809).
|
|
ZirInstTag switch_tag = any_payload_is_ref ? ZIR_INST_SWITCH_BLOCK_REF
|
|
: ZIR_INST_SWITCH_BLOCK;
|
|
uint32_t switch_inst = makeBlockInst(ag, switch_tag, parent_gz, node);
|
|
|
|
// Set up block_scope (AstGen.zig:7800-7826).
|
|
GenZir block_scope = makeSubBlock(parent_gz, scope);
|
|
block_scope.instructions_top = UINT32_MAX; // unstacked
|
|
block_scope.break_result_info = break_rl;
|
|
|
|
// Label support (AstGen.zig:7811-7826).
|
|
if (label_token != UINT32_MAX) {
|
|
block_scope.continue_block = switch_inst;
|
|
// continue_result_info: for ref, use ref_coerced_ty; else coerced_ty
|
|
// (AstGen.zig:7813-7818).
|
|
if (any_payload_is_ref) {
|
|
block_scope.break_result_info
|
|
= (ResultLoc) { .tag = RL_REF_COERCED_TY,
|
|
.data = raw_operand_ty_ref,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
}
|
|
// Note: we store continue_result_info as break_result_info on
|
|
// block_scope for now; the label's block_inst is switch_inst.
|
|
block_scope.label_token = label_token;
|
|
block_scope.label_block_inst = switch_inst;
|
|
}
|
|
|
|
// Allocate shared value_placeholder for tag captures
|
|
// (AstGen.zig:7833-7844).
|
|
uint32_t tag_inst = 0;
|
|
if (any_has_tag_capture) {
|
|
tag_inst = ag->inst_len;
|
|
ensureInstCapacity(ag, 1);
|
|
ag->inst_tags[tag_inst] = ZIR_INST_EXTENDED;
|
|
ZirInstData tdata;
|
|
memset(&tdata, 0xaa, sizeof(tdata));
|
|
tdata.extended.opcode = (uint16_t)ZIR_EXT_VALUE_PLACEHOLDER;
|
|
ag->inst_datas[tag_inst] = tdata;
|
|
ag->inst_len++;
|
|
}
|
|
|
|
// Case scope — re-used for all cases (AstGen.zig:7829-7830).
|
|
GenZir case_scope = makeSubBlock(parent_gz, &block_scope.base);
|
|
case_scope.instructions_top = UINT32_MAX; // unstacked initially
|
|
|
|
// --- Payload buffer (AstGen.zig:7789-7798) ---
|
|
// Table layout: [else?] [under?] [scalar_0..N] [multi_0..N]
|
|
uint32_t else_tbl = 0;
|
|
uint32_t under_tbl = (has_else ? 1u : 0u);
|
|
uint32_t scalar_tbl = under_tbl + (has_under ? 1u : 0u);
|
|
uint32_t multi_tbl = scalar_tbl + scalar_cases_len;
|
|
uint32_t table_size = multi_tbl + multi_cases_len;
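    // Example (sketch): `switch (x) { 1 => a, 2, 3 => b, else => c }`
    // gives has_else=true, has_under=false, one scalar and one multi
    // prong, so else_tbl=0, under_tbl=1, scalar_tbl=1, multi_tbl=2 and
    // table_size=3.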
|
|
|
|
uint32_t pay_cap = table_size + case_count * 16;
|
|
if (pay_cap < 64)
|
|
pay_cap = 64;
|
|
    uint32_t* pay = malloc(pay_cap * sizeof(uint32_t));
    if (!pay)
        abort();
    uint32_t pay_len = table_size;
|
|
|
|
uint32_t scalar_ci = 0;
|
|
uint32_t multi_ci = 0;
|
|
|
|
// --- Second pass: emit items and bodies (AstGen.zig:7849-8027) ---
|
|
for (uint32_t ci = 0; ci < case_count; ci++) {
|
|
uint32_t cn = case_nodes_arr[ci];
|
|
AstNodeTag ct = tree->nodes.tags[cn];
|
|
AstData cd = tree->nodes.datas[cn];
|
|
bool is_inline = (ct == AST_NODE_SWITCH_CASE_INLINE_ONE
|
|
|| ct == AST_NODE_SWITCH_CASE_INLINE);
|
|
|
|
// Get values for this case.
|
|
uint32_t single_buf;
|
|
const uint32_t* values;
|
|
uint32_t values_len = switchCaseValues(tree, cn, &single_buf, &values);
|
|
|
|
// Determine if this is the else, underscore, scalar, or multi case.
|
|
bool is_else_case = (values_len == 0);
|
|
bool is_underscore_case = (ci == underscore_case_idx);
|
|
bool is_multi_case = false;
|
|
if (!is_else_case && !is_underscore_case) {
|
|
is_multi_case = (values_len > 1
|
|
|| (values_len == 1
|
|
&& tree->nodes.tags[values[0]] == AST_NODE_SWITCH_RANGE));
|
|
}
|
|
|
|
// Parse payload token (AstGen.zig:7855-7921).
|
|
uint32_t dbg_var_name = 0; // NullTerminatedString, 0 = empty
|
|
uint32_t dbg_var_inst = 0;
|
|
uint32_t dbg_var_tag_name = 0;
|
|
uint32_t dbg_var_tag_inst = 0;
|
|
bool has_tag_capture = false;
|
|
ScopeLocalVal capture_val_scope;
|
|
ScopeLocalVal tag_scope_val;
|
|
memset(&capture_val_scope, 0, sizeof(capture_val_scope));
|
|
memset(&tag_scope_val, 0, sizeof(tag_scope_val));
|
|
|
|
uint32_t capture = 0; // 0=none, 1=by_val, 2=by_ref
|
|
|
|
uint32_t arrow_token = tree->nodes.main_tokens[cn];
|
|
bool has_payload = (tree->tokens.tags[arrow_token + 1] == TOKEN_PIPE);
|
|
Scope* sub_scope = &case_scope.base;
|
|
|
|
if (has_payload) {
|
|
uint32_t payload_token = arrow_token + 2;
|
|
bool capture_is_ref
|
|
= (tree->tokens.tags[payload_token] == TOKEN_ASTERISK);
|
|
uint32_t ident = payload_token + (capture_is_ref ? 1u : 0u);
|
|
|
|
capture = capture_is_ref ? 2u : 1u; // by_ref : by_val
|
|
|
|
if (tokenIsUnderscore(tree, ident)) {
|
|
// Discard capture (AstGen.zig:7874-7878).
|
|
if (capture_is_ref) {
|
|
SET_ERROR(ag);
|
|
free(pay);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
capture = 0; // none
|
|
// sub_scope stays as &case_scope.base
|
|
} else {
|
|
// Named capture (AstGen.zig:7880-7892).
|
|
uint32_t capture_name = identAsString(ag, ident);
|
|
capture_val_scope = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = &case_scope.base,
|
|
.gen_zir = &case_scope,
|
|
.inst = switch_inst + ZIR_REF_START_INDEX,
|
|
.token_src = ident,
|
|
.name = capture_name,
|
|
};
|
|
dbg_var_name = capture_name;
|
|
dbg_var_inst = switch_inst + ZIR_REF_START_INDEX;
|
|
sub_scope = &capture_val_scope.base;
|
|
}
|
|
|
|
// Check for tag capture: ident followed by comma
|
|
// (AstGen.zig:7895-7921).
|
|
if (tree->tokens.tags[ident + 1] == TOKEN_COMMA) {
|
|
uint32_t tag_token = ident + 2;
|
|
uint32_t tag_name = identAsString(ag, tag_token);
|
|
|
|
has_tag_capture = true;
|
|
|
|
tag_scope_val = (ScopeLocalVal) {
|
|
.base = { .tag = SCOPE_LOCAL_VAL },
|
|
.parent = sub_scope,
|
|
.gen_zir = &case_scope,
|
|
.inst = tag_inst + ZIR_REF_START_INDEX,
|
|
.token_src = tag_token,
|
|
.name = tag_name,
|
|
};
|
|
dbg_var_tag_name = tag_name;
|
|
dbg_var_tag_inst = tag_inst + ZIR_REF_START_INDEX;
|
|
sub_scope = &tag_scope_val.base;
|
|
}
|
|
}
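        // Illustrative prong capture forms parsed above (sketch):
        //   .a => x,               no capture        (capture=0)
        //   .b => |v| x,           by-value capture  (capture=1)
        //   .c => |*v| x,          by-ref capture    (capture=2)
        //   inline .d => |v, t| x, value + tag capture; the tag binds
        //                          to the shared tag_inst placeholder.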
|
|
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 32);
|
|
uint32_t hdr = pay_len;
|
|
uint32_t prong_info_slot = 0;
|
|
|
|
// Determine case kind and fill item data (AstGen.zig:7924-7995).
|
|
if (is_underscore_case && is_multi_case) {
|
|
// Underscore case with additional items as multi
|
|
// (AstGen.zig:7926-7942).
|
|
pay[under_tbl] = hdr;
|
|
if (underscore_additional_items == 4) {
|
|
// One additional item (AstGen.zig:7928-7937).
|
|
// [item_ref, prong_info]
|
|
uint32_t item_node;
|
|
if (values[0] == underscore_node)
|
|
item_node = values[1];
|
|
else
|
|
item_node = values[0];
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
item_node, COMPTIME_REASON_SWITCH_ITEM);
|
|
prong_info_slot = pay_len++;
|
|
} else {
|
|
// Many additional items: multi format
|
|
// (AstGen.zig:7943-7977).
|
|
uint32_t nitems = 0;
|
|
uint32_t nranges = 0;
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (values[vi] == underscore_node)
|
|
continue;
|
|
if (tree->nodes.tags[values[vi]] == AST_NODE_SWITCH_RANGE)
|
|
nranges++;
|
|
else
|
|
nitems++;
|
|
}
|
|
pay[pay_len++] = nitems;
|
|
pay[pay_len++] = nranges;
|
|
prong_info_slot = pay_len++;
|
|
// Non-range items.
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (values[vi] == underscore_node)
|
|
continue;
|
|
if (tree->nodes.tags[values[vi]]
|
|
!= AST_NODE_SWITCH_RANGE) {
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 1);
|
|
pay[pay_len++]
|
|
= comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
values[vi], COMPTIME_REASON_SWITCH_ITEM);
|
|
}
|
|
}
|
|
// Range pairs.
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (values[vi] == underscore_node)
|
|
continue;
|
|
if (tree->nodes.tags[values[vi]]
|
|
== AST_NODE_SWITCH_RANGE) {
|
|
AstData rng = tree->nodes.datas[values[vi]];
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 2);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope,
|
|
RL_NONE_VAL, rng.lhs, COMPTIME_REASON_SWITCH_ITEM);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope,
|
|
RL_NONE_VAL, rng.rhs, COMPTIME_REASON_SWITCH_ITEM);
|
|
}
|
|
}
|
|
}
|
|
} else if (is_else_case) {
|
|
// Else prong (AstGen.zig:7978-7981).
|
|
pay[else_tbl] = hdr;
|
|
prong_info_slot = pay_len++;
|
|
} else if (is_underscore_case) {
|
|
// Underscore-only prong, no additional items
|
|
// (AstGen.zig:7982-7986).
|
|
pay[under_tbl] = hdr;
|
|
prong_info_slot = pay_len++;
|
|
} else if (!is_multi_case) {
|
|
// Scalar case (AstGen.zig:7987-7994).
|
|
pay[scalar_tbl + scalar_ci++] = hdr;
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope, RL_NONE_VAL,
|
|
values[0], COMPTIME_REASON_SWITCH_ITEM);
|
|
prong_info_slot = pay_len++;
|
|
} else {
|
|
// Multi case (AstGen.zig:7939-7977 non-underscore path).
|
|
pay[multi_tbl + multi_ci++] = hdr;
|
|
uint32_t nitems = 0;
|
|
uint32_t nranges = 0;
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (tree->nodes.tags[values[vi]] == AST_NODE_SWITCH_RANGE)
|
|
nranges++;
|
|
else
|
|
nitems++;
|
|
}
|
|
pay[pay_len++] = nitems;
|
|
pay[pay_len++] = nranges;
|
|
prong_info_slot = pay_len++;
|
|
// Non-range items.
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (tree->nodes.tags[values[vi]] != AST_NODE_SWITCH_RANGE) {
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 1);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope,
|
|
RL_NONE_VAL, values[vi], COMPTIME_REASON_SWITCH_ITEM);
|
|
}
|
|
}
|
|
// Range pairs.
|
|
for (uint32_t vi = 0; vi < values_len; vi++) {
|
|
if (tree->nodes.tags[values[vi]] == AST_NODE_SWITCH_RANGE) {
|
|
AstData rng = tree->nodes.datas[values[vi]];
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, 2);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope,
|
|
RL_NONE_VAL, rng.lhs, COMPTIME_REASON_SWITCH_ITEM);
|
|
pay[pay_len++] = comptimeExpr(parent_gz, scope,
|
|
RL_NONE_VAL, rng.rhs, COMPTIME_REASON_SWITCH_ITEM);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Evaluate body (AstGen.zig:7997-8026).
|
|
{
|
|
// Temporarily stack case_scope on parent_gz
|
|
// (AstGen.zig:7998-8000).
|
|
case_scope.instructions_top = parent_gz->astgen->scratch_inst_len;
|
|
|
|
if (dbg_var_name != 0) {
|
|
addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, dbg_var_name,
|
|
dbg_var_inst);
|
|
}
|
|
if (dbg_var_tag_name != 0) {
|
|
addDbgVar(&case_scope, ZIR_INST_DBG_VAR_VAL, dbg_var_tag_name,
|
|
dbg_var_tag_inst);
|
|
}
|
|
|
|
uint32_t body_node = cd.rhs;
|
|
uint32_t result = fullBodyExpr(&case_scope, sub_scope,
|
|
block_scope.break_result_info, body_node);
|
|
if (!refIsNoReturn(parent_gz, result)) {
|
|
addBreak(&case_scope, ZIR_INST_BREAK, switch_inst, result,
|
|
(int32_t)body_node - (int32_t)parent_gz->decl_node_index);
|
|
}
|
|
|
|
uint32_t raw_body_len = gzInstructionsLen(&case_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&case_scope);
|
|
|
|
// Body fixups with extra refs (AstGen.zig:8016-8025).
|
|
uint32_t extra_refs[2];
|
|
uint32_t extra_refs_len = 0;
|
|
extra_refs[extra_refs_len++] = switch_inst;
|
|
if (has_tag_capture) {
|
|
extra_refs[extra_refs_len++] = tag_inst;
|
|
}
|
|
|
|
uint32_t body_len = countBodyLenAfterFixupsExtraRefs(
|
|
ag, body, raw_body_len, extra_refs, extra_refs_len);
|
|
|
|
// Encode ProngInfo (AstGen.zig:8019-8024).
|
|
uint32_t prong_info = (body_len & 0x0FFFFFFFu)
|
|
| ((capture & 3u) << 28) | ((is_inline ? 1u : 0u) << 30)
|
|
| ((has_tag_capture ? 1u : 0u) << 31);
|
|
pay[prong_info_slot] = prong_info;
|
|
|
|
ensurePayCapacity(&pay, &pay_cap, pay_len, body_len);
|
|
appendBodyWithFixupsExtraRefsPay(ag, body, raw_body_len,
|
|
extra_refs, extra_refs_len, &pay, &pay_len, &pay_cap);
|
|
|
|
gzUnstack(&case_scope);
|
|
}
|
|
}
|
|
|
|
// Now add switch_block to parent (AstGen.zig:8034).
|
|
gzAppendInstruction(parent_gz, switch_inst);
|
|
|
|
// --- Serialize to extra in payload order (AstGen.zig:8036-8110) ---
|
|
ensureExtraCapacity(ag,
|
|
2 + (uint32_t)(multi_cases_len > 0 ? 1 : 0)
|
|
+ (any_has_tag_capture ? 1u : 0u) + pay_len - table_size);
|
|
uint32_t payload_index = ag->extra_len;
|
|
|
|
// SwitchBlock.operand (AstGen.zig:8042).
|
|
ag->extra[ag->extra_len++] = raw_operand;
|
|
|
|
// SwitchBlock.bits (AstGen.zig:8043-8050).
|
|
{
|
|
uint32_t bits = 0;
|
|
if (multi_cases_len > 0)
|
|
bits |= 1u; // has_multi_cases (bit 0)
|
|
bits |= (special_prongs & 7u) << 1; // special_prongs (bits 1-3)
|
|
if (any_has_tag_capture)
|
|
bits |= (1u << 4); // any_has_tag_capture (bit 4)
|
|
if (any_non_inline_capture)
|
|
bits |= (1u << 5); // any_non_inline_capture (bit 5)
|
|
        // has_continue (bit 6): upstream sets this only when the label
        // was actually used by a `continue`
        // (block_scope.label.used_for_continue). We don't track that,
        // so conservatively set the bit whenever a label exists; sema
        // treats the flag as an optimization hint, so this is safe.
        // Precise tracking is skipped per issue 9.
        if (label_token != UINT32_MAX)
            bits |= (1u << 6); // has_continue
|
|
bits |= (scalar_cases_len & 0x1FFFFFFu) << 7; // scalar_cases_len
|
|
ag->extra[ag->extra_len++] = bits;
|
|
}
|
|
|
|
// multi_cases_len (AstGen.zig:8053-8055).
|
|
if (multi_cases_len > 0)
|
|
ag->extra[ag->extra_len++] = multi_cases_len;
|
|
|
|
// tag_inst (AstGen.zig:8057-8059).
|
|
if (any_has_tag_capture)
|
|
ag->extra[ag->extra_len++] = tag_inst;
|
|
|
|
ag->inst_datas[switch_inst].pl_node.payload_index = payload_index;
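    // Overall extra layout for switch_block (sketch):
    //   [operand][bits][multi_cases_len?][tag_inst?]
    //   [else prong?][underscore prong?][scalar prongs...][multi prongs...]
    // with each prong copied verbatim from the pay table below.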
|
|
|
|
// Else prong (AstGen.zig:8064-8070).
|
|
if (has_else) {
|
|
uint32_t si = pay[else_tbl];
|
|
uint32_t prong_info = pay[si];
|
|
uint32_t bl = prong_info & 0x0FFFFFFFu;
|
|
uint32_t end = si + 1 + bl;
|
|
for (uint32_t i = si; i < end; i++)
|
|
ag->extra[ag->extra_len++] = pay[i];
|
|
}
|
|
|
|
// Underscore prong (AstGen.zig:8071-8093).
|
|
if (has_under) {
|
|
uint32_t si = pay[under_tbl];
|
|
uint32_t body_len_idx = si;
|
|
uint32_t end = si;
|
|
switch (underscore_additional_items) {
|
|
case 2: // none
|
|
end += 1; // just prong_info
|
|
break;
|
|
case 4: // one additional item
|
|
body_len_idx = si + 1;
|
|
end += 2; // item + prong_info
|
|
break;
|
|
case 6: // many additional items
|
|
body_len_idx = si + 2;
|
|
end += 3 + pay[si] + 2 * pay[si + 1]; // hdr + items + ranges
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
uint32_t prong_info = pay[body_len_idx];
|
|
uint32_t bl = prong_info & 0x0FFFFFFFu;
|
|
end += bl;
|
|
for (uint32_t i = si; i < end; i++)
|
|
ag->extra[ag->extra_len++] = pay[i];
|
|
}
|
|
|
|
// Scalar and multi cases (AstGen.zig:8094-8110).
|
|
for (uint32_t i = 0; i < scalar_cases_len + multi_cases_len; i++) {
|
|
uint32_t tbl_idx = scalar_tbl + i;
|
|
uint32_t si = pay[tbl_idx];
|
|
uint32_t body_len_idx;
|
|
uint32_t end = si;
|
|
if (tbl_idx < multi_tbl) {
|
|
// Scalar: [item, prong_info, body...]
|
|
body_len_idx = si + 1;
|
|
end += 2;
|
|
} else {
|
|
// Multi: [items_len, ranges_len, prong_info, items..., ranges...]
|
|
body_len_idx = si + 2;
|
|
uint32_t ni = pay[si];
|
|
uint32_t nr = pay[si + 1];
|
|
end += 3 + ni + nr * 2;
|
|
}
|
|
uint32_t prong_info = pay[body_len_idx];
|
|
uint32_t bl = prong_info & 0x0FFFFFFFu;
|
|
end += bl;
|
|
for (uint32_t j = si; j < end; j++)
|
|
ag->extra[ag->extra_len++] = pay[j];
|
|
}
|
|
|
|
free(pay);
|
|
|
|
// AstGen.zig:8112-8115.
|
|
bool need_result_rvalue = (break_rl.tag != rl.tag);
|
|
if (need_result_rvalue)
|
|
return rvalue(parent_gz, rl, switch_inst + ZIR_REF_START_INDEX, node);
|
|
return switch_inst + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// --- rvalue (AstGen.zig:11029) ---
// Simplified: handles .none and .discard result locations.

static uint32_t rvalueDiscard(GenZir* gz, uint32_t result, uint32_t src_node) {
    // .discard => emit ensure_result_non_error, return .void_value
    // (AstGen.zig:11071-11074)
    ZirInstData data;
    data.un_node.src_node = (int32_t)src_node - (int32_t)gz->decl_node_index;
    data.un_node.operand = result;
    addInstruction(gz, ZIR_INST_ENSURE_RESULT_NON_ERROR, data);
    return ZIR_REF_VOID_VALUE;
}

// --- emitDbgNode / emitDbgStmt (AstGen.zig:3422, 13713) ---

static void emitDbgStmt(GenZir* gz, uint32_t line, uint32_t column) {
    if (gz->is_comptime)
        return;
    // Check if last instruction is already dbg_stmt; if so, update it.
    // (AstGen.zig:13715-13724)
    AstGenCtx* ag = gz->astgen;
    uint32_t gz_len = gzInstructionsLen(gz);
    if (gz_len > 0) {
        uint32_t last = gzInstructionsSlice(gz)[gz_len - 1];
        if (ag->inst_tags[last] == ZIR_INST_DBG_STMT) {
            ag->inst_datas[last].dbg_stmt.line = line;
            ag->inst_datas[last].dbg_stmt.column = column;
            return;
        }
    }
    ZirInstData data;
    data.dbg_stmt.line = line;
    data.dbg_stmt.column = column;
    addInstruction(gz, ZIR_INST_DBG_STMT, data);
}

// Mirrors emitDbgStmtForceCurrentIndex (AstGen.zig:13739-13760).
static void emitDbgStmtForceCurrentIndex(
    GenZir* gz, uint32_t line, uint32_t column) {
    AstGenCtx* ag = gz->astgen;
    uint32_t gz_len = gzInstructionsLen(gz);
    if (gz_len > 0
        && gzInstructionsSlice(gz)[gz_len - 1] == ag->inst_len - 1) {
        uint32_t last = ag->inst_len - 1;
        if (ag->inst_tags[last] == ZIR_INST_DBG_STMT) {
            ag->inst_datas[last].dbg_stmt.line = line;
            ag->inst_datas[last].dbg_stmt.column = column;
            return;
        }
    }
    ZirInstData data;
    data.dbg_stmt.line = line;
    data.dbg_stmt.column = column;
    addInstruction(gz, ZIR_INST_DBG_STMT, data);
}

static void emitDbgNode(GenZir* gz, uint32_t node) {
    if (gz->is_comptime)
        return;
    AstGenCtx* ag = gz->astgen;
    advanceSourceCursorToNode(ag, node);
    uint32_t line = ag->source_line - gz->decl_line;
    uint32_t column = ag->source_column;
    emitDbgStmt(gz, line, column);
}
|
|
|
|
// --- assign (AstGen.zig:3434) ---
// Handles `_ = expr` discard pattern.

static void assignStmt(GenZir* gz, Scope* scope, uint32_t infix_node) {
    emitDbgNode(gz, infix_node);
    const AstGenCtx* ag = gz->astgen;
    const Ast* tree = ag->tree;

    AstData nd = tree->nodes.datas[infix_node];
    uint32_t lhs = nd.lhs;
    uint32_t rhs = nd.rhs;

    // Check if LHS is `_` identifier for discard (AstGen.zig:3440-3446).
    if (tree->nodes.tags[lhs] == AST_NODE_IDENTIFIER) {
        uint32_t ident_tok = tree->nodes.main_tokens[lhs];
        uint32_t tok_start = tree->tokens.starts[ident_tok];
        if (tree->source[tok_start] == '_'
            && (tok_start + 1 >= tree->source_len
                || !((tree->source[tok_start + 1] >= 'a'
                        && tree->source[tok_start + 1] <= 'z')
                    || (tree->source[tok_start + 1] >= 'A'
                        && tree->source[tok_start + 1] <= 'Z')
                    || tree->source[tok_start + 1] == '_'
                    || (tree->source[tok_start + 1] >= '0'
                        && tree->source[tok_start + 1] <= '9')))) {
            // Discard: evaluate RHS, then apply the .discard semantics
            // (ensure_result_non_error) via rvalueDiscard.
            uint32_t result = expr(gz, scope, rhs);
            rvalueDiscard(gz, result, rhs);
            return;
        }
    }

    // Non-discard assignment: evaluate LHS as lvalue, pass ptr rl to RHS.
    // (AstGen.zig:3448-3452).
    {
        uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs);
        ResultLoc ptr_rl
            = { .tag = RL_PTR, .data = lhs_ptr, .src_node = infix_node };
        (void)exprRl(gz, scope, ptr_rl, rhs);
    }
}
|
|
|
|
// --- assignDestructure (AstGen.zig:3456-3504) ---
|
|
// Handles destructure assignments where LHS is only lvalue expressions (no
|
|
// new var/const declarations). Called from exprRl and blockExprStmts.
|
|
|
|
static void assignDestructure(GenZir* gz, Scope* scope, uint32_t node) {
|
|
emitDbgNode(gz, node);
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
// Parse assign_destructure node: lhs=extra_index, rhs=value_expr.
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t extra_start = nd.lhs;
|
|
uint32_t value_expr = nd.rhs;
|
|
uint32_t variable_count = tree->extra_data.arr[extra_start];
|
|
const uint32_t* variables = tree->extra_data.arr + extra_start + 1;
|
|
|
|
// Detect comptime token (AstGen.zig:3462-3464).
|
|
// Check if the first variable's first token (or the token before it)
|
|
// is keyword_comptime.
|
|
bool has_comptime = false;
|
|
if (variable_count > 0) {
|
|
uint32_t first_var = variables[0];
|
|
AstNodeTag first_tag = tree->nodes.tags[first_var];
|
|
uint32_t first_tok;
|
|
if (first_tag == AST_NODE_GLOBAL_VAR_DECL
|
|
|| first_tag == AST_NODE_LOCAL_VAR_DECL
|
|
|| first_tag == AST_NODE_ALIGNED_VAR_DECL
|
|
|| first_tag == AST_NODE_SIMPLE_VAR_DECL) {
|
|
first_tok = firstToken(tree, first_var);
|
|
} else {
|
|
first_tok = firstToken(tree, first_var) - 1;
|
|
}
|
|
if (first_tok < tree->tokens.len
|
|
&& tree->tokens.tags[first_tok] == TOKEN_KEYWORD_COMPTIME) {
|
|
has_comptime = true;
|
|
}
|
|
}
|
|
|
|
if (has_comptime && gz->is_comptime) {
|
|
// Redundant comptime in already comptime scope (AstGen.zig:3466-3468).
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
|
|
// If comptime, wrap in sub-block (AstGen.zig:3471-3477).
|
|
GenZir gz_buf;
|
|
GenZir* inner_gz = gz;
|
|
if (has_comptime) {
|
|
gz_buf = makeSubBlock(gz, scope);
|
|
gz_buf.is_comptime = true;
|
|
inner_gz = &gz_buf;
|
|
}
|
|
|
|
// Build rl_components (AstGen.zig:3479-3492).
|
|
DestructureComponent* rl_components
|
|
= malloc(variable_count * sizeof(DestructureComponent));
|
|
if (!rl_components)
|
|
exit(1);
|
|
for (uint32_t i = 0; i < variable_count; i++) {
|
|
uint32_t variable_node = variables[i];
|
|
// Check for `_` identifier (AstGen.zig:3481-3487).
|
|
if (tree->nodes.tags[variable_node] == AST_NODE_IDENTIFIER) {
|
|
uint32_t ident_tok = tree->nodes.main_tokens[variable_node];
|
|
uint32_t tok_start = tree->tokens.starts[ident_tok];
|
|
if (tree->source[tok_start] == '_'
|
|
&& (tok_start + 1 >= tree->source_len
|
|
|| !((tree->source[tok_start + 1] >= 'a'
|
|
&& tree->source[tok_start + 1] <= 'z')
|
|
|| (tree->source[tok_start + 1] >= 'A'
|
|
&& tree->source[tok_start + 1] <= 'Z')
|
|
|| tree->source[tok_start + 1] == '_'
|
|
|| (tree->source[tok_start + 1] >= '0'
|
|
&& tree->source[tok_start + 1] <= '9')))) {
|
|
rl_components[i].tag = DC_DISCARD;
|
|
rl_components[i].inst = 0;
|
|
rl_components[i].src_node = 0;
|
|
continue;
|
|
}
|
|
}
|
|
// lvalExpr: evaluate as ref (AstGen.zig:3488-3491).
|
|
rl_components[i].tag = DC_TYPED_PTR;
|
|
rl_components[i].inst
|
|
= exprRl(inner_gz, scope, RL_REF_VAL, variable_node);
|
|
rl_components[i].src_node = variable_node;
|
|
}
|
|
|
|
// Build destructure result location and evaluate RHS
|
|
// (AstGen.zig:3494-3499).
|
|
ResultLoc ds_rl;
|
|
memset(&ds_rl, 0, sizeof(ds_rl));
|
|
ds_rl.tag = RL_DESTRUCTURE;
|
|
ds_rl.src_node = node;
|
|
ds_rl.components = rl_components;
|
|
ds_rl.components_len = variable_count;
|
|
(void)exprRl(inner_gz, scope, ds_rl, value_expr);
|
|
|
|
// If comptime, finish block_comptime (AstGen.zig:3501-3505).
|
|
if (has_comptime) {
|
|
uint32_t comptime_block_inst
|
|
= makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
|
|
addBreak(inner_gz, ZIR_INST_BREAK_INLINE, comptime_block_inst,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
setBlockComptimeBody(ag, inner_gz, comptime_block_inst,
|
|
COMPTIME_REASON_COMPTIME_KEYWORD);
|
|
gzAppendInstruction(gz, comptime_block_inst);
|
|
gzUnstack(inner_gz);
|
|
}
|
|
free(rl_components);
|
|
}
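// The `_` discard test above is open-coded in assignStmt, assignDestructure,
// and assignDestructureMaybeDecls. A minimal sketch of how it could be
// factored out (hypothetical helper name; kept as a comment so it does not
// affect the build or the emitted ZIR):
//
//   static bool tokenIsUnderscore(const Ast* tree, uint32_t tok) {
//       uint32_t start = tree->tokens.starts[tok];
//       if (tree->source[start] != '_')
//           return false;
//       if (start + 1 >= tree->source_len)
//           return true;
//       char c = tree->source[start + 1];
//       return !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
//           || (c >= '0' && c <= '9') || c == '_');
//   }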
|
|
|
|
// --- assignDestructureMaybeDecls (AstGen.zig:3507-3729) ---
|
|
// Handles destructure assignments that may contain const/var declarations.
|
|
// Returns new scope containing any declared variables.
|
|
|
|
static Scope* assignDestructureMaybeDecls(GenZir* gz, Scope* scope,
|
|
uint32_t node, ScopeLocalVal* val_scopes, uint32_t* val_idx,
|
|
ScopeLocalPtr* ptr_scopes, uint32_t* ptr_idx, uint32_t max_scopes) {
|
|
(void)val_scopes;
|
|
(void)val_idx;
|
|
emitDbgNode(gz, node);
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
// Parse assign_destructure node.
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t extra_start = nd.lhs;
|
|
uint32_t value_expr = nd.rhs;
|
|
uint32_t variable_count = tree->extra_data.arr[extra_start];
|
|
const uint32_t* variables = tree->extra_data.arr + extra_start + 1;
|
|
|
|
// Detect comptime token.
|
|
bool has_comptime = false;
|
|
if (variable_count > 0) {
|
|
uint32_t first_var = variables[0];
|
|
AstNodeTag first_tag = tree->nodes.tags[first_var];
|
|
uint32_t first_tok;
|
|
if (first_tag == AST_NODE_GLOBAL_VAR_DECL
|
|
|| first_tag == AST_NODE_LOCAL_VAR_DECL
|
|
|| first_tag == AST_NODE_ALIGNED_VAR_DECL
|
|
|| first_tag == AST_NODE_SIMPLE_VAR_DECL) {
|
|
first_tok = firstToken(tree, first_var);
|
|
} else {
|
|
first_tok = firstToken(tree, first_var) - 1;
|
|
}
|
|
if (first_tok < tree->tokens.len
|
|
&& tree->tokens.tags[first_tok] == TOKEN_KEYWORD_COMPTIME) {
|
|
has_comptime = true;
|
|
}
|
|
}
|
|
|
|
bool is_comptime = has_comptime || gz->is_comptime;
|
|
bool value_is_comptime = tree->nodes.tags[value_expr] == AST_NODE_COMPTIME;
|
|
|
|
if (has_comptime && gz->is_comptime) {
|
|
SET_ERROR(ag); // redundant comptime
|
|
}
|
|
|
|
// First pass: build rl_components (AstGen.zig:3535-3620).
|
|
DestructureComponent* rl_components
|
|
= malloc(variable_count * sizeof(DestructureComponent));
|
|
if (!rl_components)
|
|
exit(1);
|
|
bool any_lvalue_expr = false;
|
|
|
|
for (uint32_t i = 0; i < variable_count; i++) {
|
|
uint32_t variable_node = variables[i];
|
|
AstNodeTag vtag = tree->nodes.tags[variable_node];
|
|
|
|
// Check for `_` identifier (AstGen.zig:3537-3544).
|
|
if (vtag == AST_NODE_IDENTIFIER) {
|
|
uint32_t ident_tok = tree->nodes.main_tokens[variable_node];
|
|
uint32_t tok_start = tree->tokens.starts[ident_tok];
|
|
if (tree->source[tok_start] == '_'
|
|
&& (tok_start + 1 >= tree->source_len
|
|
|| !((tree->source[tok_start + 1] >= 'a'
|
|
&& tree->source[tok_start + 1] <= 'z')
|
|
|| (tree->source[tok_start + 1] >= 'A'
|
|
&& tree->source[tok_start + 1] <= 'Z')
|
|
|| tree->source[tok_start + 1] == '_'
|
|
|| (tree->source[tok_start + 1] >= '0'
|
|
&& tree->source[tok_start + 1] <= '9')))) {
|
|
|
|
rl_components[i].tag = DC_DISCARD;
|
|
rl_components[i].inst = 0;
|
|
rl_components[i].src_node = 0;
|
|
continue;
|
|
}
|
|
}
|
|
|
|
// var/const declarations (AstGen.zig:3545-3607).
|
|
if (vtag == AST_NODE_GLOBAL_VAR_DECL || vtag == AST_NODE_LOCAL_VAR_DECL
|
|
|| vtag == AST_NODE_SIMPLE_VAR_DECL
|
|
|| vtag == AST_NODE_ALIGNED_VAR_DECL) {
|
|
AstData vnd = tree->nodes.datas[variable_node];
|
|
uint32_t mut_token = tree->nodes.main_tokens[variable_node];
|
|
bool var_is_const
|
|
= (tree->tokens.tags[mut_token] == TOKEN_KEYWORD_CONST);
|
|
bool this_comptime
|
|
= is_comptime || (var_is_const && value_is_comptime);
|
|
|
|
uint32_t type_node = vnd.lhs;
|
|
// AstGen.zig:3576-3607: typed vs inferred alloc.
|
|
if (type_node != 0) {
|
|
uint32_t type_inst = typeExpr(gz, scope, type_node);
|
|
ZirInstTag alloc_tag;
|
|
if (var_is_const)
|
|
alloc_tag = ZIR_INST_ALLOC;
|
|
else if (this_comptime)
|
|
alloc_tag = ZIR_INST_ALLOC_COMPTIME_MUT;
|
|
else
|
|
alloc_tag = ZIR_INST_ALLOC_MUT;
|
|
uint32_t ptr = addUnNode(gz, alloc_tag, type_inst, node);
|
|
rl_components[i].tag = DC_TYPED_PTR;
|
|
rl_components[i].inst = ptr;
|
|
rl_components[i].src_node = 0;
|
|
} else {
|
|
// Inferred alloc.
|
|
ZirInstTag alloc_tag;
|
|
if (var_is_const) {
|
|
alloc_tag = this_comptime
|
|
? ZIR_INST_ALLOC_INFERRED_COMPTIME
|
|
: ZIR_INST_ALLOC_INFERRED;
|
|
} else {
|
|
alloc_tag = this_comptime
|
|
? ZIR_INST_ALLOC_INFERRED_COMPTIME_MUT
|
|
: ZIR_INST_ALLOC_INFERRED_MUT;
|
|
}
|
|
uint32_t ptr = addNode(gz, alloc_tag, node);
|
|
rl_components[i].tag = DC_INFERRED_PTR;
|
|
rl_components[i].inst = ptr;
|
|
rl_components[i].src_node = 0;
|
|
}
|
|
continue;
|
|
}
|
|
// Lvalue expression (AstGen.zig:3609-3618).
|
|
any_lvalue_expr = true;
|
|
rl_components[i].tag = DC_TYPED_PTR;
|
|
rl_components[i].inst = 0; // will be filled in second pass
|
|
rl_components[i].src_node = variable_node;
|
|
}
|
|
|
|
// If comptime, wrap in sub-block (AstGen.zig:3627-3632).
|
|
GenZir gz_buf;
|
|
GenZir* inner_gz = gz;
|
|
if (has_comptime) {
|
|
gz_buf = makeSubBlock(gz, scope);
|
|
gz_buf.is_comptime = true;
|
|
inner_gz = &gz_buf;
|
|
}
|
|
|
|
// Second pass for lvalue expressions (AstGen.zig:3634-3642).
|
|
if (any_lvalue_expr) {
|
|
for (uint32_t i = 0; i < variable_count; i++) {
|
|
if (rl_components[i].tag != DC_TYPED_PTR)
|
|
continue;
|
|
AstNodeTag vtag = tree->nodes.tags[variables[i]];
|
|
if (vtag == AST_NODE_GLOBAL_VAR_DECL
|
|
|| vtag == AST_NODE_LOCAL_VAR_DECL
|
|
|| vtag == AST_NODE_SIMPLE_VAR_DECL
|
|
|| vtag == AST_NODE_ALIGNED_VAR_DECL)
|
|
continue;
|
|
rl_components[i].inst
|
|
= exprRl(inner_gz, scope, RL_REF_VAL, variables[i]);
|
|
}
|
|
}
|
|
|
|
// Evaluate RHS with destructure RL (AstGen.zig:3647-3652).
|
|
ResultLoc ds_rl;
|
|
memset(&ds_rl, 0, sizeof(ds_rl));
|
|
ds_rl.tag = RL_DESTRUCTURE;
|
|
ds_rl.src_node = node;
|
|
ds_rl.components = rl_components;
|
|
ds_rl.components_len = variable_count;
|
|
(void)exprRl(inner_gz, scope, ds_rl, value_expr);
|
|
|
|
// If comptime, finish block_comptime (AstGen.zig:3654-3660).
|
|
if (has_comptime) {
|
|
uint32_t comptime_block_inst
|
|
= makeBlockInst(ag, ZIR_INST_BLOCK_COMPTIME, gz, node);
|
|
addBreak(inner_gz, ZIR_INST_BREAK_INLINE, comptime_block_inst,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
setBlockComptimeBody(ag, inner_gz, comptime_block_inst,
|
|
COMPTIME_REASON_COMPTIME_KEYWORD);
|
|
gzAppendInstruction(gz, comptime_block_inst);
|
|
gzUnstack(inner_gz);
|
|
}
|
|
|
|
// Third pass: create scopes for declared variables (AstGen.zig:3664-3729).
|
|
Scope* cur_scope = scope;
|
|
for (uint32_t i = 0; i < variable_count; i++) {
|
|
uint32_t variable_node = variables[i];
|
|
AstNodeTag vtag = tree->nodes.tags[variable_node];
|
|
if (vtag != AST_NODE_LOCAL_VAR_DECL && vtag != AST_NODE_SIMPLE_VAR_DECL
|
|
&& vtag != AST_NODE_ALIGNED_VAR_DECL)
|
|
continue;
|
|
|
|
uint32_t mut_token = tree->nodes.main_tokens[variable_node];
|
|
bool var_is_const
|
|
= (tree->tokens.tags[mut_token] == TOKEN_KEYWORD_CONST);
|
|
uint32_t raw_ptr = rl_components[i].inst;
|
|
bool resolve_inferred = (rl_components[i].tag == DC_INFERRED_PTR);
|
|
|
|
// Resolve inferred alloc or make ptr const (AstGen.zig:3694-3700).
|
|
uint32_t final_ptr;
|
|
if (resolve_inferred)
|
|
final_ptr = addUnNode(
|
|
gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, raw_ptr, variable_node);
|
|
else if (var_is_const)
|
|
final_ptr = addUnNode(gz, ZIR_INST_MAKE_PTR_CONST, raw_ptr, node);
|
|
else
|
|
final_ptr = raw_ptr;
|
|
|
|
// Create dbg_var_ptr (AstGen.zig:3710).
|
|
uint32_t name_token = mut_token + 1;
|
|
uint32_t ident_name = identAsString(ag, name_token);
|
|
addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, final_ptr);
|
|
|
|
// Create scope (AstGen.zig:3712-3722).
|
|
if (*ptr_idx < max_scopes) {
|
|
ptr_scopes[*ptr_idx] = (ScopeLocalPtr) {
|
|
.base = { .tag = SCOPE_LOCAL_PTR },
|
|
.parent = cur_scope,
|
|
.name = ident_name,
|
|
.ptr = final_ptr,
|
|
};
|
|
cur_scope = &ptr_scopes[*ptr_idx].base;
|
|
(*ptr_idx)++;
|
|
} else {
|
|
SET_ERROR(ag);
|
|
}
|
|
}
|
|
free(rl_components);
|
|
return cur_scope;
|
|
}
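// Worked example for the three passes above: for `const a, var b, _ = f();`
// (Zig source, runtime scope) the first pass records an alloc_inferred for
// `a`, an alloc_inferred_mut for `b`, and a DC_DISCARD entry for `_`; the
// RHS `f()` is then evaluated exactly once against the RL_DESTRUCTURE result
// location; the third pass resolves the inferred allocs
// (resolve_inferred_alloc), emits dbg_var_ptr for each declared name, and
// pushes one ScopeLocalPtr per declaration so later statements can resolve
// `a` and `b`.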
|
|
|
|
// --- assignOp (AstGen.zig:3731) ---
|
|
// Handles compound assignment operators (+=, -=, *=, etc.).
|
|
|
|
static void assignOp(
|
|
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag) {
|
|
emitDbgNode(gz, infix_node);
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
AstData nd = tree->nodes.datas[infix_node];
|
|
uint32_t lhs_node = nd.lhs;
|
|
uint32_t rhs_node = nd.rhs;
|
|
|
|
// Evaluate LHS as lvalue pointer (AstGen.zig:3742).
|
|
uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs_node);
|
|
|
|
// Advance cursor for add/sub/mul/div/mod_rem (AstGen.zig:3744-3747).
|
|
uint32_t cursor_line = 0, cursor_col = 0;
|
|
bool need_dbg = false;
|
|
if (op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB
|
|
|| op_tag == ZIR_INST_MUL || op_tag == ZIR_INST_DIV
|
|
|| op_tag == ZIR_INST_MOD_REM) {
|
|
if (!gz->is_comptime) {
|
|
advanceSourceCursorToMainToken(ag, gz, infix_node);
|
|
}
|
|
cursor_line = ag->source_line - gz->decl_line;
|
|
cursor_col = ag->source_column;
|
|
need_dbg = true;
|
|
}
|
|
|
|
// Load current value (AstGen.zig:3748).
|
|
uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);
|
|
|
|
// Determine RHS result type (AstGen.zig:3750-3766).
|
|
uint32_t rhs_res_ty;
|
|
if (op_tag == ZIR_INST_ADD || op_tag == ZIR_INST_SUB) {
|
|
// Emit inplace_arith_result_ty extended instruction.
|
|
uint16_t inplace_op
|
|
= (op_tag == ZIR_INST_ADD) ? 0 : 1; // add_eq=0, sub_eq=1
|
|
ZirInstData ext_data;
|
|
memset(&ext_data, 0, sizeof(ext_data));
|
|
ext_data.extended.opcode = (uint16_t)ZIR_EXT_INPLACE_ARITH_RESULT_TY;
|
|
ext_data.extended.small = inplace_op;
|
|
ext_data.extended.operand = lhs;
|
|
rhs_res_ty = addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
|
|
} else {
|
|
rhs_res_ty = addUnNode(gz, ZIR_INST_TYPEOF, lhs, infix_node);
|
|
}
|
|
|
|
// Evaluate RHS with type coercion (AstGen.zig:3768).
|
|
uint32_t rhs_raw = expr(gz, scope, rhs_node);
|
|
uint32_t rhs
|
|
= addPlNodeBin(gz, ZIR_INST_AS_NODE, rhs_node, rhs_res_ty, rhs_raw);
|
|
|
|
// Emit debug statement for arithmetic ops (AstGen.zig:3770-3775).
|
|
if (need_dbg) {
|
|
emitDbgStmt(gz, cursor_line, cursor_col);
|
|
}
|
|
|
|
// Emit the operation (AstGen.zig:3776-3779).
|
|
uint32_t result = addPlNodeBin(gz, op_tag, infix_node, lhs, rhs);
|
|
|
|
// Store result back (AstGen.zig:3780-3783).
|
|
addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result);
|
|
}
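// Worked example of the sequence emitted above for `x += y` (Zig source),
// assuming `x` resolves to a local pointer:
//   <lhs lvalue ref>
//   load                                current value of x
//   extended(inplace_arith_result_ty)   small = add_eq
//   <rhs expression for y>
//   as_node                             coerce y to the result type
//   dbg_stmt                            cursor at the operator's main token
//   add                                 (loaded lhs, coerced rhs)
//   store_node                          write the sum back through lhs_ptr
// Operators other than `+=`/`-=` use a plain typeof for the RHS result type,
// and only add/sub/mul/div/mod-rem get the extra dbg_stmt.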
|
|
|
|
// --- assignShift (AstGen.zig:3786) ---
|
|
// Handles <<= and >>= assignment operators.
|
|
|
|
static void assignShift(
|
|
GenZir* gz, Scope* scope, uint32_t infix_node, ZirInstTag op_tag) {
|
|
emitDbgNode(gz, infix_node);
|
|
const AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
AstData nd = tree->nodes.datas[infix_node];
|
|
uint32_t lhs_node = nd.lhs;
|
|
uint32_t rhs_node = nd.rhs;
|
|
|
|
// Evaluate LHS as lvalue pointer (AstGen.zig:3797).
|
|
uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs_node);
|
|
// Load current value (AstGen.zig:3798).
|
|
uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);
|
|
// RHS type is typeof_log2_int_type of LHS (AstGen.zig:3799).
|
|
uint32_t rhs_type
|
|
= addUnNode(gz, ZIR_INST_TYPEOF_LOG2_INT_TYPE, lhs, infix_node);
|
|
ResultLoc rhs_rl = {
|
|
.tag = RL_TY, .data = rhs_type, .src_node = 0, .ctx = RI_CTX_NONE
|
|
};
|
|
uint32_t rhs = exprRl(gz, scope, rhs_rl, rhs_node);
|
|
|
|
// Emit the shift operation (AstGen.zig:3802-3805).
|
|
uint32_t result = addPlNodeBin(gz, op_tag, infix_node, lhs, rhs);
|
|
// Store result back (AstGen.zig:3806-3809).
|
|
addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result);
|
|
}
|
|
|
|
// --- assignShiftSat (AstGen.zig:3812) ---
|
|
// Handles <<|= saturating shift-left assignment.
|
|
|
|
static void assignShiftSat(GenZir* gz, Scope* scope, uint32_t infix_node) {
|
|
emitDbgNode(gz, infix_node);
|
|
const AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
|
|
AstData nd = tree->nodes.datas[infix_node];
|
|
uint32_t lhs_node = nd.lhs;
|
|
uint32_t rhs_node = nd.rhs;
|
|
|
|
// Evaluate LHS as lvalue pointer (AstGen.zig:3818).
|
|
uint32_t lhs_ptr = exprRl(gz, scope, RL_REF_VAL, lhs_node);
|
|
// Load current value (AstGen.zig:3819).
|
|
uint32_t lhs = addUnNode(gz, ZIR_INST_LOAD, lhs_ptr, infix_node);
|
|
// Saturating shift-left allows any integer type for both LHS and RHS
|
|
// (AstGen.zig:3820-3821).
|
|
uint32_t rhs = expr(gz, scope, rhs_node);
|
|
|
|
// Emit shl_sat (AstGen.zig:3823-3825).
|
|
uint32_t result = addPlNodeBin(gz, ZIR_INST_SHL_SAT, infix_node, lhs, rhs);
|
|
// Store result back (AstGen.zig:3827-3830).
|
|
addPlNodeBin(gz, ZIR_INST_STORE_NODE, infix_node, lhs_ptr, result);
|
|
}
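// Contrast with assignShift above: for `x <<= y` the RHS is coerced to
// typeof_log2_int_type(x) via an RL_TY result location, while for
// `x <<|= y` the RHS is evaluated with no result type because saturating
// shift-left accepts any integer type on the right. Both variants end with
// the same load ... <op> ... store_node shape.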
|
|
|
|
// --- builtinEvalToError (BuiltinFn.zig) ---
|
|
// Returns per-builtin eval_to_error. Default is .never; only a few are
|
|
// .maybe or .always. Mirrors BuiltinFn.list lookup in AstGen.zig:10539.
|
|
static int builtinEvalToError(const Ast* tree, uint32_t node) {
|
|
uint32_t main_tok = tree->nodes.main_tokens[node];
|
|
uint32_t tok_start = tree->tokens.starts[main_tok];
|
|
const char* source = tree->source;
|
|
uint32_t name_start = tok_start + 1; // skip '@'
|
|
uint32_t name_end = name_start;
|
|
while (name_end < tree->source_len
|
|
&& ((source[name_end] >= 'a' && source[name_end] <= 'z')
|
|
|| (source[name_end] >= 'A' && source[name_end] <= 'Z')
|
|
|| source[name_end] == '_')) {
|
|
name_end++;
|
|
}
|
|
uint32_t name_len = name_end - name_start;
|
|
const char* name = source + name_start;
|
|
// clang-format off
|
|
// .always:
|
|
if (name_len == 12 && memcmp(name, "errorFromInt", 12) == 0)
|
|
return 1; // EVAL_TO_ERROR_ALWAYS
|
|
// .maybe:
|
|
if (name_len == 2 && memcmp(name, "as", 2) == 0) return 2;
|
|
if (name_len == 4 && memcmp(name, "call", 4) == 0) return 2;
|
|
if (name_len == 5 && memcmp(name, "field", 5) == 0) return 2;
|
|
if (name_len == 9 && memcmp(name, "errorCast", 9) == 0) return 2;
|
|
// clang-format on
|
|
// Default: .never
|
|
return 0;
|
|
}
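// Example of the name scan above: for `@errorFromInt(x)` the slice after the
// '@' is "errorFromInt" (12 bytes), which returns 1 (.always); for
// `@as(u8, x)` it is "as", returning 2 (.maybe); anything else, e.g.
// `@sizeOf(T)`, falls through to 0 (.never).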
|
|
|
|
// --- nodeMayEvalToError (AstGen.zig:10340) ---
// Three-way result: 0=never, 1=always, 2=maybe.
#define EVAL_TO_ERROR_NEVER 0
#define EVAL_TO_ERROR_ALWAYS 1
#define EVAL_TO_ERROR_MAYBE 2
static int nodeMayEvalToError(const Ast* tree, uint32_t node) {
|
|
uint32_t n = node;
|
|
while (true) {
|
|
AstNodeTag tag = tree->nodes.tags[n];
|
|
switch (tag) {
|
|
case AST_NODE_ERROR_VALUE:
|
|
return EVAL_TO_ERROR_ALWAYS;
|
|
// These may evaluate to errors.
|
|
case AST_NODE_IDENTIFIER:
|
|
case AST_NODE_FIELD_ACCESS:
|
|
case AST_NODE_DEREF:
|
|
case AST_NODE_ARRAY_ACCESS:
|
|
case AST_NODE_WHILE_SIMPLE:
|
|
case AST_NODE_WHILE_CONT:
|
|
case AST_NODE_WHILE:
|
|
case AST_NODE_FOR_SIMPLE:
|
|
case AST_NODE_FOR:
|
|
case AST_NODE_IF_SIMPLE:
|
|
case AST_NODE_IF:
|
|
case AST_NODE_SWITCH:
|
|
case AST_NODE_SWITCH_COMMA:
|
|
case AST_NODE_CALL_ONE:
|
|
case AST_NODE_CALL_ONE_COMMA:
|
|
case AST_NODE_CALL:
|
|
case AST_NODE_CALL_COMMA:
|
|
case AST_NODE_ASM_SIMPLE:
|
|
case AST_NODE_ASM_LEGACY:
|
|
case AST_NODE_ASM:
|
|
case AST_NODE_CATCH:
|
|
case AST_NODE_ORELSE:
|
|
return EVAL_TO_ERROR_MAYBE;
|
|
// Forward to sub-expression.
|
|
case AST_NODE_TRY:
|
|
case AST_NODE_COMPTIME:
|
|
case AST_NODE_NOSUSPEND:
|
|
n = tree->nodes.datas[n].lhs;
|
|
continue;
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
case AST_NODE_UNWRAP_OPTIONAL:
|
|
n = tree->nodes.datas[n].lhs;
|
|
continue;
|
|
// Labeled blocks may need a memory location.
|
|
case AST_NODE_BLOCK_TWO:
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON:
|
|
case AST_NODE_BLOCK:
|
|
case AST_NODE_BLOCK_SEMICOLON: {
|
|
uint32_t lbrace = tree->nodes.main_tokens[n];
|
|
if (lbrace > 0 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON)
|
|
return EVAL_TO_ERROR_MAYBE;
|
|
return EVAL_TO_ERROR_NEVER;
|
|
}
|
|
// Builtins: look up per-builtin eval_to_error
|
|
// (AstGen.zig:10530-10541).
|
|
case AST_NODE_BUILTIN_CALL:
|
|
case AST_NODE_BUILTIN_CALL_COMMA:
|
|
case AST_NODE_BUILTIN_CALL_TWO:
|
|
case AST_NODE_BUILTIN_CALL_TWO_COMMA:
|
|
return builtinEvalToError(tree, n);
|
|
// Everything else: .never
|
|
default:
|
|
return EVAL_TO_ERROR_NEVER;
|
|
}
|
|
}
|
|
}
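// Worked examples for the classification above (Zig source on the left):
//   error.Oops         -> EVAL_TO_ERROR_ALWAYS   (error_value)
//   foo(a, b)          -> EVAL_TO_ERROR_MAYBE    (call)
//   try foo(a, b)      -> forwards to the call   -> MAYBE
//   (expr)             -> unwraps the grouping, classification of expr
//   blk: { ... }       -> MAYBE (labeled block); plain { ... } -> NEVER
//   1 + 2              -> EVAL_TO_ERROR_NEVER    (default arm)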
|
|
|
|
// --- nodeMayAppendToErrorTrace (AstGen.zig:10315) ---
// Returns true if the expression may append to the error return trace.
static bool nodeMayAppendToErrorTrace(const Ast* tree, uint32_t node) {
    uint32_t n = node;
    while (true) {
        AstNodeTag tag = tree->nodes.tags[n];
        switch (tag) {
        // These don't call runtime functions.
        case AST_NODE_ERROR_VALUE:
        case AST_NODE_IDENTIFIER:
        case AST_NODE_COMPTIME:
            return false;
        // Forward to sub-expression.
        case AST_NODE_TRY:
        case AST_NODE_NOSUSPEND:
            n = tree->nodes.datas[n].lhs;
            continue;
        case AST_NODE_GROUPED_EXPRESSION:
        case AST_NODE_UNWRAP_OPTIONAL:
            n = tree->nodes.datas[n].lhs;
            continue;
        // Anything else: check if it may eval to error.
        default:
            return nodeMayEvalToError(tree, n) != EVAL_TO_ERROR_NEVER;
        }
    }
}
// --- addSaveErrRetIndex (AstGen.zig:12556) ---
// Emits SAVE_ERR_RET_INDEX instruction.
// operand is the init inst ref (or ZIR_REF_NONE for .always).
static void addSaveErrRetIndex(GenZir* gz, uint32_t operand) {
    ZirInstData data;
    data.save_err_ret_index.operand = operand;
    data.save_err_ret_index._pad = 0;
    addInstruction(gz, ZIR_INST_SAVE_ERR_RET_INDEX, data);
}
// --- addRestoreErrRetIndexBlock (AstGen.zig:12607-12614) ---
// Emits extended RESTORE_ERR_RET_INDEX with block target (if_non_error
// condition). Payload: src_node, block_ref, operand.
static void addRestoreErrRetIndexBlock(
    GenZir* gz, uint32_t block_inst, uint32_t operand, uint32_t node) {
    AstGenCtx* ag = gz->astgen;
    ensureExtraCapacity(ag, 3);
    uint32_t payload_index = ag->extra_len;
    ag->extra[ag->extra_len++]
        = (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
    ag->extra[ag->extra_len++] = block_inst + ZIR_REF_START_INDEX;
    ag->extra[ag->extra_len++] = operand;

    ZirInstData ext_data;
    ext_data.extended.opcode = (uint16_t)ZIR_EXT_RESTORE_ERR_RET_INDEX;
    ext_data.extended.small = 0;
    ext_data.extended.operand = payload_index;
    addInstruction(gz, ZIR_INST_EXTENDED, ext_data);
}
|
|
|
|
// --- restoreErrRetIndex (AstGen.zig:2121-2148) ---
|
|
// Emits restore_err_ret_index for block target based on nodeMayEvalToError.
|
|
static void restoreErrRetIndex(GenZir* gz, uint32_t block_inst, ResultLoc rl,
|
|
uint32_t node, uint32_t result) {
|
|
const Ast* tree = gz->astgen->tree;
|
|
int eval = nodeMayEvalToError(tree, node);
|
|
if (eval == EVAL_TO_ERROR_ALWAYS)
|
|
return; // never restore/pop
|
|
uint32_t op;
|
|
if (eval == EVAL_TO_ERROR_NEVER) {
|
|
op = ZIR_REF_NONE; // always restore/pop
|
|
} else {
|
|
// EVAL_TO_ERROR_MAYBE
|
|
// Simplified: without ri.ctx, treat non-ptr RL as result
|
|
// (AstGen.zig:2131-2144).
|
|
if (rl.tag == RL_PTR) {
|
|
op = addUnNode(gz, ZIR_INST_LOAD, rl.data, node);
|
|
} else if (rl.tag == RL_INFERRED_PTR) {
|
|
op = ZIR_REF_NONE;
|
|
} else {
|
|
op = result;
|
|
}
|
|
}
|
|
addRestoreErrRetIndexBlock(gz, block_inst, op, node);
|
|
}
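// Summary of the cases above: a node that can never produce an error gets
// operand ZIR_REF_NONE (the saved trace index is always restored); a node
// that is always an error emits nothing, so the trace entry survives; in the
// "maybe" case the operand is the result value itself, or a load through an
// RL_PTR result location (inferred pointers fall back to the unconditional
// ZIR_REF_NONE form), letting Sema restore the index only on the non-error
// path. For example, a block whose result comes from `f() catch 0` is
// classified MAYBE, so the block's result value is passed as the operand.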
|
|
|
|
// --- varDecl (AstGen.zig:3189) ---
|
|
// Handles local const/var declarations. Returns new scope with the variable.
|
|
// scope_out: set to new scope if variable is added; unchanged otherwise.
|
|
|
|
static void varDecl(GenZir* gz, Scope* scope, uint32_t node,
|
|
ScopeLocalVal* val_out, ScopeLocalPtr* ptr_out, Scope** scope_out) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
emitDbgNode(gz, node); // AstGen.zig:3196
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
|
|
uint32_t mut_token = tree->nodes.main_tokens[node];
|
|
uint32_t name_token = mut_token + 1;
|
|
bool is_const = (tree->source[tree->tokens.starts[mut_token]] == 'c');
|
|
|
|
uint32_t ident_name = identAsString(ag, name_token);
|
|
|
|
// Extract comptime_token by scanning backwards from mut_token
|
|
// (Ast.zig:2012-2023, fullVarDeclComponents).
|
|
bool has_comptime_token = false;
|
|
if (mut_token > 0
|
|
&& tree->tokens.tags[mut_token - 1] == TOKEN_KEYWORD_COMPTIME) {
|
|
has_comptime_token = true;
|
|
}
|
|
|
|
// Extract type_node and init_node based on variant.
|
|
uint32_t type_node = 0;
|
|
uint32_t align_node = 0;
|
|
uint32_t init_node = 0;
|
|
|
|
if (tag == AST_NODE_SIMPLE_VAR_DECL) {
|
|
// lhs = type (optional), rhs = init (optional).
|
|
type_node = nd.lhs;
|
|
init_node = nd.rhs;
|
|
} else if (tag == AST_NODE_LOCAL_VAR_DECL) {
|
|
// lhs = extra_data index, rhs = init.
|
|
// extra: {type_node, align_node, addrspace_node, section_node}
|
|
uint32_t extra_idx = nd.lhs;
|
|
type_node = tree->extra_data.arr[extra_idx]; // type_node
|
|
align_node = tree->extra_data.arr[extra_idx + 1]; // align_node
|
|
init_node = nd.rhs;
|
|
} else if (tag == AST_NODE_ALIGNED_VAR_DECL) {
|
|
// lhs = align expr, rhs = init.
|
|
// No type node in this variant.
|
|
align_node = nd.lhs;
|
|
init_node = nd.rhs;
|
|
} else {
|
|
// global_var_decl or unknown — bail.
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
|
|
if (init_node == 0) {
|
|
// Variables must be initialized (AstGen.zig:3228).
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
|
|
// Evaluate alignment expression (AstGen.zig:3227-3230).
|
|
uint32_t align_inst = ZIR_REF_NONE;
|
|
if (align_node != 0) {
|
|
ResultLoc align_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_U29_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE,
|
|
.components = NULL,
|
|
.components_len = 0 };
|
|
align_inst = exprRl(gz, scope, align_rl, align_node);
|
|
}
|
|
|
|
if (is_const) {
|
|
// --- CONST path (AstGen.zig:3232-3340) ---
|
|
|
|
// `comptime const` is a non-fatal error; treat it like the init was
|
|
// marked `comptime` (AstGen.zig:3234-3239).
|
|
uint32_t force_comptime
|
|
= has_comptime_token ? COMPTIME_REASON_COMPTIME_KEYWORD : 0;
|
|
|
|
if (!nodesNeedRlContains(ag, node)) {
|
|
// Rvalue path (AstGen.zig:3246-3271).
|
|
// Evaluate type annotation and build result_info
|
|
// (AstGen.zig:3247-3250).
|
|
ResultLoc result_info;
|
|
if (type_node != 0) {
|
|
uint32_t type_ref = typeExpr(gz, scope, type_node);
|
|
result_info = (ResultLoc) { .tag = RL_TY,
|
|
.data = type_ref,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_CONST_INIT };
|
|
} else {
|
|
result_info = (ResultLoc) { .tag = RL_NONE,
|
|
.data = 0,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_CONST_INIT };
|
|
}
|
|
|
|
// Evaluate init expression (AstGen.zig:3251-3252).
|
|
uint32_t init_ref = reachableExprComptime(
|
|
gz, scope, result_info, init_node, node, force_comptime);
|
|
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
|
|
// validate_const (AstGen.zig:3266).
|
|
addUnNode(gz, ZIR_INST_VALIDATE_CONST, init_ref, init_node);
|
|
|
|
// dbg_var_val (AstGen.zig:3269).
|
|
addDbgVar(gz, ZIR_INST_DBG_VAR_VAL, ident_name, init_ref);
|
|
|
|
// save_err_ret_index (AstGen.zig:3259-3260).
|
|
if (nodeMayAppendToErrorTrace(tree, init_node))
|
|
addSaveErrRetIndex(gz, init_ref);
|
|
|
|
// Create ScopeLocalVal (AstGen.zig:3276-3284).
|
|
val_out->base.tag = SCOPE_LOCAL_VAL;
|
|
val_out->parent = *scope_out;
|
|
val_out->gen_zir = gz;
|
|
val_out->inst = init_ref;
|
|
val_out->token_src = name_token;
|
|
val_out->name = ident_name;
|
|
val_out->is_used_or_discarded = NULL;
|
|
*scope_out = &val_out->base;
|
|
} else {
|
|
// Alloc path (AstGen.zig:3277-3340).
|
|
// The init expression needs a result pointer (nodes_need_rl).
|
|
bool is_comptime_init = gz->is_comptime
|
|
|| tree->nodes.tags[init_node] == AST_NODE_COMPTIME;
|
|
|
|
uint32_t var_ptr;
|
|
bool resolve_inferred;
|
|
|
|
if (type_node != 0) {
|
|
// Typed const: alloc (AstGen.zig:3280).
|
|
uint32_t type_ref = typeExpr(gz, scope, type_node);
|
|
var_ptr = addUnNode(gz, ZIR_INST_ALLOC, type_ref, node);
|
|
resolve_inferred = false;
|
|
} else {
|
|
// Inferred type: alloc_inferred (AstGen.zig:3291-3296).
|
|
ZirInstTag alloc_tag = is_comptime_init
|
|
? ZIR_INST_ALLOC_INFERRED_COMPTIME
|
|
: ZIR_INST_ALLOC_INFERRED;
|
|
ZirInstData adata;
|
|
adata.node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
var_ptr = addInstruction(gz, alloc_tag, adata);
|
|
resolve_inferred = true;
|
|
}
|
|
|
|
// Evaluate init with RL pointing to alloc (AstGen.zig:3313-3316).
|
|
ResultLoc init_rl;
|
|
init_rl.components = NULL;
|
|
init_rl.components_len = 0;
|
|
if (type_node != 0) {
|
|
init_rl.tag = RL_PTR;
|
|
init_rl.data = var_ptr;
|
|
init_rl.src_node = 0; // upstream: .none (PtrResultLoc.src_node
|
|
// defaults to null)
|
|
} else {
|
|
init_rl.tag = RL_INFERRED_PTR;
|
|
init_rl.data = var_ptr;
|
|
init_rl.src_node = 0;
|
|
}
|
|
init_rl.ctx = RI_CTX_CONST_INIT;
|
|
uint32_t init_ref = reachableExprComptime(
|
|
gz, scope, init_rl, init_node, node, force_comptime);
|
|
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
|
|
// save_err_ret_index (AstGen.zig:3320-3321).
|
|
if (nodeMayAppendToErrorTrace(tree, init_node))
|
|
addSaveErrRetIndex(gz, init_ref);
|
|
|
|
// resolve_inferred_alloc or make_ptr_const (AstGen.zig:3323-3326).
|
|
uint32_t const_ptr;
|
|
if (resolve_inferred)
|
|
const_ptr = addUnNode(
|
|
gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, var_ptr, node);
|
|
else
|
|
const_ptr
|
|
= addUnNode(gz, ZIR_INST_MAKE_PTR_CONST, var_ptr, node);
|
|
|
|
// dbg_var_ptr (AstGen.zig:3328).
|
|
addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, const_ptr);
|
|
|
|
// Create ScopeLocalPtr (AstGen.zig:3330-3340).
|
|
ptr_out->base.tag = SCOPE_LOCAL_PTR;
|
|
ptr_out->parent = *scope_out;
|
|
ptr_out->gen_zir = gz;
|
|
ptr_out->ptr = const_ptr;
|
|
ptr_out->token_src = name_token;
|
|
ptr_out->name = ident_name;
|
|
ptr_out->maybe_comptime = true;
|
|
*scope_out = &ptr_out->base;
|
|
}
|
|
} else {
|
|
// --- VAR path (AstGen.zig:3342-3416) ---
|
|
|
|
// comptime_token handling (AstGen.zig:3343-3345).
|
|
bool is_comptime = has_comptime_token || gz->is_comptime;
|
|
|
|
uint32_t alloc_ref;
|
|
bool resolve_inferred = false;
|
|
|
|
if (type_node != 0) {
|
|
// Typed var: alloc_mut (AstGen.zig:3346-3365).
|
|
uint32_t type_ref = typeExpr(gz, scope, type_node);
|
|
if (align_inst == ZIR_REF_NONE) {
|
|
ZirInstTag alloc_tag = is_comptime
|
|
? ZIR_INST_ALLOC_COMPTIME_MUT
|
|
: ZIR_INST_ALLOC_MUT;
|
|
alloc_ref = addUnNode(gz, alloc_tag, type_ref, node);
|
|
} else {
|
|
// addAllocExtended (AstGen.zig:12781-12830).
|
|
ensureExtraCapacity(ag, 3); // src_node + type + align
|
|
uint32_t payload_index = ag->extra_len;
|
|
int32_t src_off = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
memcpy(&ag->extra[ag->extra_len], &src_off, sizeof(uint32_t));
|
|
ag->extra_len++;
|
|
ag->extra[ag->extra_len++] = type_ref;
|
|
ag->extra[ag->extra_len++] = align_inst;
|
|
// small: has_type=1, has_align=1, is_const=0, is_comptime
|
|
uint16_t small = (uint16_t)(1u | (1u << 1)
|
|
| ((is_comptime ? 1u : 0u) << 3));
|
|
ZirInstData edata;
|
|
edata.extended.opcode = (uint16_t)ZIR_EXT_ALLOC;
|
|
edata.extended.small = small;
|
|
edata.extended.operand = payload_index;
|
|
alloc_ref = addInstruction(gz, ZIR_INST_EXTENDED, edata);
|
|
}
|
|
} else {
|
|
// Inferred type var: alloc_inferred_mut
|
|
// (AstGen.zig:3366-3385).
|
|
if (align_inst == ZIR_REF_NONE) {
|
|
ZirInstTag alloc_tag = is_comptime
|
|
? ZIR_INST_ALLOC_INFERRED_COMPTIME_MUT
|
|
: ZIR_INST_ALLOC_INFERRED_MUT;
|
|
ZirInstData adata;
|
|
adata.node = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
alloc_ref = addInstruction(gz, alloc_tag, adata);
|
|
} else {
|
|
// addAllocExtended without type (AstGen.zig:12781-12830).
|
|
ensureExtraCapacity(ag, 2); // src_node + align
|
|
uint32_t payload_index = ag->extra_len;
|
|
int32_t src_off = (int32_t)node - (int32_t)gz->decl_node_index;
|
|
memcpy(&ag->extra[ag->extra_len], &src_off, sizeof(uint32_t));
|
|
ag->extra_len++;
|
|
ag->extra[ag->extra_len++] = align_inst;
|
|
// small: has_type=0, has_align=1, is_const=0, is_comptime
|
|
uint16_t small
|
|
= (uint16_t)((1u << 1) | ((is_comptime ? 1u : 0u) << 3));
|
|
ZirInstData edata;
|
|
edata.extended.opcode = (uint16_t)ZIR_EXT_ALLOC;
|
|
edata.extended.small = small;
|
|
edata.extended.operand = payload_index;
|
|
alloc_ref = addInstruction(gz, ZIR_INST_EXTENDED, edata);
|
|
}
|
|
resolve_inferred = true;
|
|
}
|
|
|
|
// Evaluate init with RL pointing to alloc (AstGen.zig:3395-3402).
|
|
ResultLoc var_init_rl;
|
|
var_init_rl.components = NULL;
|
|
var_init_rl.components_len = 0;
|
|
if (type_node != 0) {
|
|
var_init_rl.tag = RL_PTR;
|
|
var_init_rl.data = alloc_ref;
|
|
var_init_rl.src_node = 0; // upstream: .none (PtrResultLoc.src_node
|
|
// defaults to null)
|
|
} else {
|
|
var_init_rl.tag = RL_INFERRED_PTR;
|
|
var_init_rl.data = alloc_ref;
|
|
var_init_rl.src_node = 0;
|
|
}
|
|
var_init_rl.ctx = RI_CTX_NONE;
|
|
uint32_t comptime_reason
|
|
= has_comptime_token ? COMPTIME_REASON_COMPTIME_KEYWORD : 0;
|
|
uint32_t init_ref = reachableExprComptime(
|
|
gz, scope, var_init_rl, init_node, node, comptime_reason);
|
|
(void)init_ref;
|
|
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
|
|
// resolve_inferred_alloc if type was inferred
|
|
// (AstGen.zig:3407-3408).
|
|
uint32_t final_ptr = alloc_ref;
|
|
if (resolve_inferred)
|
|
final_ptr = addUnNode(
|
|
gz, ZIR_INST_RESOLVE_INFERRED_ALLOC, alloc_ref, node);
|
|
|
|
// dbg_var_ptr (AstGen.zig:3411).
|
|
addDbgVar(gz, ZIR_INST_DBG_VAR_PTR, ident_name, final_ptr);
|
|
|
|
// Create ScopeLocalPtr (AstGen.zig:3413-3422).
|
|
ptr_out->base.tag = SCOPE_LOCAL_PTR;
|
|
ptr_out->parent = *scope_out;
|
|
ptr_out->gen_zir = gz;
|
|
ptr_out->ptr = final_ptr;
|
|
ptr_out->token_src = name_token;
|
|
ptr_out->name = ident_name;
|
|
ptr_out->maybe_comptime = is_comptime;
|
|
*scope_out = &ptr_out->base;
|
|
}
|
|
}
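// Worked examples for the const paths above (runtime scope, no `comptime`
// keyword; whether a given init lands in nodes_need_rl is decided earlier by
// the result-location analysis, so the split below is illustrative):
//
//   const x: u32 = foo();      rvalue path: typeExpr(u32), init under RL_TY,
//                              validate_const, dbg_var_val, new ScopeLocalVal
//
//   const s: S = .{ .a = 1 };  alloc path (init wants a result pointer):
//                              alloc(S), init under RL_PTR, make_ptr_const,
//                              dbg_var_ptr, new ScopeLocalPtr
//
// `var` declarations always take the pointer path, using alloc_mut /
// alloc_inferred_mut, or the extended alloc form when an align clause is
// present.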
|
|
|
|
// --- addEnsureResult (AstGen.zig:2649) ---
|
|
// After evaluating an expression as a statement, optionally emits
|
|
// ensure_result_used. For call/field_call, sets flag in extra data instead.
|
|
// Returns true if the result is noreturn (AstGen.zig:2909).
|
|
static bool addEnsureResult(
|
|
GenZir* gz, uint32_t maybe_unused_result, uint32_t statement) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
bool elide_check;
|
|
bool is_noreturn = false;
|
|
if (maybe_unused_result >= ZIR_REF_START_INDEX) {
|
|
uint32_t inst = maybe_unused_result - ZIR_REF_START_INDEX;
|
|
ZirInstTag tag = ag->inst_tags[inst];
|
|
switch (tag) {
|
|
// For call/field_call: set ensure_result_used flag
|
|
// (bit 3 of flags at offset 0). Flags *must* be at offset 0
|
|
// (AstGen.zig:2658-2665, Zir.zig:3022).
|
|
case ZIR_INST_CALL:
|
|
case ZIR_INST_FIELD_CALL: {
|
|
uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
|
|
ag->extra[pi] |= (1u << 3); // ensure_result_used
|
|
elide_check = true;
|
|
break;
|
|
}
|
|
// For builtin_call: ensure_result_used is at bit 1, not bit 3.
|
|
case ZIR_INST_BUILTIN_CALL: {
|
|
uint32_t pi = ag->inst_datas[inst].pl_node.payload_index;
|
|
ag->extra[pi] |= (1u << 1); // ensure_result_used
|
|
elide_check = true;
|
|
break;
|
|
}
|
|
// Always noreturn → elide (AstGen.zig:2909).
|
|
case ZIR_INST_BREAK:
|
|
case ZIR_INST_BREAK_INLINE:
|
|
case ZIR_INST_CONDBR:
|
|
case ZIR_INST_CONDBR_INLINE:
|
|
case ZIR_INST_RET_NODE:
|
|
case ZIR_INST_RET_LOAD:
|
|
case ZIR_INST_RET_IMPLICIT:
|
|
case ZIR_INST_RET_ERR_VALUE:
|
|
case ZIR_INST_UNREACHABLE:
|
|
case ZIR_INST_REPEAT:
|
|
case ZIR_INST_REPEAT_INLINE:
|
|
case ZIR_INST_PANIC:
|
|
case ZIR_INST_TRAP:
|
|
case ZIR_INST_CHECK_COMPTIME_CONTROL_FLOW:
|
|
case ZIR_INST_SWITCH_CONTINUE:
|
|
case ZIR_INST_COMPILE_ERROR:
|
|
is_noreturn = true;
|
|
elide_check = true;
|
|
break;
|
|
// Always void → elide.
|
|
case ZIR_INST_DBG_STMT:
|
|
case ZIR_INST_DBG_VAR_PTR:
|
|
case ZIR_INST_DBG_VAR_VAL:
|
|
case ZIR_INST_ENSURE_RESULT_USED:
|
|
case ZIR_INST_ENSURE_RESULT_NON_ERROR:
|
|
case ZIR_INST_ENSURE_ERR_UNION_PAYLOAD_VOID:
|
|
case ZIR_INST_EXPORT:
|
|
case ZIR_INST_SET_EVAL_BRANCH_QUOTA:
|
|
case ZIR_INST_ATOMIC_STORE:
|
|
case ZIR_INST_STORE_NODE:
|
|
case ZIR_INST_STORE_TO_INFERRED_PTR:
|
|
case ZIR_INST_RESOLVE_INFERRED_ALLOC:
|
|
case ZIR_INST_SET_RUNTIME_SAFETY:
|
|
case ZIR_INST_MEMCPY:
|
|
case ZIR_INST_MEMSET:
|
|
case ZIR_INST_MEMMOVE:
|
|
case ZIR_INST_VALIDATE_DEREF:
|
|
case ZIR_INST_VALIDATE_DESTRUCTURE:
|
|
case ZIR_INST_SAVE_ERR_RET_INDEX:
|
|
case ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL:
|
|
case ZIR_INST_RESTORE_ERR_RET_INDEX_FN_ENTRY:
|
|
case ZIR_INST_VALIDATE_STRUCT_INIT_TY:
|
|
case ZIR_INST_VALIDATE_STRUCT_INIT_RESULT_TY:
|
|
case ZIR_INST_VALIDATE_PTR_STRUCT_INIT:
|
|
case ZIR_INST_VALIDATE_ARRAY_INIT_TY:
|
|
case ZIR_INST_VALIDATE_ARRAY_INIT_RESULT_TY:
|
|
case ZIR_INST_VALIDATE_PTR_ARRAY_INIT:
|
|
case ZIR_INST_VALIDATE_REF_TY:
|
|
case ZIR_INST_VALIDATE_CONST:
|
|
elide_check = true;
|
|
break;
|
|
// Extended: check opcode.
|
|
case ZIR_INST_EXTENDED: {
|
|
uint32_t opcode = ag->inst_datas[inst].extended.opcode;
|
|
elide_check = (opcode == ZIR_EXT_BREAKPOINT
|
|
|| opcode == ZIR_EXT_BRANCH_HINT
|
|
|| opcode == ZIR_EXT_SET_FLOAT_MODE
|
|
|| opcode == ZIR_EXT_DISABLE_INSTRUMENTATION
|
|
|| opcode == ZIR_EXT_DISABLE_INTRINSICS);
|
|
break;
|
|
}
|
|
// Everything else: might produce non-void result → emit check.
|
|
default:
|
|
elide_check = false;
|
|
break;
|
|
}
|
|
} else {
|
|
// Named ref constant.
|
|
is_noreturn = (maybe_unused_result == ZIR_REF_UNREACHABLE_VALUE);
|
|
elide_check
|
|
= (is_noreturn || maybe_unused_result == ZIR_REF_VOID_VALUE);
|
|
}
|
|
if (!elide_check) {
|
|
addUnNode(
|
|
gz, ZIR_INST_ENSURE_RESULT_USED, maybe_unused_result, statement);
|
|
}
|
|
return is_noreturn;
|
|
}
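// Examples of the outcomes above for an expression statement:
//   foo(a);                no extra instruction; bit 3 of the call's flags
//                          word in extra is set so Sema itself performs the
//                          unused-result check
//   @call(.auto, f, .{});  same idea for builtin_call, but the flag is bit 1
//   return x;              ret_node is noreturn, which later suppresses the
//                          normal-defer emission at the end of the block
//   a + b;                 default arm: a separate ensure_result_used is
//                          emitted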
|
|
|
|
// --- countDefers (AstGen.zig:2966) ---
|
|
// Walk scope chain and count defer types.
|
|
|
|
static DeferCounts countDefers(const Scope* outer_scope, Scope* inner_scope) {
|
|
DeferCounts c = { false, false, false, false };
|
|
Scope* s = inner_scope;
|
|
while (s != outer_scope) {
|
|
switch (s->tag) {
|
|
case SCOPE_GEN_ZIR:
|
|
s = ((GenZir*)s)->parent;
|
|
break;
|
|
case SCOPE_LOCAL_VAL:
|
|
s = ((ScopeLocalVal*)s)->parent;
|
|
break;
|
|
case SCOPE_LOCAL_PTR:
|
|
s = ((ScopeLocalPtr*)s)->parent;
|
|
break;
|
|
case SCOPE_DEFER_NORMAL: {
|
|
ScopeDefer* d = (ScopeDefer*)s;
|
|
s = d->parent;
|
|
c.have_normal = true;
|
|
break;
|
|
}
|
|
case SCOPE_DEFER_ERROR: {
|
|
ScopeDefer* d = (ScopeDefer*)s;
|
|
s = d->parent;
|
|
c.have_err = true;
|
|
// need_err_code if remapped_err_code exists (we don't
|
|
// implement err capture yet, so always false).
|
|
break;
|
|
}
|
|
default:
|
|
return c;
|
|
}
|
|
}
|
|
c.have_any = c.have_normal || c.have_err;
|
|
return c;
|
|
}
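// Example: walking out of `{ defer a(); errdefer b(); ... }` from a point
// after both statements visits one SCOPE_DEFER_NORMAL and one
// SCOPE_DEFER_ERROR entry, yielding .have_normal = true, .have_err = true,
// .have_any = true; .need_err_code stays false because error captures on
// errdefer are not implemented yet.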
|
|
|
|
// --- genDefers (AstGen.zig:3014) ---
|
|
// Walk scope chain from inner to outer, emitting .defer instructions.
|
|
// which: DEFER_NORMAL_ONLY or DEFER_BOTH_SANS_ERR.
|
|
|
|
static void genDefers(
|
|
GenZir* gz, const Scope* outer_scope, Scope* inner_scope, int which) {
|
|
Scope* s = inner_scope;
|
|
while (s != outer_scope) {
|
|
switch (s->tag) {
|
|
case SCOPE_GEN_ZIR: {
|
|
GenZir* g = (GenZir*)s;
|
|
s = g->parent;
|
|
break;
|
|
}
|
|
case SCOPE_LOCAL_VAL: {
|
|
ScopeLocalVal* lv = (ScopeLocalVal*)s;
|
|
s = lv->parent;
|
|
break;
|
|
}
|
|
case SCOPE_LOCAL_PTR: {
|
|
ScopeLocalPtr* lp = (ScopeLocalPtr*)s;
|
|
s = lp->parent;
|
|
break;
|
|
}
|
|
case SCOPE_DEFER_NORMAL: {
|
|
ScopeDefer* d = (ScopeDefer*)s;
|
|
s = d->parent;
|
|
// Emit ZIR_INST_DEFER (AstGen.zig:3031).
|
|
ZirInstData data;
|
|
data.defer_data.index = d->index;
|
|
data.defer_data.len = d->len;
|
|
addInstruction(gz, ZIR_INST_DEFER, data);
|
|
break;
|
|
}
|
|
case SCOPE_DEFER_ERROR: {
|
|
ScopeDefer* d = (ScopeDefer*)s;
|
|
s = d->parent;
|
|
if (which == DEFER_BOTH_SANS_ERR) {
|
|
// Emit regular DEFER for error defers too (AstGen.zig:3038).
|
|
ZirInstData data;
|
|
data.defer_data.index = d->index;
|
|
data.defer_data.len = d->len;
|
|
addInstruction(gz, ZIR_INST_DEFER, data);
|
|
}
|
|
// DEFER_NORMAL_ONLY: skip error defers (AstGen.zig:3063).
|
|
break;
|
|
}
|
|
        case SCOPE_LABEL: {
            // Labels are not standalone scopes with their own parent pointer
            // in this representation; they live inside their GenZir. This
            // case should not be reachable when walking up from a
            // blockExprStmts scope, so end the walk defensively.
            return;
        }
|
|
case SCOPE_NAMESPACE:
|
|
case SCOPE_TOP:
|
|
default:
|
|
return;
|
|
}
|
|
}
|
|
}
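// Example for the `which` modes above, given `{ defer a(); errdefer b(); }`:
// a normal fall-through exit uses DEFER_NORMAL_ONLY, so only the `defer a()`
// body is materialized as a ZIR_INST_DEFER; an exit that must also run error
// defers non-exceptionally uses DEFER_BOTH_SANS_ERR and emits both bodies as
// plain ZIR_INST_DEFER. Bodies come out innermost-first because the walk
// goes from inner_scope outward.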
|
|
|
|
// --- blockExprStmts (AstGen.zig:2538) ---
|
|
// Processes block statements sequentially, threading scope.
|
|
|
|
static void blockExprStmts(GenZir* gz, Scope* scope,
|
|
const uint32_t* statements, uint32_t stmt_count) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
// Stack-allocated scope storage for local variables and defers.
|
|
// Max 128 local variable declarations and 128 defers per block.
|
|
ScopeLocalVal val_scopes[128];
|
|
ScopeLocalPtr ptr_scopes[128];
|
|
ScopeDefer defer_scopes[128];
|
|
uint32_t val_idx = 0;
|
|
uint32_t ptr_idx = 0;
|
|
uint32_t defer_idx = 0;
|
|
Scope* cur_scope = scope;
|
|
bool noreturn_stmt = false;
|
|
|
|
for (uint32_t i = 0; i < stmt_count; i++) {
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
uint32_t stmt = statements[i];
|
|
// Unwrap grouped_expression (parentheses) before dispatching
|
|
// (AstGen.zig:2569-2630).
|
|
uint32_t inner_node = stmt;
|
|
for (;;) {
|
|
AstNodeTag tag = ag->tree->nodes.tags[inner_node];
|
|
switch (tag) {
|
|
case AST_NODE_ASSIGN:
|
|
assignStmt(gz, cur_scope, inner_node);
|
|
break;
|
|
// assign_destructure (AstGen.zig:2578).
|
|
case AST_NODE_ASSIGN_DESTRUCTURE:
|
|
cur_scope
|
|
= assignDestructureMaybeDecls(gz, cur_scope, inner_node,
|
|
val_scopes, &val_idx, ptr_scopes, &ptr_idx, 128);
|
|
break;
|
|
// Shift assignment operators (AstGen.zig:2585-2586).
|
|
case AST_NODE_ASSIGN_SHL:
|
|
assignShift(gz, cur_scope, inner_node, ZIR_INST_SHL);
|
|
break;
|
|
case AST_NODE_ASSIGN_SHR:
|
|
assignShift(gz, cur_scope, inner_node, ZIR_INST_SHR);
|
|
break;
|
|
// Saturating shift-left assignment (AstGen.zig:680-682 via expr).
|
|
case AST_NODE_ASSIGN_SHL_SAT:
|
|
assignShiftSat(gz, cur_scope, inner_node);
|
|
break;
|
|
// Compound assignment operators (AstGen.zig:2588-2607).
|
|
case AST_NODE_ASSIGN_ADD:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_ADD);
|
|
break;
|
|
case AST_NODE_ASSIGN_SUB:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_SUB);
|
|
break;
|
|
case AST_NODE_ASSIGN_MUL:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_MUL);
|
|
break;
|
|
case AST_NODE_ASSIGN_DIV:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_DIV);
|
|
break;
|
|
case AST_NODE_ASSIGN_MOD:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_MOD_REM);
|
|
break;
|
|
case AST_NODE_ASSIGN_BIT_AND:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_BIT_AND);
|
|
break;
|
|
case AST_NODE_ASSIGN_BIT_OR:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_BIT_OR);
|
|
break;
|
|
case AST_NODE_ASSIGN_BIT_XOR:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_XOR);
|
|
break;
|
|
case AST_NODE_ASSIGN_ADD_WRAP:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_ADDWRAP);
|
|
break;
|
|
case AST_NODE_ASSIGN_SUB_WRAP:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_SUBWRAP);
|
|
break;
|
|
case AST_NODE_ASSIGN_MUL_WRAP:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_MULWRAP);
|
|
break;
|
|
case AST_NODE_ASSIGN_ADD_SAT:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_ADD_SAT);
|
|
break;
|
|
case AST_NODE_ASSIGN_SUB_SAT:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_SUB_SAT);
|
|
break;
|
|
case AST_NODE_ASSIGN_MUL_SAT:
|
|
assignOp(gz, cur_scope, inner_node, ZIR_INST_MUL_SAT);
|
|
break;
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
case AST_NODE_ALIGNED_VAR_DECL:
|
|
if (val_idx < 128 && ptr_idx < 128) {
|
|
varDecl(gz, cur_scope, stmt, &val_scopes[val_idx],
|
|
&ptr_scopes[ptr_idx], &cur_scope);
|
|
// Check which one was used: if scope now points to
|
|
// val_scopes[val_idx], advance val_idx; same for ptr.
|
|
if (cur_scope == &val_scopes[val_idx].base)
|
|
val_idx++;
|
|
else if (cur_scope == &ptr_scopes[ptr_idx].base)
|
|
ptr_idx++;
|
|
} else {
|
|
SET_ERROR(ag);
|
|
}
|
|
break;
|
|
// defer/errdefer (AstGen.zig:2580-2581).
|
|
case AST_NODE_DEFER:
|
|
case AST_NODE_ERRDEFER: {
|
|
if (defer_idx >= 128) {
|
|
SET_ERROR(ag);
|
|
break;
|
|
}
|
|
ScopeTag scope_tag = (tag == AST_NODE_DEFER)
|
|
? SCOPE_DEFER_NORMAL
|
|
: SCOPE_DEFER_ERROR;
|
|
// Create sub-block for defer body (AstGen.zig:3123-3126).
|
|
GenZir defer_gen = makeSubBlock(gz, cur_scope);
|
|
defer_gen.any_defer_node = stmt; // AstGen.zig:3125
|
|
|
|
// Evaluate deferred expression (AstGen.zig:3165).
|
|
// DEFER: lhs is the deferred expression, rhs = 0.
|
|
// ERRDEFER: lhs is optional error capture token, rhs is expr.
|
|
AstData dnd = ag->tree->nodes.datas[stmt];
|
|
uint32_t expr_node;
|
|
if (tag == AST_NODE_DEFER) {
|
|
expr_node = dnd.lhs;
|
|
} else {
|
|
expr_node = dnd.rhs;
|
|
}
|
|
// unusedResultExpr pattern (AstGen.zig:3165, 2641-2646).
|
|
emitDbgNode(&defer_gen, expr_node);
|
|
uint32_t defer_result
|
|
= expr(&defer_gen, &defer_gen.base, expr_node);
|
|
addEnsureResult(&defer_gen, defer_result, expr_node);
|
|
|
|
// Add break_inline at end (AstGen.zig:3167).
|
|
addBreak(&defer_gen, ZIR_INST_BREAK_INLINE, 0,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
|
|
// Write body to extra (AstGen.zig:3173-3175).
|
|
uint32_t raw_body_len = gzInstructionsLen(&defer_gen);
|
|
const uint32_t* body = gzInstructionsSlice(&defer_gen);
|
|
uint32_t extra_index = ag->extra_len;
|
|
uint32_t fixup_len
|
|
= countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
ensureExtraCapacity(ag, fixup_len);
|
|
for (uint32_t b = 0; b < raw_body_len; b++)
|
|
appendPossiblyRefdBodyInst(ag, body[b]);
|
|
gzUnstack(&defer_gen);
|
|
|
|
// Create scope (AstGen.zig:3179-3185).
|
|
defer_scopes[defer_idx] = (ScopeDefer) {
|
|
.base = { .tag = scope_tag },
|
|
.parent = cur_scope,
|
|
.index = extra_index,
|
|
.len = fixup_len,
|
|
};
|
|
cur_scope = &defer_scopes[defer_idx].base;
|
|
defer_idx++;
|
|
break;
|
|
}
|
|
// Grouped expression: unwrap parentheses (AstGen.zig:2600-2602).
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
inner_node = ag->tree->nodes.datas[inner_node].lhs;
|
|
continue;
|
|
// while/for as statements (AstGen.zig:2605-2610).
|
|
// These do NOT get emitDbgNode; they emit their own dbg_stmt.
|
|
case AST_NODE_WHILE_SIMPLE:
|
|
case AST_NODE_WHILE_CONT:
|
|
case AST_NODE_WHILE:
|
|
(void)whileExpr(gz, cur_scope, RL_NONE_VAL, inner_node, true);
|
|
break;
|
|
case AST_NODE_FOR_SIMPLE:
|
|
case AST_NODE_FOR:
|
|
(void)forExpr(gz, cur_scope, RL_NONE_VAL, inner_node, true);
|
|
break;
|
|
default: {
|
|
// Expression statement (AstGen.zig:2627 unusedResultExpr).
|
|
emitDbgNode(gz, inner_node);
|
|
uint32_t result = expr(gz, cur_scope, inner_node);
|
|
noreturn_stmt = addEnsureResult(gz, result, inner_node);
|
|
break;
|
|
}
|
|
}
|
|
break; // Break out of the for(;;) unwrapping loop.
|
|
}
|
|
}
|
|
// Emit normal defers at block exit (AstGen.zig:2633-2634).
|
|
if (!noreturn_stmt) {
|
|
genDefers(gz, scope, cur_scope, DEFER_NORMAL_ONLY);
|
|
}
|
|
}
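// Example of the scope threading above for `{ const a = 1; var b = a; }`:
// the first statement fills val_scopes[0] (or ptr_scopes[0] when the init
// needs a result pointer) and cur_scope is advanced to it, so the identifier
// `a` in the second statement resolves through the new scope entry. The
// fixed 128-entry arrays bound how many declarations and defers one block
// may introduce; overflow is reported through SET_ERROR rather than writing
// out of bounds.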
|
|
|
|
// --- fullBodyExpr (AstGen.zig:2358) ---
|
|
// Processes a body expression. If it's an unlabeled block, processes
|
|
// statements inline without creating a BLOCK instruction (unlike blockExprExpr
|
|
// which wraps in BLOCK). Returns the result ref.
|
|
|
|
static uint32_t fullBodyExpr(
|
|
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
|
|
const Ast* tree = gz->astgen->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
|
|
// Extract block statements (AstGen.zig:2368).
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t stmt_buf[2];
|
|
const uint32_t* statements = NULL;
|
|
uint32_t stmt_count = 0;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_BLOCK_TWO:
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON: {
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
stmt_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
stmt_buf[idx++] = nd.rhs;
|
|
statements = stmt_buf;
|
|
stmt_count = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_BLOCK:
|
|
case AST_NODE_BLOCK_SEMICOLON: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
statements = tree->extra_data.arr + start;
|
|
stmt_count = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
// Not a block — treat as single expression (AstGen.zig:2369).
|
|
return exprRl(gz, scope, rl, node);
|
|
}
|
|
|
|
// Check if labeled (AstGen.zig:2373-2377).
|
|
uint32_t lbrace = tree->nodes.main_tokens[node];
|
|
bool is_labeled
|
|
= (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER);
|
|
if (is_labeled) {
|
|
// Labeled blocks need a proper block instruction.
|
|
return blockExprExpr(gz, scope, rl, node);
|
|
}
|
|
|
|
// Unlabeled block: process statements inline (AstGen.zig:2380-2383).
|
|
GenZir sub_gz = makeSubBlock(gz, scope);
|
|
blockExprStmts(&sub_gz, &sub_gz.base, statements, stmt_count);
|
|
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
|
|
}
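// Example: for `if (cond) { foo(); }` the then-body is an unlabeled block,
// so the statements are emitted straight into the parent's instruction slice
// (the sub-block shares the scratch instruction array and is not unstacked
// here) and the expression value is just void passed through rvalue. A
// labeled body such as `blk: { break :blk x; }` goes through blockExprExpr
// instead, which creates a real block instruction that breaks can target.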
|
|
|
|
// --- lastToken (Ast.zig:874) ---
|
|
// Mechanical port of Ast.lastToken. Uses iterative end_offset accumulation.
|
|
|
|
static uint32_t lastToken(const Ast* tree, uint32_t node) {
|
|
uint32_t n = node;
|
|
uint32_t end_offset = 0;
|
|
while (1) {
|
|
AstNodeTag tag = tree->nodes.tags[n];
|
|
AstData nd = tree->nodes.datas[n];
|
|
switch (tag) {
|
|
case AST_NODE_ROOT:
|
|
return tree->tokens.len - 1;
|
|
|
|
// Binary ops: recurse into RHS (Ast.zig:893-948).
|
|
case AST_NODE_ASSIGN_MUL:
|
|
case AST_NODE_ASSIGN_DIV:
|
|
case AST_NODE_ASSIGN_MOD:
|
|
case AST_NODE_ASSIGN_ADD:
|
|
case AST_NODE_ASSIGN_SUB:
|
|
case AST_NODE_ASSIGN_SHL:
|
|
case AST_NODE_ASSIGN_SHL_SAT:
|
|
case AST_NODE_ASSIGN_SHR:
|
|
case AST_NODE_ASSIGN_BIT_AND:
|
|
case AST_NODE_ASSIGN_BIT_XOR:
|
|
case AST_NODE_ASSIGN_BIT_OR:
|
|
case AST_NODE_ASSIGN_MUL_WRAP:
|
|
case AST_NODE_ASSIGN_ADD_WRAP:
|
|
case AST_NODE_ASSIGN_SUB_WRAP:
|
|
case AST_NODE_ASSIGN_MUL_SAT:
|
|
case AST_NODE_ASSIGN_ADD_SAT:
|
|
case AST_NODE_ASSIGN_SUB_SAT:
|
|
case AST_NODE_ASSIGN:
|
|
case AST_NODE_ADD:
|
|
case AST_NODE_SUB:
|
|
case AST_NODE_MUL:
|
|
case AST_NODE_DIV:
|
|
case AST_NODE_MOD:
|
|
case AST_NODE_BIT_AND:
|
|
case AST_NODE_BIT_OR:
|
|
case AST_NODE_BIT_XOR:
|
|
case AST_NODE_SHL:
|
|
case AST_NODE_SHR:
|
|
case AST_NODE_ARRAY_CAT:
|
|
case AST_NODE_ARRAY_MULT:
|
|
case AST_NODE_ADD_WRAP:
|
|
case AST_NODE_SUB_WRAP:
|
|
case AST_NODE_ADD_SAT:
|
|
case AST_NODE_SUB_SAT:
|
|
case AST_NODE_MUL_WRAP:
|
|
case AST_NODE_MUL_SAT:
|
|
case AST_NODE_MERGE_ERROR_SETS:
|
|
case AST_NODE_EQUAL_EQUAL:
|
|
case AST_NODE_BANG_EQUAL:
|
|
case AST_NODE_LESS_THAN:
|
|
case AST_NODE_GREATER_THAN:
|
|
case AST_NODE_LESS_OR_EQUAL:
|
|
case AST_NODE_GREATER_OR_EQUAL:
|
|
case AST_NODE_BOOL_AND:
|
|
case AST_NODE_BOOL_OR:
|
|
case AST_NODE_ORELSE:
|
|
case AST_NODE_CATCH:
|
|
case AST_NODE_ERROR_UNION:
|
|
case AST_NODE_SHL_SAT:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// field_access: return field token + end_offset (Ast.zig:979).
|
|
case AST_NODE_FIELD_ACCESS:
|
|
return nd.rhs + end_offset;
|
|
|
|
// test_decl: recurse into body node (Ast.zig:950).
|
|
case AST_NODE_TEST_DECL:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// defer: recurse into body (lhs) (Ast.zig:951).
|
|
case AST_NODE_DEFER:
|
|
n = nd.lhs;
|
|
continue;
|
|
|
|
// errdefer: recurse into body (rhs) (Ast.zig:950).
|
|
case AST_NODE_ERRDEFER:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// block (Ast.zig:1085): end_offset += 1 (rbrace), recurse into last.
|
|
case AST_NODE_BLOCK: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
assert(start != end);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[end - 1];
|
|
continue;
|
|
}
|
|
|
|
// block_semicolon (Ast.zig:1097): += 2 (semicolon + rbrace).
|
|
case AST_NODE_BLOCK_SEMICOLON: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
assert(start != end);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[end - 1];
|
|
continue;
|
|
}
|
|
|
|
// block_two (Ast.zig:1117): if rhs, recurse rhs +1; if lhs, +1; else
|
|
// +1. Note: C parser uses 0 for "none" (OptionalIndex), not
|
|
// UINT32_MAX.
|
|
case AST_NODE_BLOCK_TWO: {
|
|
if (nd.rhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1;
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
}
|
|
|
|
// block_two_semicolon (Ast.zig:1153).
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON: {
|
|
if (nd.rhs != 0) {
|
|
end_offset += 2;
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 2;
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1;
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
}
|
|
|
|
// builtin_call_two (Ast.zig:1118): recurse into args + rparen.
|
|
case AST_NODE_BUILTIN_CALL_TWO: {
|
|
if (nd.rhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 2; // lparen + rparen
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
}
|
|
|
|
case AST_NODE_BUILTIN_CALL_TWO_COMMA: {
|
|
if (nd.rhs != 0) {
|
|
end_offset += 2; // comma + rparen
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 2;
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1;
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
}
|
|
|
|
// Unary ops: recurse into lhs (Ast.zig:880-891).
|
|
case AST_NODE_BOOL_NOT:
|
|
case AST_NODE_BIT_NOT:
|
|
case AST_NODE_NEGATION:
|
|
case AST_NODE_NEGATION_WRAP:
|
|
case AST_NODE_ADDRESS_OF:
|
|
case AST_NODE_TRY:
|
|
case AST_NODE_AWAIT:
|
|
case AST_NODE_OPTIONAL_TYPE:
|
|
case AST_NODE_SUSPEND:
|
|
case AST_NODE_COMPTIME:
|
|
case AST_NODE_NOSUSPEND:
|
|
case AST_NODE_RESUME:
|
|
n = nd.lhs;
|
|
continue;
|
|
|
|
// return: optional operand (Ast.zig:998-1002).
|
|
case AST_NODE_RETURN:
|
|
if (nd.lhs != 0) {
|
|
n = nd.lhs;
|
|
continue;
|
|
}
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
|
|
// deref: main_token is the `.*` token (Ast.zig:993).
|
|
case AST_NODE_DEREF:
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
|
|
// unwrap_optional (Ast.zig:980): return rhs token + end_offset.
|
|
case AST_NODE_UNWRAP_OPTIONAL:
|
|
return nd.rhs + end_offset;
|
|
|
|
// for_range (Ast.zig:973-977): recurse into rhs if present, else
|
|
// main_token + end_offset.
|
|
case AST_NODE_FOR_RANGE:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs;
|
|
} else {
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
|
|
// error_value: main_token is `error`, last token is name (+2)
|
|
// (Ast.zig:986).
|
|
case AST_NODE_ERROR_VALUE:
|
|
return tree->nodes.main_tokens[n] + 2 + end_offset;
|
|
|
|
// Terminals: return main_token + end_offset (Ast.zig:988-996).
|
|
case AST_NODE_NUMBER_LITERAL:
|
|
case AST_NODE_STRING_LITERAL:
|
|
case AST_NODE_IDENTIFIER:
|
|
case AST_NODE_ENUM_LITERAL:
|
|
case AST_NODE_CHAR_LITERAL:
|
|
case AST_NODE_UNREACHABLE_LITERAL:
|
|
case AST_NODE_ANYFRAME_LITERAL:
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
|
|
// call_one (Ast.zig:1107-1114): +1 for rparen, recurse into
|
|
// first_param if present.
|
|
case AST_NODE_CALL_ONE:
|
|
end_offset += 1; // rparen
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs;
|
|
} else {
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
case AST_NODE_CALL_ONE_COMMA:
|
|
end_offset += 2; // comma + rparen
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// array_access: end_offset += 1 (rbracket), recurse rhs.
|
|
case AST_NODE_ARRAY_ACCESS:
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// simple_var_decl: recurse into init/type (Ast.zig:1169-1178).
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs; // init expr
|
|
} else if (nd.lhs != 0) {
|
|
n = nd.lhs; // type expr
|
|
} else {
|
|
end_offset += 1; // from mut token to name
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
|
|
// aligned_var_decl: recurse into init/align (Ast.zig:1180-1187).
|
|
case AST_NODE_ALIGNED_VAR_DECL:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs; // init expr
|
|
} else {
|
|
end_offset += 1; // rparen
|
|
n = nd.lhs; // align expr
|
|
}
|
|
continue;
|
|
|
|
// local_var_decl (Ast.zig:1209-1217).
|
|
// extra[lhs] = LocalVarDecl { type_node, align_node }
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs; // init expr
|
|
} else {
|
|
end_offset += 1; // rparen
|
|
n = tree->extra_data.arr[nd.lhs + 1]; // align_node
|
|
}
|
|
continue;
|
|
|
|
// global_var_decl (Ast.zig:1189-1207).
|
|
// extra[lhs] = GlobalVarDecl { type_node, align_node,
|
|
// addrspace_node, section_node }
|
|
case AST_NODE_GLOBAL_VAR_DECL:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs; // init expr
|
|
} else {
|
|
uint32_t section_node = tree->extra_data.arr[nd.lhs + 3];
|
|
uint32_t align_node = tree->extra_data.arr[nd.lhs + 1];
|
|
uint32_t type_node = tree->extra_data.arr[nd.lhs];
|
|
if (section_node != 0) {
|
|
end_offset += 1; // rparen
|
|
n = section_node;
|
|
} else if (align_node != 0) {
|
|
end_offset += 1; // rparen
|
|
n = align_node;
|
|
} else if (type_node != 0) {
|
|
n = type_node;
|
|
} else {
|
|
end_offset += 1; // from mut token to name
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
}
|
|
continue;
|
|
|
|
// slice_open: end_offset += 2 (ellipsis2 + rbracket), recurse rhs
|
|
// (Ast.zig:1245-1248).
|
|
case AST_NODE_SLICE_OPEN:
|
|
end_offset += 2;
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// grouped_expression (Ast.zig:983): return rhs token + end_offset.
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
return nd.rhs + end_offset;
|
|
|
|
// if_simple: recurse into body (rhs) (Ast.zig:942).
|
|
case AST_NODE_IF_SIMPLE:
|
|
case AST_NODE_WHILE_SIMPLE:
|
|
case AST_NODE_FOR_SIMPLE:
|
|
case AST_NODE_FN_DECL:
|
|
case AST_NODE_ARRAY_TYPE:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// if: recurse into else_expr (Ast.zig:1295).
|
|
case AST_NODE_IF: {
|
|
// If[rhs]: { then_expr, else_expr }
|
|
n = tree->extra_data.arr[nd.rhs + 1]; // else_expr
|
|
continue;
|
|
}
|
|
|
|
// while: recurse into else_expr (Ast.zig:1290).
|
|
case AST_NODE_WHILE: {
|
|
// While[rhs]: { cont_expr, then_expr, else_expr }
|
|
n = tree->extra_data.arr[nd.rhs + 2]; // else_expr
|
|
continue;
|
|
}
|
|
|
|
// while_cont: recurse into then_expr (Ast.zig:943-like).
|
|
case AST_NODE_WHILE_CONT: {
|
|
// WhileCont[rhs]: { cont_expr, then_expr }
|
|
n = tree->extra_data.arr[nd.rhs + 1]; // then_expr
|
|
continue;
|
|
}
|
|
|
|
// switch: recurse into last case (Ast.zig:1031-1041).
|
|
case AST_NODE_SWITCH: {
|
|
uint32_t ei = nd.rhs;
|
|
uint32_t cs = tree->extra_data.arr[ei];
|
|
uint32_t ce = tree->extra_data.arr[ei + 1];
|
|
if (cs == ce) {
|
|
end_offset += 3; // rparen, lbrace, rbrace
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1; // rbrace
|
|
n = tree->extra_data.arr[ce - 1];
|
|
}
|
|
continue;
|
|
}
|
|
case AST_NODE_SWITCH_COMMA: {
|
|
uint32_t ei = nd.rhs;
|
|
uint32_t cs = tree->extra_data.arr[ei];
|
|
uint32_t ce = tree->extra_data.arr[ei + 1];
|
|
assert(cs != ce);
|
|
end_offset += 2; // comma + rbrace
|
|
n = tree->extra_data.arr[ce - 1];
|
|
continue;
|
|
}
|
|
|
|
// switch_case_one: recurse into rhs (body) (Ast.zig:942).
|
|
case AST_NODE_SWITCH_CASE_ONE:
|
|
case AST_NODE_SWITCH_CASE_INLINE_ONE:
|
|
case AST_NODE_SWITCH_CASE:
|
|
case AST_NODE_SWITCH_CASE_INLINE:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// switch_range: recurse into rhs (Ast.zig: binary op pattern).
|
|
case AST_NODE_SWITCH_RANGE:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// struct_init_one: recurse into field if present, +1.
|
|
case AST_NODE_STRUCT_INIT_ONE:
|
|
end_offset += 1; // rbrace
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs;
|
|
} else {
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
case AST_NODE_STRUCT_INIT_ONE_COMMA:
|
|
end_offset += 2; // comma + rbrace
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// struct_init_dot_two: similar to block_two.
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO:
|
|
if (nd.rhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1; // rbrace
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
|
|
end_offset += 2;
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs;
|
|
} else {
|
|
n = nd.lhs;
|
|
}
|
|
continue;
|
|
|
|
// struct_init_dot: SubRange pattern.
|
|
case AST_NODE_STRUCT_INIT_DOT:
|
|
assert(nd.lhs != nd.rhs);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[nd.rhs - 1];
|
|
continue;
|
|
|
|
// struct_init: node_and_extra SubRange pattern.
|
|
case AST_NODE_STRUCT_INIT: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
// call: SubRange pattern.
|
|
case AST_NODE_CALL: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
case AST_NODE_CALL_COMMA: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
// fn_proto_simple: recurse into rhs (return type).
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// error_set_decl: rhs is the closing rbrace token.
|
|
case AST_NODE_ERROR_SET_DECL:
|
|
return nd.rhs + end_offset;
|
|
|
|
// ptr_type variants: recurse into rhs (child type).
|
|
case AST_NODE_PTR_TYPE_ALIGNED:
|
|
case AST_NODE_PTR_TYPE_SENTINEL:
|
|
case AST_NODE_PTR_TYPE:
|
|
case AST_NODE_PTR_TYPE_BIT_RANGE:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// container_decl: extra_range pattern.
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_TAGGED_UNION:
|
|
assert(nd.lhs != nd.rhs);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[nd.rhs - 1];
|
|
continue;
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
assert(nd.lhs != nd.rhs);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[nd.rhs - 1];
|
|
continue;
|
|
|
|
// container_decl_two / tagged_union_two (Ast.zig:1120-1151).
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
if (nd.rhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.lhs;
|
|
} else {
|
|
if (tag == AST_NODE_CONTAINER_DECL_TWO) {
|
|
uint32_t i = 2; // lbrace + rbrace
|
|
while (tree->tokens.tags[tree->nodes.main_tokens[n] + i]
|
|
== TOKEN_CONTAINER_DOC_COMMENT)
|
|
i += 1;
|
|
end_offset += i;
|
|
} else {
|
|
// tagged_union_two: (enum) {}
|
|
uint32_t i = 5;
|
|
while (tree->tokens.tags[tree->nodes.main_tokens[n] + i]
|
|
== TOKEN_CONTAINER_DOC_COMMENT)
|
|
i += 1;
|
|
end_offset += i;
|
|
}
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
|
|
end_offset += 2;
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs;
|
|
} else {
|
|
n = nd.lhs;
|
|
}
|
|
continue;
|
|
|
|
// container_decl_arg: node_and_extra SubRange.
|
|
case AST_NODE_CONTAINER_DECL_ARG: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
if (si == se) {
|
|
end_offset += 3; // rparen + lbrace + rbrace
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[se - 1];
|
|
}
|
|
continue;
|
|
}
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
// slice: extra data pattern.
|
|
case AST_NODE_SLICE: {
|
|
// Slice[rhs]: { start, end }
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[nd.rhs + 1]; // end
|
|
continue;
|
|
}
|
|
case AST_NODE_SLICE_SENTINEL: {
|
|
// SliceSentinel[rhs]: { start, end, sentinel }
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[nd.rhs + 2]; // sentinel
|
|
continue;
|
|
}
|
|
|
|
// array_type_sentinel: extra data.
|
|
case AST_NODE_ARRAY_TYPE_SENTINEL: {
|
|
// ArrayTypeSentinel[rhs]: { sentinel, elem_type }
|
|
n = tree->extra_data.arr[nd.rhs + 1]; // elem_type
|
|
continue;
|
|
}
|
|
|
|
// multiline_string_literal: rhs holds the last string-line token;
// return it + end_offset.
case AST_NODE_MULTILINE_STRING_LITERAL:
return nd.rhs + end_offset;
|
|
|
|
// break/continue (Ast.zig:1275-1283).
|
|
// lhs is opt_token (null_token = UINT32_MAX), rhs is opt_node (0 =
|
|
// none).
|
|
case AST_NODE_BREAK:
|
|
case AST_NODE_CONTINUE:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs; // optional rhs expression
|
|
} else if (nd.lhs != UINT32_MAX) {
|
|
return nd.lhs + end_offset; // label token
|
|
} else {
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
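// Example: `break :blk v` recurses into the operand node of `v`;
// `break :blk` (no operand) returns the label token; a bare `break`
// returns its own main token.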
|
|
|
|
// array_init_one: end_offset += 1 (rbrace), recurse rhs
|
|
// (Ast.zig:1224-1230).
|
|
case AST_NODE_ARRAY_INIT_ONE:
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
case AST_NODE_ARRAY_INIT_ONE_COMMA:
|
|
end_offset += 2; // comma + rbrace
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// struct_init_dot_comma: SubRange pattern.
|
|
case AST_NODE_STRUCT_INIT_DOT_COMMA:
|
|
assert(nd.lhs != nd.rhs);
|
|
end_offset += 2; // comma + rbrace
|
|
n = tree->extra_data.arr[nd.rhs - 1];
|
|
continue;
|
|
|
|
// struct_init_comma: node_and_extra SubRange.
|
|
case AST_NODE_STRUCT_INIT_COMMA: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
// array_init variants.
|
|
case AST_NODE_ARRAY_INIT: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
case AST_NODE_ARRAY_INIT_COMMA: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
// array_init_dot variants.
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO:
|
|
if (nd.rhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
} else if (nd.lhs != 0) {
|
|
end_offset += 1;
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1;
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
continue;
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
|
|
end_offset += 2;
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs;
|
|
} else {
|
|
n = nd.lhs;
|
|
}
|
|
continue;
|
|
case AST_NODE_ARRAY_INIT_DOT:
|
|
assert(nd.lhs != nd.rhs);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[nd.rhs - 1];
|
|
continue;
|
|
case AST_NODE_ARRAY_INIT_DOT_COMMA:
|
|
assert(nd.lhs != nd.rhs);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[nd.rhs - 1];
|
|
continue;
|
|
|
|
// builtin_call (Ast.zig:1083-1105).
|
|
case AST_NODE_BUILTIN_CALL: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 1;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
case AST_NODE_BUILTIN_CALL_COMMA: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 2;
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
// for (Ast.zig:1300-1303): complex extra data.
|
|
case AST_NODE_FOR: {
|
|
// lhs = span.start (extra_data index),
|
|
// rhs = packed(inputs:u31, has_else:u1 at bit 31).
|
|
// extra[lhs..] = input nodes, then_body, [else_body].
|
|
uint32_t span_start = nd.lhs;
|
|
uint32_t for_packed = nd.rhs;
|
|
uint32_t inputs = for_packed & 0x7FFFFFFFu;
|
|
bool has_else = (for_packed >> 31) != 0;
|
|
uint32_t last_idx = span_start + inputs + (has_else ? 1 : 0);
|
|
n = tree->extra_data.arr[last_idx];
|
|
continue;
|
|
}
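// Illustrative layout: for `for (xs, ys) |x, y| { ... } else { ... }`,
// inputs == 2 and has_else is set, so extra[lhs..] holds
// { xs, ys, then_body, else_body } and last_idx selects else_body.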
|
|
|
|
// anyframe_type (Ast.zig:952): recurse into rhs.
|
|
case AST_NODE_ANYFRAME_TYPE:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// assign_destructure (Ast.zig:960-965): recurse into rhs.
|
|
case AST_NODE_ASSIGN_DESTRUCTURE:
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// asm_simple (Ast.zig:981): return nd.rhs + end_offset.
|
|
case AST_NODE_ASM_SIMPLE:
|
|
return nd.rhs + end_offset;
|
|
|
|
// asm_input (Ast.zig:983): return nd.rhs + end_offset.
|
|
case AST_NODE_ASM_INPUT:
|
|
return nd.rhs + end_offset;
|
|
|
|
// asm_output (Ast.zig:985): return nd.rhs + end_offset.
|
|
case AST_NODE_ASM_OUTPUT:
|
|
return nd.rhs + end_offset;
|
|
|
|
// asm_legacy (Ast.zig:1053-1057): read rparen from extra data.
|
|
case AST_NODE_ASM_LEGACY: {
|
|
// extra[rhs] = AsmLegacy { items_start, items_end, rparen }
|
|
uint32_t rparen = tree->extra_data.arr[nd.rhs + 2];
|
|
return rparen + end_offset;
|
|
}
|
|
|
|
// asm (Ast.zig:1058-1062): read rparen from extra data.
|
|
case AST_NODE_ASM: {
|
|
// extra[rhs] = Asm { items_start, items_end, clobbers, rparen }
|
|
uint32_t rparen = tree->extra_data.arr[nd.rhs + 3];
|
|
return rparen + end_offset;
|
|
}
|
|
|
|
// container_field_init (Ast.zig:1219-1222): recurse into
|
|
// value_expr or type_expr.
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
if (nd.rhs != 0) {
|
|
n = nd.rhs; // value_expr
|
|
} else {
|
|
n = nd.lhs; // type_expr
|
|
}
|
|
continue;
|
|
|
|
// container_field_align (Ast.zig:1224-1231): +1 for rparen,
|
|
// recurse rhs.
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
end_offset += 1;
|
|
n = nd.rhs;
|
|
continue;
|
|
|
|
// container_field (Ast.zig:1232-1236): read value_expr from
|
|
// extra data.
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
// extra[rhs] = ContainerField { align_expr, value_expr }
|
|
uint32_t value_expr = tree->extra_data.arr[nd.rhs + 1];
|
|
n = value_expr;
|
|
continue;
|
|
}
|
|
|
|
// tagged_union_enum_tag (Ast.zig:1011-1021): SubRange handling.
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
if (si == se) {
|
|
end_offset += 4; // rparen + rparen + lbrace + rbrace
|
|
n = nd.lhs;
|
|
} else {
|
|
end_offset += 1; // rbrace
|
|
n = tree->extra_data.arr[se - 1];
|
|
}
|
|
continue;
|
|
}
|
|
// tagged_union_enum_tag_trailing (Ast.zig:1022-1030).
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: {
|
|
uint32_t si = tree->extra_data.arr[nd.rhs];
|
|
uint32_t se = tree->extra_data.arr[nd.rhs + 1];
|
|
assert(si != se);
|
|
end_offset += 2; // comma/semicolon + rbrace
|
|
n = tree->extra_data.arr[se - 1];
|
|
continue;
|
|
}
|
|
|
|
default:
|
|
// Fallback: return main_token + end_offset.
|
|
return tree->nodes.main_tokens[n] + end_offset;
|
|
}
|
|
}
|
|
}
|
|
|
|
// --- addParam (AstGen.zig:12390) ---
|
|
// Creates a param instruction with pl_tok data and type body in extra.
|
|
|
|
static uint32_t addParam(GenZir* gz, GenZir* param_gz, ZirInstTag tag,
|
|
uint32_t abs_tok_index, uint32_t name, bool is_generic) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
|
|
uint32_t body_len = gzInstructionsLen(param_gz);
|
|
const uint32_t* param_body = gzInstructionsSlice(param_gz);
|
|
|
|
// Param payload: name, type{body_len:u31|is_generic:u1}
|
|
ensureExtraCapacity(ag, 2 + body_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = name;
|
|
ag->extra[ag->extra_len++]
|
|
= (body_len & 0x7FFFFFFFu) | (is_generic ? 0x80000000u : 0u);
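// Illustrative layout, e.g. for `x: T` where T is an earlier comptime
// param and the type expression folds to that param's ref, so the body
// is a single break_inline:
//   extra[payload_index + 0] = name          ("x")
//   extra[payload_index + 1] = 0x80000001    (body_len = 1, is_generic = 1)
//   extra[payload_index + 2] = break_inline instruction index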
|
|
for (uint32_t i = 0; i < body_len; i++) {
|
|
ag->extra[ag->extra_len++] = param_body[i];
|
|
}
|
|
gzUnstack(param_gz);
|
|
|
|
// Emit the param instruction.
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = tag;
|
|
ZirInstData data;
|
|
data.pl_tok.src_tok = tokenIndexToRelative(gz, abs_tok_index);
|
|
data.pl_tok.payload_index = payload_index;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx;
|
|
}
|
|
|
|
// --- addDbgVar (AstGen.zig:13196) ---
|
|
|
|
static void addDbgVar(
|
|
GenZir* gz, ZirInstTag tag, uint32_t name, uint32_t inst) {
|
|
if (gz->is_comptime)
|
|
return;
|
|
ZirInstData data;
|
|
data.str_op.str = name;
|
|
data.str_op.operand = inst;
|
|
addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// --- addFunc (AstGen.zig:12023) ---
|
|
// Handles non-fancy func/func_inferred instructions.
|
|
// ret_body/ret_body_len: instructions for the return type sub-block (may be
// 0). ret_ref: if ret_body_len==0, the return type as a simple Ref.
// ret_param_refs/ret_param_refs_len: param instructions referenced by the
// return type body; prepended before ret_body in the trailing ret_ty.
|
|
|
|
static uint32_t addFunc(GenZir* gz, uint32_t src_node, uint32_t block_node,
|
|
uint32_t param_block, uint32_t ret_ref, const uint32_t* ret_body,
|
|
uint32_t ret_body_len, const uint32_t* body, uint32_t body_len,
|
|
const uint32_t* param_insts, uint32_t param_insts_len,
|
|
uint32_t lbrace_line, uint32_t lbrace_column, bool is_inferred_error,
|
|
bool ret_ty_is_generic, const uint32_t* ret_param_refs,
|
|
uint32_t ret_param_refs_len) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
uint32_t rbrace_tok = lastToken(tree, block_node);
|
|
uint32_t rbrace_start = tree->tokens.starts[rbrace_tok];
|
|
advanceSourceCursor(ag, rbrace_start);
|
|
uint32_t rbrace_line = ag->source_line - gz->decl_line;
|
|
uint32_t rbrace_column = ag->source_column;
|
|
|
|
// Build Func payload (Zir.Inst.Func: ret_ty, param_block, body_len).
|
|
// (AstGen.zig:12182-12194)
|
|
uint32_t ret_ty_packed_len;
|
|
if (ret_body_len > 0) {
|
|
ret_ty_packed_len
|
|
= countBodyLenAfterFixups(ag, ret_param_refs, ret_param_refs_len)
|
|
+ countBodyLenAfterFixups(ag, ret_body, ret_body_len);
|
|
} else if (ret_ref != ZIR_REF_NONE) {
|
|
ret_ty_packed_len = 1; // simple Ref
|
|
} else {
|
|
ret_ty_packed_len = 0; // void return
|
|
}
|
|
// Pack RetTy: body_len:u31 | is_generic:bool(u1).
|
|
uint32_t ret_ty_packed = (ret_ty_packed_len & 0x7FFFFFFFu)
|
|
| (ret_ty_is_generic ? 0x80000000u : 0u);
|
|
|
|
uint32_t fixup_body_len = countBodyLenAfterFixupsExtraRefs(
|
|
ag, body, body_len, param_insts, param_insts_len);
|
|
ensureExtraCapacity(ag, 3 + ret_ty_packed_len + fixup_body_len + 7);
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = ret_ty_packed; // Func.ret_ty
|
|
ag->extra[ag->extra_len++] = param_block; // Func.param_block
|
|
ag->extra[ag->extra_len++] = fixup_body_len; // Func.body_len
|
|
|
|
// Trailing ret_ty: either body instructions (with ret_param_refs
|
|
// prepended) or a single ref. (AstGen.zig:12196-12204)
|
|
if (ret_body_len > 0) {
|
|
appendBodyWithFixups(ag, ret_param_refs, ret_param_refs_len);
|
|
appendBodyWithFixups(ag, ret_body, ret_body_len);
|
|
} else if (ret_ref != ZIR_REF_NONE) {
|
|
ag->extra[ag->extra_len++] = ret_ref;
|
|
}
|
|
|
|
// Body instructions with extra_refs for param_insts
|
|
// (AstGen.zig:12206).
|
|
appendBodyWithFixupsExtraRefs(
|
|
ag, body, body_len, param_insts, param_insts_len);
|
|
|
|
// SrcLocs (AstGen.zig:12098-12106).
|
|
uint32_t columns = (lbrace_column & 0xFFFFu) | (rbrace_column << 16);
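// E.g. lbrace at column 17 and rbrace at column 1 pack to
// columns == 0x00010011.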
|
|
ag->extra[ag->extra_len++] = lbrace_line;
|
|
ag->extra[ag->extra_len++] = rbrace_line;
|
|
ag->extra[ag->extra_len++] = columns;
|
|
// proto_hash (4 words): zero for now.
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
|
|
// Emit the func instruction (AstGen.zig:12220-12226).
|
|
ZirInstTag tag
|
|
= is_inferred_error ? ZIR_INST_FUNC_INFERRED : ZIR_INST_FUNC;
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)src_node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return addInstruction(gz, tag, data);
|
|
}
|
|
|
|
// --- addFuncFancy (AstGen.zig:12112-12173) ---
|
|
// Emits func_fancy instruction when cc_ref, is_var_args, noalias_bits,
|
|
// or is_noinline are present.
|
|
|
|
static uint32_t addFuncFancy(GenZir* gz, uint32_t src_node,
|
|
uint32_t block_node, uint32_t param_block, uint32_t ret_ref,
|
|
const uint32_t* ret_body, uint32_t ret_body_len, uint32_t cc_ref,
|
|
const uint32_t* cc_body, uint32_t cc_body_len, const uint32_t* body,
|
|
uint32_t body_len, const uint32_t* param_insts, uint32_t param_insts_len,
|
|
uint32_t lbrace_line, uint32_t lbrace_column, bool is_var_args,
|
|
bool is_inferred_error, bool is_noinline, uint32_t noalias_bits,
|
|
bool ret_ty_is_generic, const uint32_t* ret_param_refs,
|
|
uint32_t ret_param_refs_len) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
uint32_t rbrace_tok = lastToken(tree, block_node);
|
|
uint32_t rbrace_start = tree->tokens.starts[rbrace_tok];
|
|
advanceSourceCursor(ag, rbrace_start);
|
|
uint32_t rbrace_line = ag->source_line - gz->decl_line;
|
|
uint32_t rbrace_column = ag->source_column;
|
|
|
|
uint32_t fixup_body_len = countBodyLenAfterFixupsExtraRefs(
|
|
ag, body, body_len, param_insts, param_insts_len);
|
|
|
|
// Calculate cc extra len (AstGen.zig:12231-12236).
|
|
uint32_t cc_extra_len = 0;
|
|
if (cc_body_len > 0) {
|
|
cc_extra_len = countBodyLenAfterFixups(ag, cc_body, cc_body_len) + 1;
|
|
} else if (cc_ref != ZIR_REF_NONE) {
|
|
cc_extra_len = 1;
|
|
}
|
|
|
|
// Calculate ret extra len (AstGen.zig:12231-12236).
|
|
uint32_t ret_extra_len = 0;
|
|
if (ret_body_len > 0) {
|
|
ret_extra_len
|
|
= countBodyLenAfterFixups(ag, ret_param_refs, ret_param_refs_len)
|
|
+ countBodyLenAfterFixups(ag, ret_body, ret_body_len) + 1;
|
|
} else if (ret_ref != ZIR_REF_NONE) {
|
|
ret_extra_len = 1;
|
|
}
|
|
|
|
// FuncFancy has 3 fields: param_block, body_len, bits.
|
|
uint32_t total_extra = 3 + cc_extra_len + ret_extra_len
|
|
+ ((noalias_bits != 0) ? 1u : 0u) + fixup_body_len + 7;
|
|
ensureExtraCapacity(ag, total_extra);
|
|
|
|
// FuncFancy payload (Zir.zig:2589-2610).
|
|
uint32_t payload_index = ag->extra_len;
|
|
ag->extra[ag->extra_len++] = param_block;
|
|
ag->extra[ag->extra_len++] = fixup_body_len;
|
|
// Bits packed as u32 (Zir.zig:2598-2609).
|
|
uint32_t bits = 0;
|
|
if (is_var_args)
|
|
bits |= (1u << 0);
|
|
if (is_inferred_error)
|
|
bits |= (1u << 1);
|
|
if (is_noinline)
|
|
bits |= (1u << 2);
|
|
if (cc_ref != ZIR_REF_NONE)
|
|
bits |= (1u << 3); // has_cc_ref
|
|
if (cc_body_len > 0)
|
|
bits |= (1u << 4); // has_cc_body
|
|
if (ret_ref != ZIR_REF_NONE)
|
|
bits |= (1u << 5); // has_ret_ty_ref
|
|
if (ret_body_len > 0)
|
|
bits |= (1u << 6); // has_ret_ty_body
|
|
if (noalias_bits != 0)
|
|
bits |= (1u << 7); // has_any_noalias
|
|
if (ret_ty_is_generic)
|
|
bits |= (1u << 8); // ret_ty_is_generic
|
|
ag->extra[ag->extra_len++] = bits;
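// Illustrative example: `noinline fn f() !u32 { ... }`, assuming the
// return type folds to a simple ref (no ret body), sets bits 1
// (is_inferred_error), 2 (is_noinline) and 5 (has_ret_ty_ref),
// so bits == 0x26.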
|
|
|
|
// Trailing cc (AstGen.zig:12143-12151).
|
|
if (cc_body_len > 0) {
|
|
ag->extra[ag->extra_len++]
|
|
= countBodyLenAfterFixups(ag, cc_body, cc_body_len);
|
|
for (uint32_t i = 0; i < cc_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, cc_body[i]);
|
|
} else if (cc_ref != ZIR_REF_NONE) {
|
|
ag->extra[ag->extra_len++] = cc_ref;
|
|
}
|
|
|
|
// Trailing ret_ty (AstGen.zig:12152-12164).
|
|
if (ret_body_len > 0) {
|
|
ag->extra[ag->extra_len++]
|
|
= countBodyLenAfterFixups(ag, ret_param_refs, ret_param_refs_len)
|
|
+ countBodyLenAfterFixups(ag, ret_body, ret_body_len);
|
|
appendBodyWithFixups(ag, ret_param_refs, ret_param_refs_len);
|
|
appendBodyWithFixups(ag, ret_body, ret_body_len);
|
|
} else if (ret_ref != ZIR_REF_NONE) {
|
|
ag->extra[ag->extra_len++] = ret_ref;
|
|
}
|
|
|
|
// Trailing noalias_bits (AstGen.zig:12166-12168).
|
|
if (noalias_bits != 0)
|
|
ag->extra[ag->extra_len++] = noalias_bits;
|
|
|
|
// Body (AstGen.zig:12170).
|
|
appendBodyWithFixupsExtraRefs(
|
|
ag, body, body_len, param_insts, param_insts_len);
|
|
|
|
// SrcLocs (AstGen.zig:12098-12106).
|
|
uint32_t columns = (lbrace_column & 0xFFFFu) | (rbrace_column << 16);
|
|
ag->extra[ag->extra_len++] = lbrace_line;
|
|
ag->extra[ag->extra_len++] = rbrace_line;
|
|
ag->extra[ag->extra_len++] = columns;
|
|
// proto_hash (4 words): zero for now.
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
|
|
// Emit the func_fancy instruction (AstGen.zig:12220-12226).
|
|
ZirInstData data;
|
|
data.pl_node.src_node = (int32_t)src_node - (int32_t)gz->decl_node_index;
|
|
data.pl_node.payload_index = payload_index;
|
|
return addInstruction(gz, ZIR_INST_FUNC_FANCY, data);
|
|
}
|
|
|
|
// --- testDecl (AstGen.zig:4708) ---
|
|
|
|
static void testDecl(AstGenCtx* ag, GenZir* gz, Scope* scope,
|
|
uint32_t* wip_decl_insts, uint32_t* decl_idx, uint32_t node) {
|
|
const Ast* tree = ag->tree;
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t body_node = nd.rhs;
|
|
|
|
// makeDeclaration before advanceSourceCursorToNode (AstGen.zig:4726-4729).
|
|
uint32_t decl_inst = makeDeclaration(ag, node);
|
|
wip_decl_insts[*decl_idx] = decl_inst;
|
|
(*decl_idx)++;
|
|
advanceSourceCursorToNode(ag, node);
|
|
|
|
uint32_t decl_line = ag->source_line;
|
|
uint32_t decl_column = ag->source_column;
|
|
|
|
// Extract test name (AstGen.zig:4748-4835).
|
|
uint32_t test_token = tree->nodes.main_tokens[node];
|
|
uint32_t test_name_token = test_token + 1;
|
|
uint32_t test_name = 0; // NullTerminatedString.empty
|
|
DeclFlagsId decl_id = DECL_ID_UNNAMED_TEST;
|
|
|
|
// Check if the token after 'test' is a string literal.
|
|
// We identify string literals by checking the source character.
|
|
uint32_t name_tok_start = tree->tokens.starts[test_name_token];
|
|
if (name_tok_start < tree->source_len
|
|
&& tree->source[name_tok_start] == '"') {
|
|
// String literal name.
|
|
uint32_t name_len;
|
|
strLitAsString(ag, test_name_token, &test_name, &name_len);
|
|
decl_id = DECL_ID_TEST;
|
|
} else if (tree->tokens.tags[test_name_token] == TOKEN_IDENTIFIER) {
|
|
// Identifier test name (decltest) (AstGen.zig:4763-4834).
|
|
// Upstream performs full scope resolution for validation; we skip
|
|
// the validation and just record the identifier as the test name.
|
|
test_name = identAsString(ag, test_name_token);
|
|
decl_id = DECL_ID_DECLTEST;
|
|
}
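// E.g. `test "basic" {}` interns "basic" and uses DECL_ID_TEST;
// `test foo {}` records the identifier and uses DECL_ID_DECLTEST;
// a bare `test {}` stays DECL_ID_UNNAMED_TEST with an empty name.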
|
|
|
|
// Set up decl_block GenZir (AstGen.zig:4735-4743).
|
|
GenZir decl_block;
|
|
memset(&decl_block, 0, sizeof(decl_block));
|
|
decl_block.base.tag = SCOPE_GEN_ZIR;
|
|
decl_block.parent = scope;
|
|
decl_block.astgen = ag;
|
|
decl_block.decl_node_index = node;
|
|
decl_block.decl_line = decl_line;
|
|
decl_block.is_comptime = true;
|
|
decl_block.instructions_top = ag->scratch_inst_len;
|
|
decl_block.break_block = UINT32_MAX;
|
|
decl_block.any_defer_node = UINT32_MAX;
|
|
|
|
// Set up fn_block GenZir (AstGen.zig:4837-4845).
|
|
GenZir fn_block;
|
|
memset(&fn_block, 0, sizeof(fn_block));
|
|
fn_block.base.tag = SCOPE_GEN_ZIR;
|
|
fn_block.parent = &decl_block.base;
|
|
fn_block.astgen = ag;
|
|
fn_block.decl_node_index = node;
|
|
fn_block.decl_line = decl_line;
|
|
fn_block.is_comptime = false;
|
|
fn_block.instructions_top = ag->scratch_inst_len;
|
|
fn_block.break_block = UINT32_MAX;
|
|
fn_block.any_defer_node = UINT32_MAX;
|
|
|
|
// Set within_fn, fn_block and fn_ret_ty for the body
|
|
// (AstGen.zig:4848-4853).
|
|
bool prev_within_fn = ag->within_fn;
|
|
void* prev_fn_block = ag->fn_block;
|
|
uint32_t prev_fn_ret_ty = ag->fn_ret_ty;
|
|
ag->within_fn = true;
|
|
setFnBlock(ag, &fn_block);
|
|
ag->fn_ret_ty = ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE;
|
|
|
|
// Compute lbrace source location (AstGen.zig:4860-4862).
|
|
advanceSourceCursorToNode(ag, body_node);
|
|
uint32_t lbrace_line = ag->source_line - decl_line;
|
|
uint32_t lbrace_column = ag->source_column;
|
|
|
|
// Process test body (AstGen.zig:4864).
|
|
uint32_t block_result
|
|
= fullBodyExpr(&fn_block, &fn_block.base, RL_NONE_VAL, body_node);
|
|
|
|
ag->within_fn = prev_within_fn;
|
|
ag->fn_block = prev_fn_block;
|
|
ag->fn_ret_ty = prev_fn_ret_ty;
|
|
|
|
// If we hit unimplemented features, bail out.
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
|
|
// Add restore_err_ret_index + ret_implicit (AstGen.zig:4865-4871).
|
|
if (gzInstructionsLen(&fn_block) == 0
|
|
|| !refIsNoReturn(&fn_block, block_result)) {
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = ZIR_REF_NONE; // .none for .ret
|
|
rdata.un_node.src_node
|
|
= (int32_t)node - (int32_t)fn_block.decl_node_index;
|
|
addInstruction(
|
|
&fn_block, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
|
|
|
|
uint32_t body_last_tok = lastToken(tree, body_node);
|
|
ZirInstData rdata2;
|
|
rdata2.un_tok.operand = ZIR_REF_VOID_VALUE;
|
|
rdata2.un_tok.src_tok = tokenIndexToRelative(&fn_block, body_last_tok);
|
|
addInstruction(&fn_block, ZIR_INST_RET_IMPLICIT, rdata2);
|
|
}
|
|
|
|
// Read fn_block body before unstacking (AstGen.zig:4874).
|
|
// Upstream unstacks fn_block inside addFunc before appending the func
|
|
// instruction to decl_block. We must unstack fn_block first so that
|
|
// addFunc's addInstruction goes into decl_block's range.
|
|
const uint32_t* fn_body = gzInstructionsSlice(&fn_block);
|
|
uint32_t fn_body_len = gzInstructionsLen(&fn_block);
|
|
gzUnstack(&fn_block);
|
|
|
|
// Create func instruction (AstGen.zig:4874-4897).
|
|
uint32_t func_ref = addFunc(&decl_block, node, body_node, decl_inst,
|
|
ZIR_REF_ANYERROR_VOID_ERROR_UNION_TYPE, NULL, 0, fn_body, fn_body_len,
|
|
NULL, 0, lbrace_line, lbrace_column, false, false, NULL, 0);
|
|
|
|
// break_inline returning func to declaration (AstGen.zig:4899).
|
|
makeBreakInline(&decl_block, decl_inst, func_ref, AST_NODE_OFFSET_NONE);
|
|
|
|
// setDeclaration (AstGen.zig:4903-4923).
|
|
setDeclaration(ag, decl_inst,
|
|
(SetDeclArgs) { .src_line = decl_line,
|
|
.src_column = decl_column,
|
|
.id = decl_id,
|
|
.name = test_name,
|
|
.lib_name = UINT32_MAX,
|
|
.value_body = gzInstructionsSlice(&decl_block),
|
|
.value_body_len = gzInstructionsLen(&decl_block) });
|
|
gzUnstack(&decl_block);
|
|
|
|
(void)gz;
|
|
}
|
|
|
|
// --- fnDecl (AstGen.zig:4067) / fnDeclInner (AstGen.zig:4228) ---
|
|
// Handles function declarations with bodies (fn_decl) and
|
|
// function prototypes without bodies (fn_proto*).
|
|
|
|
static void fnDecl(AstGenCtx* ag, GenZir* gz, Scope* scope,
|
|
uint32_t* wip_decl_insts, uint32_t* decl_idx, uint32_t node) {
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag node_tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// For fn_decl: data.lhs = fn_proto node, data.rhs = body node.
|
|
// For fn_proto*: the node itself IS the proto, no body.
|
|
uint32_t proto_node;
|
|
uint32_t body_node;
|
|
if (node_tag == AST_NODE_FN_DECL) {
|
|
proto_node = nd.lhs;
|
|
body_node = nd.rhs;
|
|
} else {
|
|
// fn_proto_simple, fn_proto_multi, fn_proto_one, fn_proto
|
|
proto_node = node;
|
|
body_node = 0;
|
|
}
|
|
|
|
// Get function name token (main_token of proto + 1 = fn name).
|
|
uint32_t fn_token = tree->nodes.main_tokens[proto_node];
|
|
uint32_t fn_name_token = fn_token + 1;
|
|
|
|
// Check for 'pub', 'export', 'inline', 'noinline' modifiers
|
|
// (Ast.zig:2003-2025, AstGen.zig:4102-4106, 4240-4247).
|
|
bool is_pub = false;
|
|
bool is_export = false;
|
|
bool is_noinline = false;
|
|
bool has_inline_keyword = false;
|
|
for (uint32_t i = fn_token; i > 0;) {
|
|
i--;
|
|
uint32_t ttag = tree->tokens.tags[i];
|
|
if (ttag == TOKEN_KEYWORD_PUB)
|
|
is_pub = true;
|
|
else if (ttag == TOKEN_KEYWORD_EXPORT)
|
|
is_export = true;
|
|
else if (ttag == TOKEN_KEYWORD_NOINLINE)
|
|
is_noinline = true;
|
|
else if (ttag == TOKEN_KEYWORD_INLINE)
|
|
has_inline_keyword = true;
|
|
else
|
|
break;
|
|
}
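// E.g. for `pub inline fn f() void {}` the backwards scan sets is_pub
// and has_inline_keyword, then stops at the first token that is none
// of pub/export/inline/noinline.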
|
|
|
|
// makeDeclaration on fn_proto node (AstGen.zig:4090).
|
|
uint32_t decl_inst = makeDeclaration(ag, proto_node);
|
|
wip_decl_insts[*decl_idx] = decl_inst;
|
|
(*decl_idx)++;
|
|
|
|
advanceSourceCursorToNode(ag, node);
|
|
uint32_t decl_line = ag->source_line;
|
|
uint32_t decl_column = ag->source_column;
|
|
|
|
// Set this now, since parameter types, return type, etc may be generic
|
|
// (AstGen.zig:4097-4100).
|
|
bool prev_within_fn = ag->within_fn;
|
|
ag->within_fn = true;
|
|
|
|
// Save source cursor for restoring after ret_gz (AstGen.zig:4387-4388).
|
|
uint32_t saved_source_offset = ag->source_offset;
|
|
uint32_t saved_source_line = ag->source_line;
|
|
uint32_t saved_source_column = ag->source_column;
|
|
|
|
AstNodeTag proto_tag = tree->nodes.tags[proto_node];
|
|
AstData proto_data = tree->nodes.datas[proto_node];
|
|
|
|
// Extract return type node (rhs for all fn_proto variants).
|
|
uint32_t return_type_node = proto_data.rhs;
|
|
|
|
// Detect inferred error set: token before return type is '!'
|
|
// (AstGen.zig:4249-4251).
|
|
bool is_inferred_error = false;
|
|
if (return_type_node != 0) {
|
|
uint32_t ret_first_tok = firstToken(tree, return_type_node);
|
|
if (ret_first_tok > 0) {
|
|
uint32_t maybe_bang = ret_first_tok - 1;
|
|
uint32_t bang_start = tree->tokens.starts[maybe_bang];
|
|
if (tree->source[bang_start] == '!')
|
|
is_inferred_error = true;
|
|
}
|
|
}
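// E.g. `fn f() !u32` has `u32` as the return type node and `!` as the
// token immediately before it, so is_inferred_error is set; an explicit
// error set like `anyerror!u32` keeps it false.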
|
|
|
|
// Extract param type nodes and callconv_expr from proto variant
|
|
// (AstGen.zig:4253-4254, Ast.zig:1456-1520).
|
|
uint32_t param_nodes_buf[1]; // buffer for fn_proto_simple/fn_proto_one
|
|
const uint32_t* param_nodes = NULL;
|
|
uint32_t params_len = 0;
|
|
uint32_t callconv_expr_node = 0; // 0 = none (OptionalIndex)
|
|
|
|
if (proto_tag == AST_NODE_FN_PROTO_SIMPLE) {
|
|
// data.lhs = optional param node, data.rhs = return type.
|
|
// callconv_expr = .none (Ast.zig:1468).
|
|
if (proto_data.lhs != 0) {
|
|
param_nodes_buf[0] = proto_data.lhs;
|
|
param_nodes = param_nodes_buf;
|
|
params_len = 1;
|
|
}
|
|
} else if (proto_tag == AST_NODE_FN_PROTO_ONE) {
|
|
// data.lhs = extra_data index → AstFnProtoOne.
|
|
uint32_t extra_idx = proto_data.lhs;
|
|
uint32_t param
|
|
= tree->extra_data.arr[extra_idx]; // AstFnProtoOne.param
|
|
if (param != 0) {
|
|
param_nodes_buf[0] = param;
|
|
param_nodes = param_nodes_buf;
|
|
params_len = 1;
|
|
}
|
|
// AstFnProtoOne.callconv_expr at offset 4 (Ast.zig:4076).
|
|
callconv_expr_node = tree->extra_data.arr[extra_idx + 4];
|
|
} else if (proto_tag == AST_NODE_FN_PROTO_MULTI) {
|
|
// data.lhs = extra_data index → SubRange{start, end}.
|
|
// callconv_expr = .none (Ast.zig:1484).
|
|
uint32_t extra_idx = proto_data.lhs;
|
|
uint32_t range_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t range_end = tree->extra_data.arr[extra_idx + 1];
|
|
param_nodes = tree->extra_data.arr + range_start;
|
|
params_len = range_end - range_start;
|
|
} else if (proto_tag == AST_NODE_FN_PROTO) {
|
|
// data.lhs = extra_data index → AstFnProto{params_start, params_end,
|
|
// ...}.
|
|
uint32_t extra_idx = proto_data.lhs;
|
|
uint32_t pstart = tree->extra_data.arr[extra_idx]; // params_start
|
|
uint32_t pend = tree->extra_data.arr[extra_idx + 1]; // params_end
|
|
param_nodes = tree->extra_data.arr + pstart;
|
|
params_len = pend - pstart;
|
|
// AstFnProto.callconv_expr at offset 5 (Ast.zig:4089).
|
|
callconv_expr_node = tree->extra_data.arr[extra_idx + 5];
|
|
}
|
|
|
|
// decl_gz (called value_gz in caller, decl_gz in fnDeclInner)
|
|
// (AstGen.zig:4194-4201).
|
|
GenZir decl_gz;
|
|
memset(&decl_gz, 0, sizeof(decl_gz));
|
|
decl_gz.base.tag = SCOPE_GEN_ZIR;
|
|
decl_gz.parent = scope;
|
|
decl_gz.astgen = ag;
|
|
decl_gz.decl_node_index = proto_node;
|
|
decl_gz.decl_line = decl_line;
|
|
decl_gz.is_comptime = true;
|
|
decl_gz.instructions_top = ag->scratch_inst_len;
|
|
decl_gz.break_block = UINT32_MAX;
|
|
decl_gz.any_defer_node = UINT32_MAX;
|
|
|
|
// --- Parameter iteration (AstGen.zig:4260-4363) ---
|
|
// Walk params, creating param instructions and ScopeLocalVal entries.
|
|
// We keep param scopes on the C stack (max 32 params like upstream).
|
|
Scope* params_scope = &decl_gz.base;
|
|
ScopeLocalVal param_scopes[32];
|
|
uint32_t param_scope_count = 0;
|
|
// Collect param instruction indices (AstGen.zig:4254, 4360).
|
|
uint32_t param_insts[32];
|
|
uint32_t param_insts_len = 0;
|
|
// noalias_bits tracking (AstGen.zig:4259).
|
|
uint32_t noalias_bits = 0;
|
|
// Generic parameter/return type tracking (AstGen.zig:4257).
|
|
bool any_param_used = false;
|
|
// is_var_args detection (AstGen.zig:4261).
|
|
bool is_var_args = false;
|
|
|
|
// Parameter iteration using token-based iterator, mirroring upstream
|
|
// FnProto.Iterator (Ast.zig:2680-2768, AstGen.zig:4260-4363).
|
|
// The params array only contains type-expression params; anytype params
|
|
// exist as tokens between/after the type-expression nodes.
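// E.g. in `fn f(a: u32, b: anytype, c: bool) void`, the params array
// holds only the `u32` and `bool` type nodes; `b: anytype` is found in
// token-scanning mode between them and becomes a param_anytype below.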
|
|
uint32_t lparen = fn_name_token + 1; // '(' token
|
|
uint32_t iter_tok_i = lparen + 1; // first token after '('
|
|
bool iter_tok_flag = true; // start in token-scanning mode
|
|
uint32_t iter_param_i = 0;
|
|
ResultLoc coerced_type_ri = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_TYPE_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
|
|
uint32_t param_type_i = 0; // index for noalias_bits (AstGen.zig:4262)
|
|
while (true) {
|
|
uint32_t name_token = 0;
|
|
uint32_t comptime_noalias_token = 0;
|
|
bool is_comptime_param = false;
|
|
bool is_anytype = false;
|
|
uint32_t param_type_node = 0;
|
|
|
|
if (!iter_tok_flag) {
|
|
// Return next param from params array.
|
|
if (iter_param_i >= params_len)
|
|
break;
|
|
param_type_node = param_nodes[iter_param_i];
|
|
// Scan backwards from type expression to find
|
|
// name/comptime/noalias (Ast.zig:2698-2705).
|
|
uint32_t tok_i = firstToken(tree, param_type_node);
|
|
while (tok_i > 0) {
|
|
tok_i--;
|
|
uint32_t ttag = tree->tokens.tags[tok_i];
|
|
if (ttag == TOKEN_COLON)
|
|
continue;
|
|
if (ttag == TOKEN_IDENTIFIER) {
|
|
name_token = tok_i;
|
|
continue;
|
|
}
|
|
if (ttag == TOKEN_KEYWORD_COMPTIME
|
|
|| ttag == TOKEN_KEYWORD_NOALIAS) {
|
|
comptime_noalias_token = tok_i;
|
|
continue;
|
|
}
|
|
break;
|
|
}
|
|
iter_param_i++;
|
|
iter_tok_i = lastToken(tree, param_type_node) + 1;
|
|
// Skip comma after param for anytype scanning.
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_COMMA)
|
|
iter_tok_i++;
|
|
iter_tok_flag = true;
|
|
} else {
|
|
// Token-scanning mode: look for anytype/ellipsis params
|
|
// (Ast.zig:2721-2767).
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_COMMA)
|
|
iter_tok_i++;
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_R_PAREN)
|
|
break;
|
|
// Skip doc comments.
|
|
while (tree->tokens.tags[iter_tok_i] == TOKEN_DOC_COMMENT)
|
|
iter_tok_i++;
|
|
// Check for ellipsis3 (varargs) (AstGen.zig:4275-4281).
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_ELLIPSIS3) {
|
|
is_var_args = true;
|
|
break;
|
|
}
|
|
// Check for comptime/noalias prefix.
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_KEYWORD_COMPTIME
|
|
|| tree->tokens.tags[iter_tok_i] == TOKEN_KEYWORD_NOALIAS) {
|
|
comptime_noalias_token = iter_tok_i;
|
|
iter_tok_i++;
|
|
}
|
|
// Check for name: identifier followed by colon.
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_IDENTIFIER
|
|
&& tree->tokens.tags[iter_tok_i + 1] == TOKEN_COLON) {
|
|
name_token = iter_tok_i;
|
|
iter_tok_i += 2;
|
|
}
|
|
// Check for anytype keyword.
|
|
if (tree->tokens.tags[iter_tok_i] == TOKEN_KEYWORD_ANYTYPE) {
|
|
is_anytype = true;
|
|
iter_tok_i++;
|
|
} else {
|
|
// Not an anytype param; switch to param-array mode.
|
|
iter_tok_flag = false;
|
|
continue;
|
|
}
|
|
}
|
|
|
|
// Determine is_comptime and noalias from comptime_noalias token
|
|
// (AstGen.zig:4265-4273).
|
|
if (comptime_noalias_token != 0
|
|
&& tree->tokens.tags[comptime_noalias_token]
|
|
== TOKEN_KEYWORD_NOALIAS) {
|
|
// Track noalias_bits (AstGen.zig:4266-4269).
|
|
if (param_type_i < 32)
|
|
noalias_bits |= (1u << param_type_i);
|
|
}
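// E.g. `fn f(noalias a: *u8, b: u8, noalias c: *u8) void` sets bits 0
// and 2, so noalias_bits == 0b101.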
|
|
if (comptime_noalias_token != 0
|
|
&& tree->tokens.tags[comptime_noalias_token]
|
|
== TOKEN_KEYWORD_COMPTIME) {
|
|
is_comptime_param = true;
|
|
}
|
|
|
|
// Determine param name string (AstGen.zig:4283-4321).
|
|
// Must be resolved BEFORE type expression to match upstream string
|
|
// table ordering.
|
|
uint32_t param_name_str = 0; // NullTerminatedString.empty
|
|
if (name_token != 0) {
|
|
uint32_t name_start = tree->tokens.starts[name_token];
|
|
char nch = tree->source[name_start];
|
|
// Skip "_" params (AstGen.zig:4285-4286).
|
|
if (nch == '_') {
|
|
uint32_t next_start = tree->tokens.starts[name_token + 1];
|
|
if (next_start == name_start + 1) {
|
|
// Single underscore: empty name.
|
|
param_name_str = 0;
|
|
} else {
|
|
param_name_str = identAsString(ag, name_token);
|
|
}
|
|
} else {
|
|
param_name_str = identAsString(ag, name_token);
|
|
}
|
|
}
|
|
|
|
// Emit param instruction (AstGen.zig:4323-4345).
|
|
uint32_t param_inst_ref;
|
|
if (is_anytype) {
|
|
// anytype parameter: emit param_anytype/param_anytype_comptime
|
|
// (AstGen.zig:4323-4329).
|
|
uint32_t anytype_name_token
|
|
= name_token != 0 ? name_token : (iter_tok_i - 1);
|
|
ZirInstTag anytype_tag = is_comptime_param
|
|
? ZIR_INST_PARAM_ANYTYPE_COMPTIME
|
|
: ZIR_INST_PARAM_ANYTYPE;
|
|
uint32_t anytype_inst = addStrTok(
|
|
&decl_gz, anytype_tag, param_name_str, anytype_name_token);
|
|
param_inst_ref = anytype_inst; // already a ref (toRef())
|
|
if (param_insts_len < 32)
|
|
param_insts[param_insts_len++]
|
|
= anytype_inst - ZIR_REF_START_INDEX; // toIndex()
|
|
} else {
|
|
// Type-expression parameter (AstGen.zig:4330-4344).
|
|
any_param_used = false; // reset before evaluating type body
|
|
GenZir param_gz = makeSubBlock(&decl_gz, params_scope);
|
|
uint32_t param_type_ref = fullBodyExpr(
|
|
¶m_gz, params_scope, coerced_type_ri, param_type_node);
|
|
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
|
|
// The break_inline target is the param instruction we're about
// to create (AstGen.zig:4336-4337): makeBreakInline appends the
// break at the current inst_len, so the param added by addParam
// below lands at inst_len + 1.
uint32_t param_inst_expected = ag->inst_len + 1;
|
|
makeBreakInline(¶m_gz, param_inst_expected, param_type_ref,
|
|
(int32_t)param_type_node - (int32_t)param_gz.decl_node_index);
|
|
bool param_type_is_generic = any_param_used;
|
|
|
|
// Create param instruction (AstGen.zig:4341-4343).
|
|
ZirInstTag param_tag
|
|
= is_comptime_param ? ZIR_INST_PARAM_COMPTIME : ZIR_INST_PARAM;
|
|
uint32_t name_tok_for_src = name_token != 0
|
|
? name_token
|
|
: tree->nodes.main_tokens[param_type_node];
|
|
uint32_t param_inst = addParam(&decl_gz, ¶m_gz, param_tag,
|
|
name_tok_for_src, param_name_str, param_type_is_generic);
|
|
(void)param_inst_expected;
|
|
param_inst_ref = param_inst + ZIR_REF_START_INDEX;
|
|
if (param_insts_len < 32)
|
|
param_insts[param_insts_len++] = param_inst;
|
|
}
|
|
|
|
// Create ScopeLocalVal for this param (AstGen.zig:4349-4359).
|
|
if (param_name_str != 0 && param_scope_count < 32) {
|
|
ScopeLocalVal* lv = ¶m_scopes[param_scope_count++];
|
|
lv->base.tag = SCOPE_LOCAL_VAL;
|
|
lv->parent = params_scope;
|
|
lv->gen_zir = &decl_gz;
|
|
lv->inst = param_inst_ref;
|
|
lv->token_src = name_token;
|
|
lv->name = param_name_str;
|
|
lv->is_used_or_discarded = &any_param_used;
|
|
params_scope = &lv->base;
|
|
}
|
|
param_type_i++;
|
|
}
|
|
|
|
// --- Return type (AstGen.zig:4369-4383) ---
|
|
any_param_used = false; // reset for return type generic detection
|
|
GenZir ret_gz = makeSubBlock(&decl_gz, params_scope);
|
|
uint32_t ret_ref = ZIR_REF_NONE;
|
|
if (return_type_node != 0) {
|
|
ret_ref = fullBodyExpr(
|
|
&ret_gz, params_scope, coerced_type_ri, return_type_node);
|
|
if (ag->has_compile_errors)
|
|
return;
|
|
// If ret_gz produced instructions, add break_inline
|
|
// (AstGen.zig:4377-4381).
|
|
if (gzInstructionsLen(&ret_gz) > 0) {
|
|
// break_inline targets the func instruction (which doesn't
|
|
// exist yet). We use 0 as placeholder and patch later.
|
|
makeBreakInline(&ret_gz, 0, ret_ref, AST_NODE_OFFSET_NONE);
|
|
}
|
|
}
|
|
// Fetch ref entries for params used in return type (AstGen.zig:4384).
|
|
uint32_t ret_param_refs[32];
|
|
uint32_t ret_param_refs_len = fetchRemoveRefEntries(
|
|
ag, param_insts, param_insts_len, ret_param_refs, 32);
|
|
bool ret_ty_is_generic = any_param_used;
|
|
// Map void_type → .none (AstGen.zig:12054).
|
|
if (ret_ref == ZIR_REF_VOID_TYPE)
|
|
ret_ref = ZIR_REF_NONE;
|
|
|
|
uint32_t ret_body_len = gzInstructionsLen(&ret_gz);
|
|
// Copy ret_body before unstacking: body_gz reuses the same scratch area.
|
|
uint32_t* ret_body = NULL;
|
|
if (ret_body_len > 0) {
|
|
ret_body = malloc(ret_body_len * sizeof(uint32_t));
|
|
if (!ret_body)
|
|
abort();
|
|
memcpy(ret_body, gzInstructionsSlice(&ret_gz),
|
|
ret_body_len * sizeof(uint32_t));
|
|
}
|
|
gzUnstack(&ret_gz);
|
|
|
|
// Restore source cursor (AstGen.zig:4387-4388).
|
|
ag->source_offset = saved_source_offset;
|
|
ag->source_line = saved_source_line;
|
|
ag->source_column = saved_source_column;
|
|
|
|
// --- Calling convention (AstGen.zig:4390-4413) ---
|
|
// Note: cc_gz uses `scope` (= &decl_gz.base), not params_scope.
|
|
GenZir cc_gz = makeSubBlock(&decl_gz, &decl_gz.base);
|
|
uint32_t cc_ref = ZIR_REF_NONE;
|
|
if (callconv_expr_node != 0) {
|
|
// Explicit callconv(expr) (AstGen.zig:4393-4405).
|
|
uint32_t cc_ty = addBuiltinValue(
|
|
&cc_gz, callconv_expr_node, ZIR_BUILTIN_VALUE_CALLING_CONVENTION);
|
|
ResultLoc cc_ri = { .tag = RL_COERCED_TY,
|
|
.data = cc_ty,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
cc_ref = exprRl(&cc_gz, &decl_gz.base, cc_ri, callconv_expr_node);
|
|
if (ag->has_compile_errors) {
|
|
free(ret_body);
|
|
return;
|
|
}
|
|
if (gzInstructionsLen(&cc_gz) > 0) {
|
|
// break_inline targets the func instruction (patched later).
|
|
makeBreakInline(&cc_gz, 0, cc_ref, AST_NODE_OFFSET_NONE);
|
|
}
|
|
} else if (has_inline_keyword) {
|
|
// inline keyword → calling_convention_inline
|
|
// (AstGen.zig:4406-4409).
|
|
cc_ref = addBuiltinValue(
|
|
&cc_gz, node, ZIR_BUILTIN_VALUE_CALLING_CONVENTION_INLINE);
|
|
makeBreakInline(&cc_gz, 0, cc_ref, AST_NODE_OFFSET_NONE);
|
|
}
|
|
|
|
uint32_t cc_body_len = gzInstructionsLen(&cc_gz);
|
|
uint32_t* cc_body = NULL;
|
|
if (cc_body_len > 0) {
|
|
cc_body = malloc(cc_body_len * sizeof(uint32_t));
|
|
if (!cc_body)
|
|
abort();
|
|
memcpy(cc_body, gzInstructionsSlice(&cc_gz),
|
|
cc_body_len * sizeof(uint32_t));
|
|
}
|
|
gzUnstack(&cc_gz);
|
|
|
|
// --- Body handling ---
|
|
// For fn_proto* (no body): extern function, emit type only
|
|
// (AstGen.zig:4136-4164).
|
|
// For fn_decl (has body): emit function value (AstGen.zig:4197-4201).
|
|
uint32_t func_ref;
|
|
if (body_node == 0) {
|
|
// fn_proto without body: extern function type.
|
|
// Upstream emits fnProtoExprInner; we SET_ERROR for now as
|
|
// fnProtoExprInner is not yet ported.
|
|
// TODO: implement fnProtoExprInner for extern fn support.
|
|
SET_ERROR(ag);
|
|
free(ret_body);
|
|
free(cc_body);
|
|
gzUnstack(&decl_gz);
|
|
ag->within_fn = prev_within_fn;
|
|
return;
|
|
}
|
|
|
|
// --- Body (AstGen.zig:4415-4424) ---
|
|
GenZir body_gz;
|
|
memset(&body_gz, 0, sizeof(body_gz));
|
|
body_gz.base.tag = SCOPE_GEN_ZIR;
|
|
body_gz.parent = params_scope;
|
|
body_gz.astgen = ag;
|
|
body_gz.decl_node_index = proto_node;
|
|
body_gz.decl_line = decl_line;
|
|
body_gz.is_comptime = false;
|
|
body_gz.instructions_top = ag->scratch_inst_len;
|
|
body_gz.any_defer_node = UINT32_MAX;
|
|
|
|
// Set fn_block, fn_ret_ty, fn_var_args for the body
|
|
// (AstGen.zig:4442-4459).
|
|
void* prev_fn_block = ag->fn_block;
|
|
setFnBlock(ag, &body_gz);
|
|
uint32_t prev_fn_ret_ty = ag->fn_ret_ty;
|
|
bool prev_fn_var_args = ag->fn_var_args;
|
|
ag->fn_var_args = is_var_args;
|
|
if (is_inferred_error || ret_ref == ZIR_REF_NONE) {
|
|
// Non-void non-trivial return type: emit ret_type instruction.
|
|
if (ret_body_len > 0 || is_inferred_error) {
|
|
ZirInstData rtdata;
|
|
memset(&rtdata, 0, sizeof(rtdata));
|
|
rtdata.node = (int32_t)node - (int32_t)body_gz.decl_node_index;
|
|
ag->fn_ret_ty
|
|
= addInstruction(&body_gz, ZIR_INST_RET_TYPE, rtdata);
|
|
} else {
|
|
ag->fn_ret_ty = ret_ref; // void
|
|
}
|
|
} else {
|
|
// ret_ref is a simple ref (not void, not inferred error).
|
|
// Still need ret_type instruction if it resolved to an inst.
|
|
if (ret_ref >= ZIR_REF_START_INDEX) {
|
|
ZirInstData rtdata;
|
|
memset(&rtdata, 0, sizeof(rtdata));
|
|
rtdata.node = (int32_t)node - (int32_t)body_gz.decl_node_index;
|
|
ag->fn_ret_ty
|
|
= addInstruction(&body_gz, ZIR_INST_RET_TYPE, rtdata);
|
|
} else {
|
|
ag->fn_ret_ty = ret_ref;
|
|
}
|
|
}
|
|
|
|
// Process function body (AstGen.zig:4461-4465).
|
|
advanceSourceCursorToNode(ag, body_node);
|
|
uint32_t lbrace_line = ag->source_line - decl_line;
|
|
uint32_t lbrace_column = ag->source_column;
|
|
|
|
fullBodyExpr(&body_gz, &body_gz.base, RL_NONE_VAL, body_node);
|
|
|
|
ag->within_fn = prev_within_fn;
|
|
ag->fn_block = prev_fn_block;
|
|
ag->fn_ret_ty = prev_fn_ret_ty;
|
|
ag->fn_var_args = prev_fn_var_args;
|
|
|
|
if (ag->has_compile_errors) {
|
|
free(ret_body);
|
|
free(cc_body);
|
|
return;
|
|
}
|
|
|
|
// Add implicit return at end of function body
|
|
// (AstGen.zig:4465-4871).
|
|
if (!endsWithNoReturn(&body_gz)) {
|
|
ZirInstData rdata;
|
|
rdata.un_node.operand = ZIR_REF_NONE;
|
|
rdata.un_node.src_node
|
|
= (int32_t)node - (int32_t)body_gz.decl_node_index;
|
|
addInstruction(
|
|
&body_gz, ZIR_INST_RESTORE_ERR_RET_INDEX_UNCONDITIONAL, rdata);
|
|
|
|
uint32_t body_last_tok = lastToken(tree, body_node);
|
|
ZirInstData rdata2;
|
|
rdata2.un_tok.operand = ZIR_REF_VOID_VALUE;
|
|
rdata2.un_tok.src_tok = tokenIndexToRelative(&body_gz, body_last_tok);
|
|
addInstruction(&body_gz, ZIR_INST_RET_IMPLICIT, rdata2);
|
|
}
|
|
|
|
// Read body before unstacking (AstGen.zig:12215-12218).
|
|
const uint32_t* fn_body = gzInstructionsSlice(&body_gz);
|
|
uint32_t fn_body_len = gzInstructionsLen(&body_gz);
|
|
gzUnstack(&body_gz);
|
|
|
|
// Create func/func_fancy instruction (AstGen.zig:4476-4494,
|
|
// 12112-12173).
|
|
bool need_fancy = cc_ref != ZIR_REF_NONE || is_var_args
|
|
|| noalias_bits != 0 || is_noinline;
|
|
if (need_fancy) {
|
|
func_ref = addFuncFancy(&decl_gz, node, body_node, decl_inst, ret_ref,
|
|
ret_body, ret_body_len, cc_ref, cc_body, cc_body_len, fn_body,
|
|
fn_body_len, param_insts, param_insts_len, lbrace_line,
|
|
lbrace_column, is_var_args, is_inferred_error, is_noinline,
|
|
noalias_bits, ret_ty_is_generic, ret_param_refs,
|
|
ret_param_refs_len);
|
|
} else {
|
|
func_ref = addFunc(&decl_gz, node, body_node, decl_inst, ret_ref,
|
|
ret_body, ret_body_len, fn_body, fn_body_len, param_insts,
|
|
param_insts_len, lbrace_line, lbrace_column, is_inferred_error,
|
|
ret_ty_is_generic, ret_param_refs, ret_param_refs_len);
|
|
}
|
|
|
|
// Patch ret_body break_inline to point to func instruction
|
|
// (AstGen.zig:12199-12202).
|
|
if (ret_body_len > 0) {
|
|
uint32_t break_inst = ret_body[ret_body_len - 1];
|
|
uint32_t break_payload
|
|
= ag->inst_datas[break_inst].break_data.payload_index;
|
|
ag->extra[break_payload + 1] = func_ref - ZIR_REF_START_INDEX;
|
|
}
|
|
// Patch cc_body break_inline to point to func instruction
|
|
// (AstGen.zig:12146-12148).
|
|
if (cc_body_len > 0) {
|
|
uint32_t break_inst = cc_body[cc_body_len - 1];
|
|
uint32_t break_payload
|
|
= ag->inst_datas[break_inst].break_data.payload_index;
|
|
ag->extra[break_payload + 1] = func_ref - ZIR_REF_START_INDEX;
|
|
}
|
|
free(ret_body);
|
|
free(cc_body);
|
|
|
|
// break_inline returning func to declaration (AstGen.zig:4495).
|
|
// nodeIndexToRelative(decl_node) = node - decl_gz.decl_node_index.
|
|
makeBreakInline(
|
|
&decl_gz, decl_inst, func_ref, (int32_t)node - (int32_t)proto_node);
|
|
|
|
// setDeclaration (AstGen.zig:4208-4225).
|
|
// Linkage: export > normal (AstGen.zig:4217).
|
|
DeclFlagsId decl_id;
|
|
if (is_export)
|
|
decl_id = is_pub ? DECL_ID_PUB_EXPORT_CONST : DECL_ID_EXPORT_CONST;
|
|
else
|
|
decl_id = is_pub ? DECL_ID_PUB_CONST_SIMPLE : DECL_ID_CONST_SIMPLE;
|
|
uint32_t name_str = identAsString(ag, fn_name_token);
|
|
setDeclaration(ag, decl_inst,
|
|
(SetDeclArgs) { .src_line = decl_line,
|
|
.src_column = decl_column,
|
|
.id = decl_id,
|
|
.name = name_str,
|
|
.lib_name = UINT32_MAX,
|
|
.value_body = gzInstructionsSlice(&decl_gz),
|
|
.value_body_len = gzInstructionsLen(&decl_gz) });
|
|
gzUnstack(&decl_gz);
|
|
|
|
(void)gz;
|
|
}
|
|
|
|
// --- comptimeDecl (AstGen.zig:4645) ---
|
|
|
|
static void comptimeDecl(AstGenCtx* ag, GenZir* gz, Scope* scope,
|
|
uint32_t* wip_decl_insts, uint32_t* decl_idx, uint32_t node) {
|
|
// makeDeclaration before advanceSourceCursorToNode (AstGen.zig:4663-4665).
|
|
uint32_t decl_inst = makeDeclaration(ag, node);
|
|
wip_decl_insts[*decl_idx] = decl_inst;
|
|
(*decl_idx)++;
|
|
|
|
advanceSourceCursorToNode(ag, node);
|
|
|
|
uint32_t decl_line = ag->source_line;
|
|
uint32_t decl_column = ag->source_column;
|
|
|
|
// Value sub-block (AstGen.zig:4675-4686).
|
|
GenZir value_gz;
|
|
memset(&value_gz, 0, sizeof(value_gz));
|
|
value_gz.base.tag = SCOPE_GEN_ZIR;
|
|
value_gz.parent = scope;
|
|
value_gz.astgen = ag;
|
|
value_gz.decl_node_index = node;
|
|
value_gz.decl_line = decl_line;
|
|
value_gz.is_comptime = true;
|
|
value_gz.instructions_top = ag->scratch_inst_len;
|
|
value_gz.any_defer_node = UINT32_MAX;
|
|
|
|
// Only the empty-body case (`comptime {}`) is handled here: the body
// produces no instructions, so comptime_gz.isEmpty() == true and we
// emit break_inline(decl_inst, .void_value) directly
// (AstGen.zig:4685-4686). Non-empty comptime bodies are not evaluated
// in this function yet.
|
|
makeBreakInline(
|
|
&value_gz, decl_inst, ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
|
|
setDeclaration(ag, decl_inst,
|
|
(SetDeclArgs) { .src_line = decl_line,
|
|
.src_column = decl_column,
|
|
.id = DECL_ID_COMPTIME,
|
|
.name = 0,
|
|
.lib_name = UINT32_MAX,
|
|
.value_body = gzInstructionsSlice(&value_gz),
|
|
.value_body_len = gzInstructionsLen(&value_gz) });
|
|
gzUnstack(&value_gz);
|
|
|
|
(void)gz;
|
|
}
|
|
|
|
// --- globalVarDecl (AstGen.zig:4498) ---
|
|
|
|
// Extract VarDecl fields from an AST node (Ast.zig:1326-1380).
|
|
typedef struct {
|
|
uint32_t mut_token;
|
|
uint32_t type_node; // 0 = none
|
|
uint32_t align_node; // 0 = none
|
|
uint32_t addrspace_node; // 0 = none
|
|
uint32_t section_node; // 0 = none
|
|
uint32_t init_node; // UINT32_MAX = none
|
|
bool is_pub;
|
|
bool is_extern;
|
|
bool is_export;
|
|
bool is_mutable;
|
|
bool is_threadlocal;
|
|
uint32_t lib_name_token; // UINT32_MAX = none
|
|
} VarDeclInfo;
|
|
|
|
static VarDeclInfo extractVarDecl(const Ast* tree, uint32_t node) {
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
uint32_t mut_token = tree->nodes.main_tokens[node];
|
|
VarDeclInfo info;
|
|
memset(&info, 0, sizeof(info));
|
|
info.mut_token = mut_token;
|
|
info.init_node = UINT32_MAX;
|
|
info.lib_name_token = UINT32_MAX;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
// lhs = type_node (optional), rhs = init_node (optional)
|
|
info.type_node = nd.lhs;
|
|
info.init_node = nd.rhs;
|
|
break;
|
|
case AST_NODE_ALIGNED_VAR_DECL:
|
|
// lhs = align_node, rhs = init_node (optional)
|
|
info.align_node = nd.lhs;
|
|
info.init_node = nd.rhs;
|
|
break;
|
|
case AST_NODE_GLOBAL_VAR_DECL: {
|
|
// lhs = extra_data index, rhs = init_node (optional)
|
|
uint32_t ei = nd.lhs;
|
|
info.type_node = tree->extra_data.arr[ei + 0];
|
|
info.align_node = tree->extra_data.arr[ei + 1];
|
|
info.addrspace_node = tree->extra_data.arr[ei + 2];
|
|
info.section_node = tree->extra_data.arr[ei + 3];
|
|
info.init_node = nd.rhs;
|
|
break;
|
|
}
|
|
case AST_NODE_LOCAL_VAR_DECL: {
|
|
// lhs = extra_data index, rhs = init_node (optional)
|
|
uint32_t ei = nd.lhs;
|
|
info.type_node = tree->extra_data.arr[ei + 0];
|
|
info.align_node = tree->extra_data.arr[ei + 1];
|
|
info.init_node = nd.rhs;
|
|
break;
|
|
}
|
|
default:
|
|
break;
|
|
}
|
|
|
|
// Scan backwards from mut_token to find modifiers (Ast.zig:2003-2025).
|
|
info.is_mutable = (tree->tokens.tags[mut_token] == TOKEN_KEYWORD_VAR);
|
|
for (uint32_t i = mut_token; i > 0;) {
|
|
i--;
|
|
TokenizerTag ttag = tree->tokens.tags[i];
|
|
if (ttag == TOKEN_KEYWORD_EXTERN)
|
|
info.is_extern = true;
|
|
else if (ttag == TOKEN_KEYWORD_EXPORT)
|
|
info.is_export = true;
|
|
else if (ttag == TOKEN_KEYWORD_PUB)
|
|
info.is_pub = true;
|
|
else if (ttag == TOKEN_KEYWORD_THREADLOCAL)
|
|
info.is_threadlocal = true;
|
|
else if (ttag == TOKEN_STRING_LITERAL)
|
|
info.lib_name_token = i;
|
|
else
|
|
break;
|
|
}
|
|
return info;
|
|
}
|
|
|
|
// Compute DeclFlagsId from VarDecl properties (AstGen.zig:13916-13972).
|
|
static DeclFlagsId computeVarDeclId(bool is_mutable, bool is_pub,
|
|
bool is_extern, bool is_export, bool is_threadlocal, bool has_type_body,
|
|
bool has_special_body, bool has_lib_name) {
|
|
if (!is_mutable) {
|
|
// const
|
|
if (is_extern) {
|
|
if (is_pub) {
|
|
if (has_lib_name || has_special_body)
|
|
return DECL_ID_PUB_EXTERN_CONST;
|
|
return DECL_ID_PUB_EXTERN_CONST_SIMPLE;
|
|
}
|
|
if (has_lib_name || has_special_body)
|
|
return DECL_ID_EXTERN_CONST;
|
|
return DECL_ID_EXTERN_CONST_SIMPLE;
|
|
}
|
|
if (is_export)
|
|
return is_pub ? DECL_ID_PUB_EXPORT_CONST : DECL_ID_EXPORT_CONST;
|
|
if (is_pub) {
|
|
if (has_special_body)
|
|
return DECL_ID_PUB_CONST;
|
|
if (has_type_body)
|
|
return DECL_ID_PUB_CONST_TYPED;
|
|
return DECL_ID_PUB_CONST_SIMPLE;
|
|
}
|
|
if (has_special_body)
|
|
return DECL_ID_CONST;
|
|
if (has_type_body)
|
|
return DECL_ID_CONST_TYPED;
|
|
return DECL_ID_CONST_SIMPLE;
|
|
}
|
|
// var
|
|
if (is_extern) {
|
|
if (is_pub) {
|
|
if (is_threadlocal)
|
|
return DECL_ID_PUB_EXTERN_VAR_THREADLOCAL;
|
|
return DECL_ID_PUB_EXTERN_VAR;
|
|
}
|
|
if (is_threadlocal)
|
|
return DECL_ID_EXTERN_VAR_THREADLOCAL;
|
|
return DECL_ID_EXTERN_VAR;
|
|
}
|
|
if (is_export) {
|
|
if (is_pub) {
|
|
if (is_threadlocal)
|
|
return DECL_ID_PUB_EXPORT_VAR_THREADLOCAL;
|
|
return DECL_ID_PUB_EXPORT_VAR;
|
|
}
|
|
if (is_threadlocal)
|
|
return DECL_ID_EXPORT_VAR_THREADLOCAL;
|
|
return DECL_ID_EXPORT_VAR;
|
|
}
|
|
if (is_pub) {
|
|
if (is_threadlocal)
|
|
return DECL_ID_PUB_VAR_THREADLOCAL;
|
|
if (has_special_body || has_type_body)
|
|
return DECL_ID_PUB_VAR;
|
|
return DECL_ID_PUB_VAR_SIMPLE;
|
|
}
|
|
if (is_threadlocal)
|
|
return DECL_ID_VAR_THREADLOCAL;
|
|
if (has_special_body || has_type_body)
|
|
return DECL_ID_VAR;
|
|
return DECL_ID_VAR_SIMPLE;
|
|
}
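// Illustrative mappings (assuming has_type_body reflects a type
// annotation and has_special_body any align/addrspace/section body):
//   `const x = 1;`               -> DECL_ID_CONST_SIMPLE
//   `pub const y: u32 = 1;`      -> DECL_ID_PUB_CONST_TYPED
//   `pub extern "c" var z: u32;` -> DECL_ID_PUB_EXTERN_VAR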
|
|
|
|
// Mirrors nameStratExpr (AstGen.zig:1160-1199).
|
|
// Checks if node is a container decl or @Type builtin; if so, dispatches
|
|
// with the given name_strategy. Returns true if handled (result stored in
|
|
// *out_ref), false if caller should fall back to expr().
|
|
static bool nameStratExpr(GenZir* gz, Scope* scope, ResultLoc rl,
|
|
uint32_t node, uint8_t name_strategy, uint32_t* out_ref) {
|
|
const AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
(void)rl; // Used by builtinReify (not yet implemented).
|
|
|
|
switch (tag) {
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
|
|
*out_ref = containerDecl(gz, scope, node, name_strategy);
|
|
return true;
|
|
// @Type builtin: upstream calls builtinReify (AstGen.zig:1186-1196).
|
|
// Not yet implemented; fall through to expr().
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
|
|
static void globalVarDecl(AstGenCtx* ag, GenZir* gz, Scope* scope,
|
|
uint32_t* wip_decl_insts, uint32_t* decl_idx, uint32_t node) {
|
|
const Ast* tree = ag->tree;
|
|
VarDeclInfo vd = extractVarDecl(tree, node);
|
|
uint32_t name_token = vd.mut_token + 1;
|
|
|
|
// "threadlocal variable cannot be constant" (AstGen.zig:4526-4528).
|
|
if (vd.is_threadlocal && !vd.is_mutable) {
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
|
|
// lib_name validation (AstGen.zig:4531-4540).
|
|
uint32_t lib_name = UINT32_MAX;
|
|
if (vd.lib_name_token != UINT32_MAX) {
|
|
uint32_t li, ll;
|
|
strLitAsString(ag, vd.lib_name_token, &li, &ll);
|
|
// "library name cannot contain null bytes" (AstGen.zig:4534-4535).
|
|
if (memchr(ag->string_bytes + li, 0, ll) != NULL) {
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
// "library name cannot be empty" (AstGen.zig:4536-4537).
|
|
if (ll == 0) {
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
lib_name = li;
|
|
}
|
|
|
|
// advanceSourceCursorToNode before makeDeclaration (AstGen.zig:4542-4546).
|
|
advanceSourceCursorToNode(ag, node);
|
|
uint32_t decl_column = ag->source_column;
|
|
|
|
uint32_t decl_inst = makeDeclaration(ag, node);
|
|
wip_decl_insts[*decl_idx] = decl_inst;
|
|
(*decl_idx)++;
|
|
|
|
// "extern variables have no initializers" (AstGen.zig:4549-4556).
|
|
if (vd.init_node != UINT32_MAX && vd.init_node != 0) {
|
|
if (vd.is_extern) {
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
} else {
|
|
// "variables must be initialized" (AstGen.zig:4557-4561).
|
|
if (!vd.is_extern) {
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
}
|
|
|
|
// "unable to infer variable type" (AstGen.zig:4563-4565).
|
|
if (vd.is_extern && vd.type_node == 0) {
|
|
SET_ERROR(ag);
|
|
return;
|
|
}
|
|
|
|
// Set up type sub-block (AstGen.zig:4574-4582).
|
|
GenZir type_gz;
|
|
memset(&type_gz, 0, sizeof(type_gz));
|
|
type_gz.base.tag = SCOPE_GEN_ZIR;
|
|
type_gz.parent = scope;
|
|
type_gz.astgen = ag;
|
|
type_gz.decl_node_index = node;
|
|
type_gz.instructions_top = ag->scratch_inst_len;
|
|
type_gz.decl_line = ag->source_line;
|
|
type_gz.is_comptime = true;
|
|
type_gz.any_defer_node = UINT32_MAX;
|
|
|
|
if (vd.type_node != 0) {
|
|
uint32_t type_inst = typeExpr(&type_gz, &type_gz.base, vd.type_node);
|
|
makeBreakInline(&type_gz, decl_inst, type_inst, 0);
|
|
}
|
|
|
|
// Record type_gz boundary for slicing.
|
|
uint32_t type_top = ag->scratch_inst_len;
|
|
|
|
// Align sub-block (AstGen.zig:4585-4591).
|
|
GenZir align_gz;
|
|
memset(&align_gz, 0, sizeof(align_gz));
|
|
align_gz.base.tag = SCOPE_GEN_ZIR;
|
|
align_gz.parent = scope;
|
|
align_gz.astgen = ag;
|
|
align_gz.decl_node_index = node;
|
|
align_gz.instructions_top = type_top;
|
|
align_gz.decl_line = ag->source_line;
|
|
align_gz.is_comptime = true;
|
|
align_gz.any_defer_node = UINT32_MAX;
|
|
|
|
if (vd.align_node != 0) {
|
|
// coerced_align_ri = { .rl = .{ .coerced_ty = .u29_type } }
|
|
// (AstGen.zig:389, 4589).
|
|
ResultLoc align_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_U29_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t align_inst
|
|
= exprRl(&align_gz, &align_gz.base, align_rl, vd.align_node);
|
|
makeBreakInline(&align_gz, decl_inst, align_inst, 0);
|
|
}
|
|
|
|
uint32_t align_top = ag->scratch_inst_len;
|
|
|
|
// Linksection sub-block (AstGen.zig:4593-4599).
|
|
GenZir linksection_gz;
|
|
memset(&linksection_gz, 0, sizeof(linksection_gz));
|
|
linksection_gz.base.tag = SCOPE_GEN_ZIR;
|
|
linksection_gz.parent = scope;
|
|
linksection_gz.astgen = ag;
|
|
linksection_gz.decl_node_index = node;
|
|
linksection_gz.instructions_top = align_top;
|
|
linksection_gz.decl_line = ag->source_line;
|
|
linksection_gz.is_comptime = true;
|
|
linksection_gz.any_defer_node = UINT32_MAX;
|
|
|
|
if (vd.section_node != 0) {
|
|
// coerced_linksection_ri = { .rl = .{ .coerced_ty =
|
|
// .slice_const_u8_type } } (AstGen.zig:390, 4597).
|
|
ResultLoc ls_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_SLICE_CONST_U8_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t ls_inst = exprRl(
|
|
&linksection_gz, &linksection_gz.base, ls_rl, vd.section_node);
|
|
makeBreakInline(&linksection_gz, decl_inst, ls_inst, 0);
|
|
}
|
|
|
|
uint32_t linksection_top = ag->scratch_inst_len;
|
|
|
|
// Addrspace sub-block (AstGen.zig:4601-4608).
|
|
GenZir addrspace_gz;
|
|
memset(&addrspace_gz, 0, sizeof(addrspace_gz));
|
|
addrspace_gz.base.tag = SCOPE_GEN_ZIR;
|
|
addrspace_gz.parent = scope;
|
|
addrspace_gz.astgen = ag;
|
|
addrspace_gz.decl_node_index = node;
|
|
addrspace_gz.instructions_top = linksection_top;
|
|
addrspace_gz.decl_line = ag->source_line;
|
|
addrspace_gz.is_comptime = true;
|
|
addrspace_gz.any_defer_node = UINT32_MAX;
|
|
|
|
if (vd.addrspace_node != 0) {
|
|
// Upstream: addBuiltinValue(addrspace_node, .address_space) then
|
|
// coerced_ty with that result (AstGen.zig:4605-4606).
|
|
uint32_t addrspace_ty = addBuiltinValue(
|
|
&addrspace_gz, vd.addrspace_node, ZIR_BUILTIN_VALUE_ADDRESS_SPACE);
|
|
ResultLoc as_rl = { .tag = RL_COERCED_TY,
|
|
.data = addrspace_ty,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t as_inst = exprRl(
|
|
&addrspace_gz, &addrspace_gz.base, as_rl, vd.addrspace_node);
|
|
makeBreakInline(&addrspace_gz, decl_inst, as_inst, 0);
|
|
}
|
|
|
|
uint32_t addrspace_top = ag->scratch_inst_len;
|
|
|
|
// Value sub-block (AstGen.zig:4610-4621).
|
|
GenZir value_gz;
|
|
memset(&value_gz, 0, sizeof(value_gz));
|
|
value_gz.base.tag = SCOPE_GEN_ZIR;
|
|
value_gz.parent = scope;
|
|
value_gz.astgen = ag;
|
|
value_gz.decl_node_index = node;
|
|
value_gz.instructions_top = addrspace_top;
|
|
value_gz.decl_line = ag->source_line;
|
|
value_gz.is_comptime = true;
|
|
value_gz.any_defer_node = UINT32_MAX;
|
|
|
|
if (vd.init_node != UINT32_MAX && vd.init_node != 0) {
|
|
// Upstream: coerced_ty = decl_inst.toRef() when type_node present
|
|
// (AstGen.zig:4614-4616).
|
|
ResultLoc init_rl;
|
|
memset(&init_rl, 0, sizeof(init_rl));
|
|
if (vd.type_node != 0) {
|
|
init_rl.tag = RL_COERCED_TY;
|
|
init_rl.data = decl_inst + ZIR_REF_START_INDEX;
|
|
} else {
|
|
init_rl.tag = RL_NONE;
|
|
}
|
|
// nameStratExpr: check if init is container decl (AstGen.zig:4617).
|
|
uint32_t init_ref;
|
|
if (!nameStratExpr(&value_gz, &value_gz.base, init_rl, vd.init_node,
|
|
0 /* parent */, &init_ref)) {
|
|
init_ref
|
|
= exprRl(&value_gz, &value_gz.base, init_rl, vd.init_node);
|
|
}
|
|
makeBreakInline(&value_gz, decl_inst, init_ref, 0);
|
|
}
|
|
|
|
// Compute body slices (instructionsSliceUpto).
|
|
const uint32_t* type_body
|
|
= ag->scratch_instructions + type_gz.instructions_top;
|
|
uint32_t type_body_len = type_top - type_gz.instructions_top;
|
|
const uint32_t* align_body
|
|
= ag->scratch_instructions + align_gz.instructions_top;
|
|
uint32_t align_body_len = align_top - align_gz.instructions_top;
|
|
const uint32_t* ls_body
|
|
= ag->scratch_instructions + linksection_gz.instructions_top;
|
|
uint32_t ls_body_len = linksection_top - linksection_gz.instructions_top;
|
|
const uint32_t* as_body
|
|
= ag->scratch_instructions + addrspace_gz.instructions_top;
|
|
uint32_t as_body_len = addrspace_top - addrspace_gz.instructions_top;
|
|
const uint32_t* val_body = gzInstructionsSlice(&value_gz);
|
|
uint32_t val_body_len = gzInstructionsLen(&value_gz);
|
|
|
|
bool has_type_body = (type_body_len > 0);
|
|
bool has_special_body
|
|
= (align_body_len > 0 || ls_body_len > 0 || as_body_len > 0);
|
|
bool has_lib_name = (vd.lib_name_token != UINT32_MAX);
|
|
|
|
uint32_t name_str = identAsString(ag, name_token);
|
|
|
|
DeclFlagsId decl_id = computeVarDeclId(vd.is_mutable, vd.is_pub,
|
|
vd.is_extern, vd.is_export, vd.is_threadlocal, has_type_body,
|
|
has_special_body, has_lib_name);
|
|
|
|
setDeclaration(ag, decl_inst,
|
|
(SetDeclArgs) { .src_line = type_gz.decl_line,
|
|
.src_column = decl_column,
|
|
.id = decl_id,
|
|
.name = name_str,
|
|
.lib_name = lib_name,
|
|
.type_body = type_body,
|
|
.type_body_len = type_body_len,
|
|
.align_body = align_body,
|
|
.align_body_len = align_body_len,
|
|
.linksection_body = ls_body,
|
|
.linksection_body_len = ls_body_len,
|
|
.addrspace_body = as_body,
|
|
.addrspace_body_len = as_body_len,
|
|
.value_body = val_body,
|
|
.value_body_len = val_body_len });
|
|
|
|
gzUnstack(&value_gz);
|
|
|
|
(void)gz;
|
|
}
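// Rough example: `var x: u32 align(4) = 1;` yields a type body (u32), an
// align body (4 coerced to u29), and a value body (1 coerced to the
// declared type via decl_inst's ref), each ending with a break_inline back
// to decl_inst; the linksection and addrspace bodies stay empty.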
|
|
|
|
// --- nodeImpliesMoreThanOnePossibleValue (AstGen.zig:10548) ---
|
|
// Check if an identifier is a primitive type with more than one value.
|
|
static bool identImpliesMoreThanOnePossibleValue(
|
|
const Ast* tree, uint32_t main_token) {
|
|
uint32_t start = tree->tokens.starts[main_token];
|
|
const char* src = tree->source + start;
|
|
// Match known primitive types that have more than one possible value
// (AstGen.zig:10729-10766). This matches by prefix on the raw source
// bytes, assuming the token is a primitive identifier, rather than doing
// the exact full-name lookup that upstream performs.
|
|
if (src[0] == 'u' || src[0] == 'i') {
|
|
// u8, u16, u32, u64, u128, u1, u29, usize, i8, i16, i32, i64, i128,
|
|
// isize
|
|
char c1 = src[1];
|
|
if (c1 >= '0' && c1 <= '9')
|
|
return true;
|
|
if (c1 == 's') // usize, isize
|
|
return (src[2] == 'i' && src[3] == 'z' && src[4] == 'e');
|
|
}
|
|
if (src[0] == 'f') {
|
|
// f16, f32, f64, f80, f128
|
|
char c1 = src[1];
|
|
if (c1 >= '0' && c1 <= '9')
|
|
return true;
|
|
}
|
|
if (src[0] == 'b' && src[1] == 'o' && src[2] == 'o' && src[3] == 'l'
|
|
&& !(src[4] >= 'a' && src[4] <= 'z')
|
|
&& !(src[4] >= 'A' && src[4] <= 'Z')
|
|
&& !(src[4] >= '0' && src[4] <= '9') && src[4] != '_')
|
|
return true;
|
|
if (src[0] == 'c' && src[1] == '_')
|
|
return true; // c_int, c_long, etc.
|
|
if (src[0] == 'a' && src[1] == 'n' && src[2] == 'y') {
|
|
// anyerror, anyframe, anyopaque
|
|
return true;
|
|
}
|
|
if (src[0] == 'c' && src[1] == 'o' && src[2] == 'm' && src[3] == 'p'
|
|
&& src[4] == 't' && src[5] == 'i' && src[6] == 'm' && src[7] == 'e')
|
|
return true; // comptime_float, comptime_int
|
|
if (src[0] == 't' && src[1] == 'y' && src[2] == 'p' && src[3] == 'e'
|
|
&& !(src[4] >= 'a' && src[4] <= 'z')
|
|
&& !(src[4] >= 'A' && src[4] <= 'Z')
|
|
&& !(src[4] >= '0' && src[4] <= '9') && src[4] != '_')
|
|
return true;
|
|
return false;
|
|
}
|
|
|
|
static bool nodeImpliesMoreThanOnePossibleValue(
|
|
const Ast* tree, uint32_t node) {
|
|
uint32_t cur = node;
|
|
while (1) {
|
|
AstNodeTag tag = tree->nodes.tags[cur];
|
|
switch (tag) {
|
|
// Pointer/optional/array/anyframe types → true
|
|
// (AstGen.zig:10718-10725)
|
|
case AST_NODE_PTR_TYPE_ALIGNED:
|
|
case AST_NODE_PTR_TYPE_SENTINEL:
|
|
case AST_NODE_PTR_TYPE:
|
|
case AST_NODE_PTR_TYPE_BIT_RANGE:
|
|
case AST_NODE_OPTIONAL_TYPE:
|
|
case AST_NODE_ANYFRAME_TYPE:
|
|
case AST_NODE_ARRAY_TYPE_SENTINEL:
|
|
return true;
|
|
// Forward to LHS: try, comptime, nosuspend
|
|
// (AstGen.zig:10710-10713)
|
|
case AST_NODE_TRY:
|
|
case AST_NODE_COMPTIME:
|
|
case AST_NODE_NOSUSPEND:
|
|
cur = tree->nodes.datas[cur].lhs;
|
|
continue;
|
|
// Forward to LHS: grouped_expression, unwrap_optional
|
|
// (AstGen.zig:10714-10716)
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
case AST_NODE_UNWRAP_OPTIONAL:
|
|
cur = tree->nodes.datas[cur].lhs;
|
|
continue;
|
|
// Identifier: check primitives (AstGen.zig:10727-10780)
|
|
case AST_NODE_IDENTIFIER:
|
|
return identImpliesMoreThanOnePossibleValue(
|
|
tree, tree->nodes.main_tokens[cur]);
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
}
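// Examples: `?u8`, `*T`, and `[2:0]u8` imply more than one possible
// value; `void` or a user-defined identifier cannot be decided here and
// conservatively returns false.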
|
|
|
|
// --- nodeImpliesComptimeOnly (AstGen.zig:10787) ---
|
|
|
|
static bool identImpliesComptimeOnly(const Ast* tree, uint32_t main_token) {
|
|
uint32_t start = tree->tokens.starts[main_token];
|
|
const char* src = tree->source + start;
|
|
// Only comptime_float, comptime_int, type → true
|
|
// (AstGen.zig:11010-11013)
|
|
if (src[0] == 'c' && src[1] == 'o' && src[2] == 'm' && src[3] == 'p'
|
|
&& src[4] == 't' && src[5] == 'i' && src[6] == 'm' && src[7] == 'e')
|
|
return true; // comptime_float, comptime_int
|
|
if (src[0] == 't' && src[1] == 'y' && src[2] == 'p' && src[3] == 'e'
|
|
&& !(src[4] >= 'a' && src[4] <= 'z')
|
|
&& !(src[4] >= 'A' && src[4] <= 'Z')
|
|
&& !(src[4] >= '0' && src[4] <= '9') && src[4] != '_')
|
|
return true;
|
|
return false;
|
|
}
|
|
|
|
static bool nodeImpliesComptimeOnly(const Ast* tree, uint32_t node) {
|
|
uint32_t cur = node;
|
|
while (1) {
|
|
AstNodeTag tag = tree->nodes.tags[cur];
|
|
switch (tag) {
|
|
// Function prototypes → true (AstGen.zig:10950-10955)
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO:
|
|
return true;
|
|
// Forward to LHS: try, comptime, nosuspend
|
|
case AST_NODE_TRY:
|
|
case AST_NODE_COMPTIME:
|
|
case AST_NODE_NOSUSPEND:
|
|
cur = tree->nodes.datas[cur].lhs;
|
|
continue;
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
case AST_NODE_UNWRAP_OPTIONAL:
|
|
cur = tree->nodes.datas[cur].lhs;
|
|
continue;
|
|
// Identifier: check primitives
|
|
case AST_NODE_IDENTIFIER:
|
|
return identImpliesComptimeOnly(
|
|
tree, tree->nodes.main_tokens[cur]);
|
|
default:
|
|
return false;
|
|
}
|
|
}
|
|
}
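// Examples: `type`, `comptime_int`, and `fn () void` imply comptime-only;
// anything not recognized conservatively returns false.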
|
|
|
|
// --- WipMembers (AstGen.zig:3989) ---
|
|
// Tracks decl indices, field bit-flags, and per-field data during container
|
|
// processing. All data lives in a single malloc'd array laid out as:
|
|
// [decls (decl_count words)]
// [field_bits (ceil(field_count / fields_per_u32) words)]
// [fields (up to field_count * max_field_size words)]
|
|
// Bodies are tracked separately in a dynamic array.
|
|
|
|
typedef struct {
|
|
uint32_t* payload; // malloc'd array
|
|
uint32_t payload_top; // always 0 (start of decls region)
|
|
uint32_t field_bits_start;
|
|
uint32_t fields_start;
|
|
uint32_t fields_end;
|
|
uint32_t decl_index;
|
|
uint32_t field_index;
|
|
// Bodies scratch: dynamically grown array for field type/align/init
|
|
// bodies.
|
|
uint32_t* bodies;
|
|
uint32_t bodies_len;
|
|
uint32_t bodies_cap;
|
|
} WipMembers;
|
|
|
|
// Parameterized init (AstGen.zig:3989 WipMembers.init).
|
|
static WipMembers wipMembersInitEx(uint32_t decl_count, uint32_t field_count,
|
|
uint32_t bits_per_field, uint32_t max_field_size) {
|
|
uint32_t fields_per_u32 = bits_per_field > 0 ? 32 / bits_per_field : 0;
|
|
uint32_t field_bits_start = decl_count;
|
|
uint32_t bit_words = (field_count > 0 && fields_per_u32 > 0)
|
|
? (field_count + fields_per_u32 - 1) / fields_per_u32
|
|
: 0;
|
|
uint32_t fields_start = field_bits_start + bit_words;
|
|
uint32_t payload_end = fields_start + field_count * max_field_size;
|
|
uint32_t alloc_size = payload_end > 0 ? payload_end : 1;
|
|
uint32_t* payload = calloc(alloc_size, sizeof(uint32_t));
|
|
if (!payload)
|
|
exit(1);
|
|
WipMembers wm;
|
|
memset(&wm, 0, sizeof(wm));
|
|
wm.payload = payload;
|
|
wm.payload_top = 0;
|
|
wm.field_bits_start = field_bits_start;
|
|
wm.fields_start = fields_start;
|
|
wm.fields_end = fields_start;
|
|
wm.decl_index = 0;
|
|
wm.field_index = 0;
|
|
wm.bodies = NULL;
|
|
wm.bodies_len = 0;
|
|
wm.bodies_cap = 0;
|
|
return wm;
|
|
}
|
|
|
|
static WipMembers wipMembersInit(uint32_t decl_count, uint32_t field_count) {
|
|
// bits_per_field = 4, max_field_size = 5
|
|
return wipMembersInitEx(decl_count, field_count, 4, 5);
|
|
}
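// Example layout for decl_count=2, field_count=3 with the defaults above:
// payload[0..1] holds decl insts, payload[2] is the single bit-bag word
// (ceil(3/8) = 1), and payload[3..17] is reserved for up to 3*5 field
// words; fields_end advances as field data is appended.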
|
|
|
|
static void wipMembersDeinit(WipMembers* wm) {
|
|
free(wm->payload);
|
|
free(wm->bodies);
|
|
}
|
|
|
|
static void wipMembersNextDecl(WipMembers* wm, uint32_t decl_inst) {
|
|
wm->payload[wm->payload_top + wm->decl_index] = decl_inst;
|
|
wm->decl_index++;
|
|
}
|
|
|
|
// bits_per_field = 4: bits[0]=have_align, bits[1]=have_value,
|
|
// bits[2]=is_comptime, bits[3]=have_type_body
|
|
static void wipMembersNextField(WipMembers* wm, bool bits[4]) {
|
|
uint32_t fields_per_u32 = 8; // 32 / 4
|
|
uint32_t index = wm->field_bits_start + wm->field_index / fields_per_u32;
|
|
uint32_t bit_bag
|
|
= (wm->field_index % fields_per_u32 == 0) ? 0 : wm->payload[index];
|
|
bit_bag >>= 4;
|
|
for (int i = 0; i < 4; i++) {
|
|
bit_bag |= ((uint32_t)(bits[i] ? 1 : 0)) << (32 - 4 + i);
|
|
}
|
|
wm->payload[index] = bit_bag;
|
|
wm->field_index++;
|
|
}
|
|
|
|
static void wipMembersAppendToField(WipMembers* wm, uint32_t data) {
|
|
wm->payload[wm->fields_end] = data;
|
|
wm->fields_end++;
|
|
}
|
|
|
|
static void wipMembersFinishBits(WipMembers* wm) {
|
|
uint32_t fields_per_u32 = 8; // 32 / 4
|
|
uint32_t empty_field_slots
|
|
= fields_per_u32 - (wm->field_index % fields_per_u32);
|
|
if (wm->field_index > 0 && empty_field_slots < fields_per_u32) {
|
|
uint32_t index
|
|
= wm->field_bits_start + wm->field_index / fields_per_u32;
|
|
wm->payload[index] >>= (empty_field_slots * 4);
|
|
}
|
|
}
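// Bit-bag mechanics: each wipMembersNextField call shifts the current
// word right by 4 and ORs the new field's flags into the top nibble, so
// after 8 fields the first field's flags sit in the lowest nibble.
// wipMembersFinishBits applies the final shift when the last word is only
// partially filled.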
|
|
|
|
// bits_per_field = 1: bits[0]=have_value (for enum fields).
|
|
static void wipMembersNextFieldEnum(WipMembers* wm, bool have_value) {
|
|
uint32_t fields_per_u32 = 32; // 32 / 1
|
|
uint32_t index = wm->field_bits_start + wm->field_index / fields_per_u32;
|
|
uint32_t bit_bag
|
|
= (wm->field_index % fields_per_u32 == 0) ? 0 : wm->payload[index];
|
|
bit_bag >>= 1;
|
|
bit_bag |= ((uint32_t)(have_value ? 1 : 0)) << 31;
|
|
wm->payload[index] = bit_bag;
|
|
wm->field_index++;
|
|
}
|
|
|
|
static void wipMembersFinishBitsEnum(WipMembers* wm) {
|
|
uint32_t fields_per_u32 = 32; // 32 / 1
|
|
uint32_t empty_field_slots
|
|
= fields_per_u32 - (wm->field_index % fields_per_u32);
|
|
if (wm->field_index > 0 && empty_field_slots < fields_per_u32) {
|
|
uint32_t index
|
|
= wm->field_bits_start + wm->field_index / fields_per_u32;
|
|
wm->payload[index] >>= empty_field_slots;
|
|
}
|
|
}
|
|
|
|
// Returns pointer to decls region and its length.
|
|
static const uint32_t* wipMembersDeclsSlice(
|
|
const WipMembers* wm, uint32_t* out_len) {
|
|
*out_len = wm->decl_index;
|
|
return wm->payload + wm->payload_top;
|
|
}
|
|
|
|
// Returns pointer to fields region (field_bits + field_data) and its length.
|
|
static const uint32_t* wipMembersFieldsSlice(
|
|
const WipMembers* wm, uint32_t* out_len) {
|
|
*out_len = wm->fields_end - wm->field_bits_start;
|
|
return wm->payload + wm->field_bits_start;
|
|
}
|
|
|
|
// Append body instructions to the WipMembers bodies scratch.
|
|
static void wipMembersBodiesAppend(
|
|
WipMembers* wm, const uint32_t* data, uint32_t len) {
|
|
if (wm->bodies_len + len > wm->bodies_cap) {
|
|
uint32_t new_cap = wm->bodies_cap == 0 ? 64 : wm->bodies_cap * 2;
|
|
while (new_cap < wm->bodies_len + len)
|
|
new_cap *= 2;
|
|
wm->bodies = realloc(wm->bodies, new_cap * sizeof(uint32_t));
|
|
if (!wm->bodies)
|
|
exit(1);
|
|
wm->bodies_cap = new_cap;
|
|
}
|
|
memcpy(wm->bodies + wm->bodies_len, data, len * sizeof(uint32_t));
|
|
wm->bodies_len += len;
|
|
}
|
|
|
|
// Append body instructions with ref_table fixups to wm->bodies.
|
|
static void wipMembersBodiesAppendWithFixups(
|
|
WipMembers* wm, AstGenCtx* ag, const uint32_t* body, uint32_t body_len) {
|
|
for (uint32_t i = 0; i < body_len; i++) {
|
|
uint32_t inst = body[i];
|
|
// Grow if needed.
|
|
if (wm->bodies_len + 1 > wm->bodies_cap) {
|
|
uint32_t new_cap = wm->bodies_cap == 0 ? 64 : wm->bodies_cap * 2;
|
|
wm->bodies = realloc(wm->bodies, new_cap * sizeof(uint32_t));
|
|
if (!wm->bodies)
|
|
exit(1);
|
|
wm->bodies_cap = new_cap;
|
|
}
|
|
wm->bodies[wm->bodies_len++] = inst;
|
|
// Check for ref fixup.
|
|
uint32_t ref_inst;
|
|
while (refTableFetchRemove(ag, inst, &ref_inst)) {
|
|
if (wm->bodies_len + 1 > wm->bodies_cap) {
|
|
uint32_t new_cap = wm->bodies_cap * 2;
|
|
wm->bodies = realloc(wm->bodies, new_cap * sizeof(uint32_t));
|
|
if (!wm->bodies)
|
|
exit(1);
|
|
wm->bodies_cap = new_cap;
|
|
}
|
|
wm->bodies[wm->bodies_len++] = ref_inst;
|
|
inst = ref_inst;
|
|
}
|
|
}
|
|
}
|
|
|
|
// --- containerDecl (AstGen.zig:5468) ---
|
|
// Handles container declarations as expressions (struct{}, enum{}, etc.).
|
|
|
|
static uint32_t containerDecl(
|
|
GenZir* gz, Scope* scope, uint32_t node, uint8_t name_strategy) {
|
|
AstGenCtx* ag = gz->astgen;
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Extract members based on node type (Ast.zig:2459-2470).
|
|
uint32_t members_buf[2];
|
|
const uint32_t* members;
|
|
uint32_t members_len;
|
|
|
|
switch (tag) {
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING: {
|
|
// lhs and rhs are optional member nodes (0 = none).
|
|
members_len = 0;
|
|
if (nd.lhs != 0)
|
|
members_buf[members_len++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
members_buf[members_len++] = nd.rhs;
|
|
members = members_buf;
|
|
break;
|
|
}
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING: {
|
|
// extra_data[lhs..rhs] contains members.
|
|
members = tree->extra_data.arr + nd.lhs;
|
|
members_len = nd.rhs - nd.lhs;
|
|
break;
|
|
}
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: {
|
|
// lhs is arg node, rhs is extra index → SubRange(start, end).
|
|
uint32_t start = tree->extra_data.arr[nd.rhs];
|
|
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
|
|
members = tree->extra_data.arr + start;
|
|
members_len = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
return ZIR_REF_VOID_VALUE;
|
|
}
|
|
|
|
// Save/clear fn_block for nested containers (AstGen.zig:5480-5482).
|
|
void* prev_fn_block = ag->fn_block;
|
|
ag->fn_block = NULL;
|
|
|
|
// Extract arg node for container_decl_arg variants (AstGen.zig:5638).
|
|
// For enum(u8), lhs is the arg node.
|
|
uint32_t arg_node = 0; // 0 = none
|
|
switch (tag) {
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
|
|
arg_node = nd.lhs;
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
|
|
// Dispatch based on container keyword (AstGen.zig:5485-5536).
|
|
uint32_t main_token = tree->nodes.main_tokens[node];
|
|
TokenizerTag kw_tag = tree->tokens.tags[main_token];
|
|
uint32_t decl_inst;
|
|
switch (kw_tag) {
|
|
case TOKEN_KEYWORD_STRUCT: {
|
|
// Extract layout from token before main_token (AstGen.zig:5489-5493).
|
|
// auto=0, extern=1, packed=2.
|
|
uint8_t layout = 0; // auto
|
|
if (main_token > 0) {
|
|
TokenizerTag prev_tag = tree->tokens.tags[main_token - 1];
|
|
if (prev_tag == TOKEN_KEYWORD_PACKED)
|
|
layout = 2;
|
|
else if (prev_tag == TOKEN_KEYWORD_EXTERN)
|
|
layout = 1;
|
|
}
|
|
decl_inst = structDeclInner(ag, gz, node, members, members_len, layout,
|
|
arg_node, name_strategy);
|
|
break;
|
|
}
|
|
case TOKEN_KEYWORD_ENUM:
|
|
decl_inst = enumDeclInner(
|
|
ag, gz, node, members, members_len, arg_node, name_strategy);
|
|
break;
|
|
default:
|
|
// union/opaque: fall back to struct for now.
|
|
decl_inst = structDeclInner(
|
|
ag, gz, node, members, members_len, 0, 0, name_strategy);
|
|
break;
|
|
}
|
|
(void)scope;
|
|
|
|
ag->fn_block = prev_fn_block;
|
|
return decl_inst + ZIR_REF_START_INDEX;
|
|
}
|
|
|
|
// --- EnumDecl.Small packing (Zir.zig EnumDecl.Small) ---
|
|
|
|
typedef struct {
|
|
bool has_tag_type;
|
|
bool has_captures_len;
|
|
bool has_body_len;
|
|
bool has_fields_len;
|
|
bool has_decls_len;
|
|
uint8_t name_strategy; // 2 bits
|
|
bool nonexhaustive;
|
|
} EnumDeclSmall;
|
|
|
|
static uint16_t packEnumDeclSmall(EnumDeclSmall s) {
|
|
uint16_t r = 0;
|
|
if (s.has_tag_type)
|
|
r |= (1u << 0);
|
|
if (s.has_captures_len)
|
|
r |= (1u << 1);
|
|
if (s.has_body_len)
|
|
r |= (1u << 2);
|
|
if (s.has_fields_len)
|
|
r |= (1u << 3);
|
|
if (s.has_decls_len)
|
|
r |= (1u << 4);
|
|
r |= (uint16_t)(s.name_strategy & 0x3u) << 5;
|
|
if (s.nonexhaustive)
|
|
r |= (1u << 7);
|
|
return r;
|
|
}
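// Resulting layout (low to high): bit 0 has_tag_type, bit 1
// has_captures_len, bit 2 has_body_len, bit 3 has_fields_len, bit 4
// has_decls_len, bits 5-6 name_strategy, bit 7 nonexhaustive. For
// instance, an exhaustive `enum(u8)` with fields, no decls, and no
// explicit tag values typically packs to 0x09 (bits 0 and 3).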
|
|
|
|
// Mirrors GenZir.setEnum (AstGen.zig:13064-13123).
|
|
static void setEnum(AstGenCtx* ag, uint32_t inst, uint32_t src_node,
|
|
uint32_t tag_type, uint32_t captures_len, uint32_t body_len,
|
|
uint32_t fields_len, uint32_t decls_len, bool nonexhaustive,
|
|
uint8_t name_strategy) {
|
|
EnumDeclSmall small;
|
|
memset(&small, 0, sizeof(small));
|
|
small.has_tag_type = (tag_type != ZIR_REF_NONE);
|
|
small.has_captures_len = (captures_len != 0);
|
|
small.has_body_len = (body_len != 0);
|
|
small.has_fields_len = (fields_len != 0);
|
|
small.has_decls_len = (decls_len != 0);
|
|
small.name_strategy = name_strategy;
|
|
small.nonexhaustive = nonexhaustive;
|
|
|
|
ensureExtraCapacity(ag, 6 + 5);
|
|
|
|
uint32_t payload_index = ag->extra_len;
|
|
|
|
// fields_hash (4 words): zero-filled; hash comparison skipped in tests.
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
ag->extra[ag->extra_len++] = 0;
|
|
|
|
ag->extra[ag->extra_len++] = ag->source_line;
|
|
ag->extra[ag->extra_len++] = src_node;
|
|
|
|
// Trailing data in upstream order (AstGen.zig:13092-13106).
|
|
if (small.has_tag_type)
|
|
ag->extra[ag->extra_len++] = tag_type;
|
|
if (small.has_captures_len)
|
|
ag->extra[ag->extra_len++] = captures_len;
|
|
if (small.has_body_len)
|
|
ag->extra[ag->extra_len++] = body_len;
|
|
if (small.has_fields_len)
|
|
ag->extra[ag->extra_len++] = fields_len;
|
|
if (small.has_decls_len)
|
|
ag->extra[ag->extra_len++] = decls_len;
|
|
|
|
ag->inst_tags[inst] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
memset(&data, 0, sizeof(data));
|
|
data.extended.opcode = (uint16_t)ZIR_EXT_ENUM_DECL;
|
|
data.extended.small = packEnumDeclSmall(small);
|
|
data.extended.operand = payload_index;
|
|
ag->inst_datas[inst] = data;
|
|
}
|
|
|
|
// Returns true if the identifier token at `ident_token` is "_".
|
|
static bool tokenIsUnderscore(const Ast* tree, uint32_t ident_token) {
|
|
uint32_t start = tree->tokens.starts[ident_token];
|
|
const char* src = tree->source;
|
|
if (src[start] != '_')
|
|
return false;
|
|
// Check that the next character is not alphanumeric/underscore
|
|
// (i.e., the identifier is exactly "_").
|
|
char next = src[start + 1];
|
|
if ((next >= 'a' && next <= 'z') || (next >= 'A' && next <= 'Z')
|
|
|| (next >= '0' && next <= '9') || next == '_')
|
|
return false;
|
|
return true;
|
|
}
|
|
|
|
// --- enumDeclInner (AstGen.zig:5508-5728) ---
|
|
// Handles enum container declarations.
|
|
// arg_node: the tag type expression node (e.g. u8 in enum(u8)), 0 if none.
|
|
|
|
static uint32_t enumDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
|
|
const uint32_t* members, uint32_t members_len, uint32_t arg_node,
|
|
uint8_t name_strategy) {
|
|
const Ast* tree = ag->tree;
|
|
|
|
// --- First pass: count fields, values, decls, detect nonexhaustive ---
|
|
// (AstGen.zig:5513-5590)
|
|
uint32_t total_fields = 0;
|
|
uint32_t decl_count = 0;
|
|
uint32_t nonexhaustive_index = UINT32_MAX; // index into members[]
|
|
for (uint32_t i = 0; i < members_len; i++) {
|
|
uint32_t member_node = members[i];
|
|
AstNodeTag mtag = tree->nodes.tags[member_node];
|
|
switch (mtag) {
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
uint32_t main_token = tree->nodes.main_tokens[member_node];
|
|
// Check for "_" (nonexhaustive marker).
|
|
if (tokenIsUnderscore(tree, main_token)) {
|
|
nonexhaustive_index = i;
|
|
continue;
|
|
}
|
|
total_fields++;
|
|
break;
|
|
}
|
|
default:
|
|
decl_count++;
|
|
break;
|
|
}
|
|
}
|
|
bool nonexhaustive = (nonexhaustive_index != UINT32_MAX);
|
|
|
|
uint32_t decl_inst = reserveInstructionIndex(ag);
|
|
gzAppendInstruction(gz, decl_inst);
|
|
|
|
// Create namespace scope (AstGen.zig:5618-5623).
|
|
ScopeNamespace namespace;
|
|
scopeNamespaceInit(
|
|
&namespace, &gz->base, node, decl_inst, gz, ag->within_fn);
|
|
|
|
advanceSourceCursorToNode(ag, node);
|
|
|
|
// scanContainer to register names in string table (AstGen.zig:5635).
|
|
scanContainer(ag, &namespace, members, members_len);
|
|
|
|
// Set up block_scope for tag value expressions (AstGen.zig:5624-5632).
|
|
GenZir block_scope;
|
|
memset(&block_scope, 0, sizeof(block_scope));
|
|
block_scope.base.tag = SCOPE_GEN_ZIR;
|
|
block_scope.parent = &namespace.base;
|
|
block_scope.astgen = ag;
|
|
block_scope.decl_node_index = node;
|
|
block_scope.decl_line = ag->source_line;
|
|
block_scope.is_comptime = true;
|
|
block_scope.instructions_top = ag->scratch_inst_len;
|
|
block_scope.any_defer_node = UINT32_MAX;
|
|
|
|
// Evaluate tag type argument if present (AstGen.zig:5638-5641).
|
|
uint32_t arg_inst = ZIR_REF_NONE;
|
|
if (arg_node != 0)
|
|
arg_inst = typeExpr(&block_scope, &block_scope.base, arg_node);
|
|
|
|
// WipMembers with bits_per_field=1, max_field_size=2 (AstGen.zig:5645).
|
|
WipMembers wm = wipMembersInitEx(decl_count, total_fields, 1, 2);
|
|
|
|
// --- Second pass: process members (AstGen.zig:5656-5693) ---
|
|
for (uint32_t i = 0; i < members_len; i++) {
|
|
uint32_t member_node = members[i];
|
|
// Skip nonexhaustive marker field (AstGen.zig:5657-5658).
|
|
if (i == nonexhaustive_index)
|
|
continue;
|
|
AstNodeTag mtag = tree->nodes.tags[member_node];
|
|
switch (mtag) {
|
|
case AST_NODE_COMPTIME:
|
|
comptimeDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
case AST_NODE_GLOBAL_VAR_DECL:
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
case AST_NODE_ALIGNED_VAR_DECL:
|
|
globalVarDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_FN_DECL:
|
|
fnDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_TEST_DECL:
|
|
testDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
// Enum field (AstGen.zig:5669-5692).
|
|
uint32_t main_token = tree->nodes.main_tokens[member_node];
|
|
uint32_t field_name = identAsString(ag, main_token);
|
|
wipMembersAppendToField(&wm, field_name);
|
|
|
|
// Extract value expression.
|
|
AstData mnd = tree->nodes.datas[member_node];
|
|
uint32_t value_node = 0;
|
|
if (mtag == AST_NODE_CONTAINER_FIELD_INIT) {
|
|
value_node = mnd.rhs;
|
|
} else if (mtag == AST_NODE_CONTAINER_FIELD && mnd.rhs != 0) {
|
|
value_node = tree->extra_data.arr[mnd.rhs + 1];
|
|
}
|
|
|
|
bool have_value = (value_node != 0);
|
|
wipMembersNextFieldEnum(&wm, have_value);
|
|
|
|
// Evaluate tag value expression (AstGen.zig:5690-5691).
|
|
if (have_value) {
|
|
ResultLoc val_rl = { .tag = RL_COERCED_TY,
|
|
.data = arg_inst,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t tag_value_inst = exprRl(
|
|
&block_scope, &block_scope.base, val_rl, value_node);
|
|
wipMembersAppendToField(&wm, tag_value_inst);
|
|
}
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
break;
|
|
}
|
|
}
|
|
|
|
// Emit break_inline if block_scope has instructions
|
|
// (AstGen.zig:5695-5697).
|
|
if (gzInstructionsLen(&block_scope) > 0) {
|
|
addBreak(&block_scope, ZIR_INST_BREAK_INLINE, decl_inst,
|
|
ZIR_REF_VOID_VALUE, AST_NODE_OFFSET_NONE);
|
|
}
|
|
|
|
uint32_t raw_body_len = gzInstructionsLen(&block_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&block_scope);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_body_len);
|
|
|
|
// setEnum (AstGen.zig:5705-5715).
|
|
setEnum(ag, decl_inst, node, arg_inst, namespace.captures_len, body_len,
|
|
total_fields, decl_count, nonexhaustive, name_strategy);
|
|
|
|
wipMembersFinishBitsEnum(&wm);
|
|
|
|
// Append trailing data (AstGen.zig:5718-5725):
|
|
// captures, decls, body, fields.
|
|
uint32_t decls_len_out;
|
|
const uint32_t* decls_slice = wipMembersDeclsSlice(&wm, &decls_len_out);
|
|
uint32_t fields_len_out;
|
|
const uint32_t* fields_slice = wipMembersFieldsSlice(&wm, &fields_len_out);
|
|
|
|
ensureExtraCapacity(ag,
|
|
namespace.captures_len * 2 + decls_len_out + body_len
|
|
+ fields_len_out);
|
|
// Captures (AstGen.zig:5721-5722).
|
|
for (uint32_t i = 0; i < namespace.captures_len; i++)
|
|
ag->extra[ag->extra_len++] = namespace.capture_keys[i];
|
|
for (uint32_t i = 0; i < namespace.captures_len; i++)
|
|
ag->extra[ag->extra_len++] = namespace.capture_vals[i];
|
|
for (uint32_t i = 0; i < decls_len_out; i++)
|
|
ag->extra[ag->extra_len++] = decls_slice[i];
|
|
// Body instructions with fixups (AstGen.zig:5724).
|
|
for (uint32_t i = 0; i < raw_body_len; i++)
|
|
appendPossiblyRefdBodyInst(ag, body[i]);
|
|
// Fields (bit bags + field data).
|
|
for (uint32_t i = 0; i < fields_len_out; i++)
|
|
ag->extra[ag->extra_len++] = fields_slice[i];
|
|
|
|
gzUnstack(&block_scope);
|
|
wipMembersDeinit(&wm);
|
|
scopeNamespaceDeinit(&namespace);
|
|
return decl_inst;
|
|
}
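// Rough example: `enum(u8) { a, b = 2, _ }` has total_fields=2 and
// nonexhaustive=true; the trailing extra data is captures, then decl
// indices, then the tag-value body (if the explicit values needed any
// instructions), then the field bit bag and per-field name/value words.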
|
|
|
|
// --- tupleDecl (AstGen.zig:5192) ---
|
|
|
|
static uint32_t tupleDecl(AstGenCtx* ag, GenZir* gz, uint32_t node,
|
|
const uint32_t* members, uint32_t members_len, uint8_t layout,
|
|
uint32_t backing_int_node) {
|
|
const Ast* tree = ag->tree;
|
|
|
|
// layout must be auto for tuples (AstGen.zig:5204-5207).
|
|
if (layout != 0) {
|
|
SET_ERROR(ag);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
// tuples don't support backing int (AstGen.zig:5209-5211).
|
|
if (backing_int_node != 0) {
|
|
SET_ERROR(ag);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
|
|
// Collect per-field data into a temporary buffer: for each field, the
// type ref followed by the default init ref (ZIR_REF_NONE when absent).
// Upstream collects this in astgen.scratch.
uint32_t* tuple_scratch = NULL;
|
|
uint32_t tuple_scratch_len = 0;
|
|
uint32_t tuple_scratch_cap = 0;
|
|
|
|
for (uint32_t i = 0; i < members_len; i++) {
|
|
uint32_t member_node = members[i];
|
|
AstNodeTag mtag = tree->nodes.tags[member_node];
|
|
|
|
// Non-field nodes are errors in tuples (AstGen.zig:5224-5238).
|
|
if (mtag != AST_NODE_CONTAINER_FIELD_INIT
|
|
&& mtag != AST_NODE_CONTAINER_FIELD_ALIGN
|
|
&& mtag != AST_NODE_CONTAINER_FIELD) {
|
|
SET_ERROR(ag);
|
|
free(tuple_scratch);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
|
|
// Extract field info.
|
|
uint32_t main_token = tree->nodes.main_tokens[member_node];
|
|
AstData nd = tree->nodes.datas[member_node];
|
|
uint32_t type_node = nd.lhs;
|
|
uint32_t align_node = 0;
|
|
uint32_t value_node = 0;
|
|
bool has_comptime_token = false;
|
|
|
|
switch (mtag) {
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
value_node = nd.rhs;
|
|
break;
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
align_node = nd.rhs;
|
|
break;
|
|
case AST_NODE_CONTAINER_FIELD:
|
|
if (nd.rhs != 0) {
|
|
align_node = tree->extra_data.arr[nd.rhs];
|
|
value_node = tree->extra_data.arr[nd.rhs + 1];
|
|
}
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
|
|
if (main_token > 0
|
|
&& tree->tokens.tags[main_token - 1] == TOKEN_KEYWORD_COMPTIME) {
|
|
has_comptime_token = true;
|
|
}
|
|
|
|
// Check tuple_like: must be tuple-like (AstGen.zig:5240-5241).
|
|
bool tuple_like = tree->tokens.tags[main_token] != TOKEN_IDENTIFIER
|
|
|| tree->tokens.tags[main_token + 1] != TOKEN_COLON;
|
|
if (!tuple_like) {
|
|
// Named field in tuple: error (AstGen.zig:5241).
|
|
SET_ERROR(ag);
|
|
free(tuple_scratch);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
|
|
// Tuple fields cannot have alignment (AstGen.zig:5244-5246).
|
|
if (align_node != 0) {
|
|
SET_ERROR(ag);
|
|
free(tuple_scratch);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
|
|
// Non-comptime tuple field with default init: error
|
|
// (AstGen.zig:5248-5250).
|
|
if (value_node != 0 && !has_comptime_token) {
|
|
SET_ERROR(ag);
|
|
free(tuple_scratch);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
|
|
// Comptime field without default init: error
|
|
// (AstGen.zig:5252-5254).
|
|
if (value_node == 0 && has_comptime_token) {
|
|
SET_ERROR(ag);
|
|
free(tuple_scratch);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
|
|
// Type expression (AstGen.zig:5256).
|
|
uint32_t field_type_ref = typeExpr(gz, &gz->base, type_node);
|
|
|
|
// Grow scratch buffer.
|
|
if (tuple_scratch_len + 2 > tuple_scratch_cap) {
|
|
uint32_t new_cap
|
|
= tuple_scratch_cap == 0 ? 16 : tuple_scratch_cap * 2;
|
|
tuple_scratch = realloc(tuple_scratch, new_cap * sizeof(uint32_t));
|
|
if (!tuple_scratch)
|
|
exit(1);
|
|
tuple_scratch_cap = new_cap;
|
|
}
|
|
tuple_scratch[tuple_scratch_len++] = field_type_ref;
|
|
|
|
// Default init (AstGen.zig:5259-5264).
|
|
if (value_node != 0) {
|
|
ResultLoc init_rl = { .tag = RL_COERCED_TY,
|
|
.data = field_type_ref,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t field_init_ref = comptimeExpr(gz, &gz->base, init_rl,
|
|
value_node, COMPTIME_REASON_TUPLE_FIELD_DEFAULT_VALUE);
|
|
tuple_scratch[tuple_scratch_len++] = field_init_ref;
|
|
} else {
|
|
tuple_scratch[tuple_scratch_len++] = ZIR_REF_NONE;
|
|
}
|
|
}
|
|
|
|
if (members_len > 65535) {
|
|
SET_ERROR(ag);
|
|
free(tuple_scratch);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
uint16_t fields_len = (uint16_t)members_len;
|
|
|
|
// Write TupleDecl payload (AstGen.zig:5274-5286).
|
|
ensureExtraCapacity(ag, 1 + tuple_scratch_len);
|
|
uint32_t payload_index = ag->extra_len;
|
|
// src_node as node offset relative to gz->decl_node_index.
|
|
ag->extra[ag->extra_len++]
|
|
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
|
|
|
|
for (uint32_t i = 0; i < tuple_scratch_len; i++)
|
|
ag->extra[ag->extra_len++] = tuple_scratch[i];
|
|
|
|
free(tuple_scratch);
|
|
|
|
// Emit extended instruction (AstGen.zig:5279-5286).
|
|
ensureInstCapacity(ag, 1);
|
|
uint32_t idx = ag->inst_len;
|
|
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
|
|
ZirInstData data;
|
|
memset(&data, 0, sizeof(data));
|
|
data.extended.opcode = (uint16_t)ZIR_EXT_TUPLE_DECL;
|
|
data.extended.small = fields_len;
|
|
data.extended.operand = payload_index;
|
|
ag->inst_datas[idx] = data;
|
|
ag->inst_len++;
|
|
gzAppendInstruction(gz, idx);
|
|
return idx;
|
|
}
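// Rough example: `struct { u32, comptime u8 = 0 }` emits one tuple_decl
// extended instruction with small=2 whose payload is the relative
// src_node followed by (type, init) pairs: (u32, .none) and (u8, the
// comptime-evaluated 0).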
|
|
|
|
// --- structDeclInner (AstGen.zig:4926) ---
|
|
|
|
static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
|
|
const uint32_t* members, uint32_t members_len, uint8_t layout,
|
|
uint32_t backing_int_node, uint8_t name_strategy) {
|
|
const Ast* tree = ag->tree;
|
|
|
|
// Tuple detection (AstGen.zig:4939-4950).
|
|
// Scan for tuple-like fields; if any found, dispatch to tupleDecl.
|
|
for (uint32_t i = 0; i < members_len; i++) {
|
|
uint32_t member_node = members[i];
|
|
AstNodeTag mtag = tree->nodes.tags[member_node];
|
|
if (mtag != AST_NODE_CONTAINER_FIELD_INIT
|
|
&& mtag != AST_NODE_CONTAINER_FIELD_ALIGN
|
|
&& mtag != AST_NODE_CONTAINER_FIELD)
|
|
continue;
|
|
uint32_t main_token = tree->nodes.main_tokens[member_node];
|
|
bool tuple_like = tree->tokens.tags[main_token] != TOKEN_IDENTIFIER
|
|
|| tree->tokens.tags[main_token + 1] != TOKEN_COLON;
|
|
if (tuple_like) {
|
|
if (node == 0) {
|
|
// Root node: file cannot be a tuple
|
|
// (AstGen.zig:4946).
|
|
SET_ERROR(ag);
|
|
return reserveInstructionIndex(ag);
|
|
}
|
|
return tupleDecl(
|
|
ag, gz, node, members, members_len, layout, backing_int_node);
|
|
}
|
|
}
|
|
|
|
uint32_t decl_inst = reserveInstructionIndex(ag);
|
|
gzAppendInstruction(gz, decl_inst);
|
|
|
|
// Fast path: no members, no backing int (AstGen.zig:4954-4970).
|
|
if (members_len == 0 && backing_int_node == 0) {
|
|
StructDeclSmall small;
|
|
memset(&small, 0, sizeof(small));
|
|
small.layout = layout;
|
|
small.name_strategy = name_strategy;
|
|
setStruct(ag, decl_inst, node, small, 0, 0, 0);
|
|
return decl_inst;
|
|
}
|
|
|
|
// Non-empty container (AstGen.zig:4973-5189).
|
|
|
|
// Create namespace scope (AstGen.zig:4973-4979).
|
|
ScopeNamespace namespace;
|
|
scopeNamespaceInit(
|
|
&namespace, &gz->base, node, decl_inst, gz, ag->within_fn);
|
|
|
|
advanceSourceCursorToNode(ag, node);
|
|
|
|
uint32_t decl_count = scanContainer(ag, &namespace, members, members_len);
|
|
uint32_t field_count = members_len - decl_count;
|
|
|
|
WipMembers wm = wipMembersInit(decl_count, field_count);
|
|
|
|
// Set up block_scope for field type/align/init expressions.
|
|
// (AstGen.zig:4986-4994)
|
|
GenZir block_scope;
|
|
memset(&block_scope, 0, sizeof(block_scope));
|
|
block_scope.base.tag = SCOPE_GEN_ZIR;
|
|
block_scope.parent = &namespace.base;
|
|
block_scope.astgen = ag;
|
|
block_scope.decl_node_index = node;
|
|
block_scope.decl_line = gz->decl_line; // Fix #7: use gz->decl_line
|
|
block_scope.is_comptime = true;
|
|
block_scope.instructions_top = ag->scratch_inst_len;
|
|
block_scope.any_defer_node = UINT32_MAX;
|
|
|
|
// Handle backing_int_node for packed structs (AstGen.zig:5000-5024).
|
|
// We store the raw body instructions and apply fixups at the final append.
|
|
uint32_t backing_int_body_raw_len = 0;
|
|
uint32_t backing_int_ref = ZIR_REF_NONE;
|
|
uint32_t* backing_int_body_raw = NULL;
|
|
if (backing_int_node != 0) {
|
|
if (layout != 2) { // not packed
|
|
SET_ERROR(ag); // non-packed struct with backing int
|
|
} else {
|
|
backing_int_ref
|
|
= typeExpr(&block_scope, &block_scope.base, backing_int_node);
|
|
if (gzInstructionsLen(&block_scope) > 0) {
|
|
if (!endsWithNoReturn(&block_scope)) {
|
|
makeBreakInline(&block_scope, decl_inst, backing_int_ref,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
backing_int_body_raw_len = gzInstructionsLen(&block_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&block_scope);
|
|
backing_int_body_raw
|
|
= malloc(backing_int_body_raw_len * sizeof(uint32_t));
|
|
if (!backing_int_body_raw)
|
|
exit(1);
|
|
memcpy(backing_int_body_raw, body,
|
|
backing_int_body_raw_len * sizeof(uint32_t));
|
|
ag->scratch_inst_len = block_scope.instructions_top;
|
|
}
|
|
}
|
|
}
|
|
|
|
bool known_non_opv = false;
|
|
bool known_comptime_only = false;
|
|
bool any_comptime_fields = false;
|
|
bool any_aligned_fields = false;
|
|
bool any_default_inits = false;
|
|
|
|
// Process each member (AstGen.zig:5060-5147).
|
|
for (uint32_t i = 0; i < members_len; i++) {
|
|
uint32_t member_node = members[i];
|
|
AstNodeTag mtag = tree->nodes.tags[member_node];
|
|
switch (mtag) {
|
|
case AST_NODE_COMPTIME:
|
|
comptimeDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
globalVarDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_TEST_DECL:
|
|
testDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_FN_DECL:
|
|
fnDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
// fn_proto* dispatch (AstGen.zig:5809-5813, issue #9).
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO:
|
|
fnDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_USINGNAMESPACE:
|
|
case AST_NODE_GLOBAL_VAR_DECL:
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
case AST_NODE_ALIGNED_VAR_DECL:
|
|
globalVarDecl(ag, gz, &namespace.base, wm.payload, &wm.decl_index,
|
|
member_node);
|
|
break;
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
// Extract field info from AST node (Ast.zig:1413-1454).
|
|
uint32_t main_token = tree->nodes.main_tokens[member_node];
|
|
AstData nd = tree->nodes.datas[member_node];
|
|
uint32_t type_node = nd.lhs;
|
|
uint32_t align_node = 0;
|
|
uint32_t value_node = 0;
|
|
bool has_comptime_token = false;
|
|
|
|
switch (mtag) {
|
|
case AST_NODE_CONTAINER_FIELD_INIT:
|
|
// lhs = type_expr, rhs = value_expr (optional, 0=none)
|
|
value_node = nd.rhs;
|
|
break;
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN:
|
|
// lhs = type_expr, rhs = align_expr
|
|
align_node = nd.rhs;
|
|
break;
|
|
case AST_NODE_CONTAINER_FIELD:
|
|
// lhs = type_expr, rhs = extra index to {align, value}
|
|
if (nd.rhs != 0) {
|
|
align_node = tree->extra_data.arr[nd.rhs];
|
|
value_node = tree->extra_data.arr[nd.rhs + 1];
|
|
}
|
|
break;
|
|
default:
|
|
break;
|
|
}
|
|
|
|
// Check for comptime token preceding main_token
|
|
// (Ast.zig:2071-2082).
|
|
if (main_token > 0
|
|
&& tree->tokens.tags[main_token - 1]
|
|
== TOKEN_KEYWORD_COMPTIME) {
|
|
has_comptime_token = true;
|
|
}
|
|
|
|
// Field name (AstGen.zig:5068).
|
|
// convertToNonTupleLike: for struct fields, if type_expr is
|
|
// an identifier node, it's actually a named field with the
|
|
// identifier as the name (AstGen.zig:5069-5070).
|
|
uint32_t field_name = identAsString(ag, main_token);
|
|
wipMembersAppendToField(&wm, field_name);
|
|
|
|
// Type expression: struct field missing type is an error
|
|
// (AstGen.zig:5073-5075, issue #12).
|
|
if (type_node == 0) {
|
|
SET_ERROR(ag);
|
|
break;
|
|
}
|
|
|
|
bool have_type_body = false;
|
|
uint32_t field_type = 0;
|
|
field_type = typeExpr(&block_scope, &block_scope.base, type_node);
|
|
have_type_body = (gzInstructionsLen(&block_scope) > 0);
|
|
|
|
bool have_align = (align_node != 0);
|
|
bool have_value = (value_node != 0);
|
|
bool is_comptime = has_comptime_token;
|
|
|
|
// Packed/extern struct comptime field error
|
|
// (AstGen.zig:5083-5087, issue #15).
|
|
if (is_comptime) {
|
|
if (layout == 2 || layout == 1) {
|
|
// packed or extern struct fields cannot be comptime.
|
|
SET_ERROR(ag);
|
|
break;
|
|
}
|
|
any_comptime_fields = true;
|
|
} else {
|
|
// (AstGen.zig:5089-5093)
|
|
known_non_opv = known_non_opv
|
|
|| nodeImpliesMoreThanOnePossibleValue(tree, type_node);
|
|
known_comptime_only = known_comptime_only
|
|
|| nodeImpliesComptimeOnly(tree, type_node);
|
|
}
|
|
|
|
bool field_bits[4]
|
|
= { have_align, have_value, is_comptime, have_type_body };
|
|
wipMembersNextField(&wm, field_bits);
|
|
|
|
if (have_type_body) {
|
|
// Emit break_inline to carry the type value
|
|
// (AstGen.zig:5097-5099).
|
|
if (!endsWithNoReturn(&block_scope)) {
|
|
makeBreakInline(&block_scope, decl_inst, field_type,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
uint32_t raw_len = gzInstructionsLen(&block_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&block_scope);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_len);
|
|
wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
|
|
wipMembersAppendToField(&wm, body_len);
|
|
// Reset block_scope.
|
|
ag->scratch_inst_len = block_scope.instructions_top;
|
|
} else {
|
|
wipMembersAppendToField(&wm, field_type);
|
|
}
|
|
|
|
if (have_align) {
|
|
// Packed struct fields cannot have alignment overrides
|
|
// (AstGen.zig:5111-5113, issue #15).
|
|
if (layout == 2) { // packed
|
|
SET_ERROR(ag);
|
|
break;
|
|
}
|
|
any_aligned_fields = true;
|
|
// Use coerced_align_ri: RL_COERCED_TY with u29_type
|
|
// (AstGen.zig:5115, issue #14).
|
|
ResultLoc align_rl = { .tag = RL_COERCED_TY,
|
|
.data = ZIR_REF_U29_TYPE,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
uint32_t align_ref = exprRl(
|
|
&block_scope, &block_scope.base, align_rl, align_node);
|
|
if (!endsWithNoReturn(&block_scope)) {
|
|
makeBreakInline(&block_scope, decl_inst, align_ref,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
uint32_t raw_len = gzInstructionsLen(&block_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&block_scope);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_len);
|
|
wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
|
|
wipMembersAppendToField(&wm, body_len);
|
|
ag->scratch_inst_len = block_scope.instructions_top;
|
|
}
|
|
|
|
if (have_value) {
|
|
any_default_inits = true;
|
|
// Use coerced_ty with decl_inst when field type is present
|
|
// (AstGen.zig:5132, issue #11).
|
|
ResultLoc value_rl;
|
|
if (field_type == 0) {
|
|
value_rl = RL_NONE_VAL;
|
|
} else {
|
|
uint32_t dref = decl_inst + ZIR_REF_START_INDEX;
|
|
value_rl = (ResultLoc) { .tag = RL_COERCED_TY,
|
|
.data = dref,
|
|
.src_node = 0,
|
|
.ctx = RI_CTX_NONE };
|
|
}
|
|
uint32_t default_ref = exprRl(
|
|
&block_scope, &block_scope.base, value_rl, value_node);
|
|
if (!endsWithNoReturn(&block_scope)) {
|
|
makeBreakInline(&block_scope, decl_inst, default_ref,
|
|
AST_NODE_OFFSET_NONE);
|
|
}
|
|
uint32_t raw_len = gzInstructionsLen(&block_scope);
|
|
const uint32_t* body = gzInstructionsSlice(&block_scope);
|
|
uint32_t body_len = countBodyLenAfterFixups(ag, body, raw_len);
|
|
wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
|
|
wipMembersAppendToField(&wm, body_len);
|
|
ag->scratch_inst_len = block_scope.instructions_top;
|
|
} else if (has_comptime_token) {
|
|
// Comptime field without default init: error
|
|
// (AstGen.zig:5144-5145, issue #13).
|
|
SET_ERROR(ag);
|
|
}
|
|
break;
|
|
}
|
|
default:
|
|
SET_ERROR(ag);
|
|
break;
|
|
}
|
|
}
|
|
|
|
wipMembersFinishBits(&wm);
|
|
|
|
// setStruct (AstGen.zig:5152-5166).
|
|
StructDeclSmall small;
|
|
memset(&small, 0, sizeof(small));
|
|
small.has_captures_len = (namespace.captures_len > 0);
|
|
small.has_decls_len = (decl_count > 0);
|
|
small.has_fields_len = (field_count > 0);
|
|
small.has_backing_int = (backing_int_ref != ZIR_REF_NONE);
|
|
small.known_non_opv = known_non_opv;
|
|
small.known_comptime_only = known_comptime_only;
|
|
small.any_comptime_fields = any_comptime_fields;
|
|
small.any_default_inits = any_default_inits;
|
|
small.any_aligned_fields = any_aligned_fields;
|
|
small.layout = layout;
|
|
small.name_strategy = name_strategy;
|
|
setStruct(ag, decl_inst, node, small, namespace.captures_len, field_count,
|
|
decl_count);
|
|
|
|
// Append: captures, backing_int, decls, fields, bodies
|
|
// (AstGen.zig:5172-5186).
|
|
uint32_t decls_len;
|
|
const uint32_t* decls_slice = wipMembersDeclsSlice(&wm, &decls_len);
|
|
uint32_t fields_len;
|
|
const uint32_t* fields_slice = wipMembersFieldsSlice(&wm, &fields_len);
|
|
|
|
// Compute backing_int_body_len (with fixups) for capacity estimation.
|
|
uint32_t backing_int_body_len = 0;
|
|
if (backing_int_body_raw_len > 0) {
|
|
backing_int_body_len = countBodyLenAfterFixups(
|
|
ag, backing_int_body_raw, backing_int_body_raw_len);
|
|
}
|
|
ensureExtraCapacity(ag,
|
|
namespace.captures_len * 2
|
|
+ (backing_int_ref != ZIR_REF_NONE ? backing_int_body_len + 2 : 0)
|
|
+ decls_len + fields_len + wm.bodies_len);
|
|
// Captures (AstGen.zig:5174-5175).
|
|
for (uint32_t i = 0; i < namespace.captures_len; i++)
|
|
ag->extra[ag->extra_len++] = namespace.capture_keys[i];
|
|
for (uint32_t i = 0; i < namespace.captures_len; i++)
|
|
ag->extra[ag->extra_len++] = namespace.capture_vals[i];
|
|
// backing_int (AstGen.zig:5176-5183).
|
|
if (backing_int_ref != ZIR_REF_NONE) {
|
|
ag->extra[ag->extra_len++] = backing_int_body_len;
|
|
if (backing_int_body_len == 0) {
|
|
ag->extra[ag->extra_len++] = backing_int_ref;
|
|
} else {
|
|
for (uint32_t j = 0; j < backing_int_body_raw_len; j++)
|
|
appendPossiblyRefdBodyInst(ag, backing_int_body_raw[j]);
|
|
}
|
|
}
|
|
free(backing_int_body_raw);
|
|
for (uint32_t i = 0; i < decls_len; i++)
|
|
ag->extra[ag->extra_len++] = decls_slice[i];
|
|
for (uint32_t i = 0; i < fields_len; i++)
|
|
ag->extra[ag->extra_len++] = fields_slice[i];
|
|
for (uint32_t i = 0; i < wm.bodies_len; i++)
|
|
ag->extra[ag->extra_len++] = wm.bodies[i];
|
|
|
|
gzUnstack(&block_scope);
|
|
wipMembersDeinit(&wm);
|
|
scopeNamespaceDeinit(&namespace);
|
|
return decl_inst;
|
|
}
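// Rough example: `struct { a: u32 = 1, b: bool }` has field_count=2,
// any_default_inits=true, and known_non_opv=true (u32 and bool both imply
// more than one value); the per-field data carries each name, a type ref
// or type body length, and for `a` the default-init body length.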
|
|
|
|
// --- AstRlAnnotate (AstRlAnnotate.zig) ---
|
|
// Pre-pass to determine which AST nodes need result locations.
|
|
|
|
typedef struct {
|
|
bool have_type;
|
|
bool have_ptr;
|
|
} RlResultInfo;
|
|
|
|
#define RL_RI_NONE ((RlResultInfo) { false, false })
|
|
#define RL_RI_TYPED_PTR ((RlResultInfo) { true, true })
|
|
#define RL_RI_INFERRED_PTR ((RlResultInfo) { false, true })
|
|
#define RL_RI_TYPE_ONLY ((RlResultInfo) { true, false })
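// These mirror AstRlAnnotate.ResultInfo: have_type means the context
// supplies a result type, have_ptr means it supplies a result pointer.
// E.g. a var decl init with an explicit type gets RL_RI_TYPED_PTR, while
// `const x = ...;` without a type gets RL_RI_INFERRED_PTR.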
|
|
|
|
// Block for label tracking (AstRlAnnotate.zig:56-62).
|
|
typedef struct RlBlock {
|
|
struct RlBlock* parent;
|
|
uint32_t label_token; // UINT32_MAX = no label
|
|
bool is_loop;
|
|
RlResultInfo ri;
|
|
bool consumes_res_ptr;
|
|
} RlBlock;
|
|
|
|
static void nodesNeedRlAdd(AstGenCtx* ag, uint32_t node) {
|
|
if (ag->nodes_need_rl_len >= ag->nodes_need_rl_cap) {
|
|
uint32_t new_cap
|
|
= ag->nodes_need_rl_cap == 0 ? 16 : ag->nodes_need_rl_cap * 2;
|
|
ag->nodes_need_rl
= realloc(ag->nodes_need_rl, new_cap * sizeof(uint32_t));
if (!ag->nodes_need_rl)
exit(1);
ag->nodes_need_rl_cap = new_cap;
|
|
}
|
|
ag->nodes_need_rl[ag->nodes_need_rl_len++] = node;
|
|
}
|
|
|
|
static bool nodesNeedRlContains(const AstGenCtx* ag, uint32_t node) {
|
|
for (uint32_t i = 0; i < ag->nodes_need_rl_len; i++) {
|
|
if (ag->nodes_need_rl[i] == node)
|
|
return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// Compare two identifier tokens by their source text.
|
|
// Handles both regular identifiers and @"..."-quoted identifiers.
|
|
static bool rlTokenIdentEqual(
|
|
const Ast* tree, uint32_t tok_a, uint32_t tok_b) {
|
|
const char* src = tree->source;
|
|
uint32_t a_start = tree->tokens.starts[tok_a];
|
|
uint32_t b_start = tree->tokens.starts[tok_b];
|
|
bool a_quoted = (src[a_start] == '@');
|
|
bool b_quoted = (src[b_start] == '@');
|
|
if (a_quoted != b_quoted)
|
|
return false;
|
|
if (a_quoted) {
|
|
// Both are @"..."-quoted: skip '@"' prefix, compare up to '"'.
|
|
uint32_t ai = a_start + 2;
|
|
uint32_t bi = b_start + 2;
|
|
for (;;) {
|
|
char ca = src[ai];
|
|
char cb = src[bi];
|
|
if (ca == '"' && cb == '"')
|
|
return true;
|
|
if (ca == '"' || cb == '"')
|
|
return false;
|
|
if (ca != cb)
|
|
return false;
|
|
ai++;
|
|
bi++;
|
|
}
|
|
}
|
|
for (uint32_t i = 0;; i++) {
|
|
char ca = src[a_start + i];
|
|
char cb = src[b_start + i];
|
|
bool a_id = (ca >= 'a' && ca <= 'z') || (ca >= 'A' && ca <= 'Z')
|
|
|| (ca >= '0' && ca <= '9') || ca == '_';
|
|
bool b_id = (cb >= 'a' && cb <= 'z') || (cb >= 'A' && cb <= 'Z')
|
|
|| (cb >= '0' && cb <= '9') || cb == '_';
|
|
if (!a_id && !b_id)
|
|
return true;
|
|
if (!a_id || !b_id)
|
|
return false;
|
|
if (ca != cb)
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// Forward declarations.
|
|
static bool rlExpr(
|
|
AstGenCtx* ag, uint32_t node, RlBlock* block, RlResultInfo ri);
|
|
static void rlContainerDecl(AstGenCtx* ag, RlBlock* block, uint32_t node);
|
|
static bool rlBlockExpr(AstGenCtx* ag, RlBlock* parent_block, RlResultInfo ri,
|
|
uint32_t node, const uint32_t* stmts, uint32_t count);
|
|
static bool rlBuiltinCall(AstGenCtx* ag, RlBlock* block, uint32_t node,
|
|
const uint32_t* args, uint32_t nargs);
|
|
|
|
// containerDecl (AstRlAnnotate.zig:89-127).
|
|
static void rlContainerDecl(AstGenCtx* ag, RlBlock* block, uint32_t node) {
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
// Extract arg and members depending on variant.
|
|
// All container decls: recurse arg with type_only, members with none.
|
|
// (The keyword type — struct/union/enum/opaque — doesn't matter for RL.)
|
|
uint32_t member_buf[2];
|
|
const uint32_t* members = NULL;
|
|
uint32_t members_len = 0;
|
|
uint32_t arg_node = 0; // 0 = no arg
|
|
|
|
switch (tag) {
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING: {
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
member_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
member_buf[idx++] = nd.rhs;
|
|
members = member_buf;
|
|
members_len = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
members = tree->extra_data.arr + nd.lhs;
|
|
members_len = nd.rhs - nd.lhs;
|
|
break;
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING: {
|
|
arg_node = nd.lhs;
|
|
uint32_t extra_idx = nd.rhs;
|
|
uint32_t start = tree->extra_data.arr[extra_idx];
|
|
uint32_t end = tree->extra_data.arr[extra_idx + 1];
|
|
members = tree->extra_data.arr + start;
|
|
members_len = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
return;
|
|
}
|
|
|
|
if (arg_node != 0)
|
|
(void)rlExpr(ag, arg_node, block, RL_RI_TYPE_ONLY);
|
|
for (uint32_t i = 0; i < members_len; i++)
|
|
(void)rlExpr(ag, members[i], block, RL_RI_NONE);
|
|
}
|
|
|
|
// blockExpr (AstRlAnnotate.zig:787-814).
|
|
static bool rlBlockExpr(AstGenCtx* ag, RlBlock* parent_block, RlResultInfo ri,
|
|
uint32_t node, const uint32_t* stmts, uint32_t count) {
|
|
const Ast* tree = ag->tree;
|
|
uint32_t lbrace = tree->nodes.main_tokens[node];
|
|
bool is_labeled
|
|
= (lbrace >= 2 && tree->tokens.tags[lbrace - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[lbrace - 2] == TOKEN_IDENTIFIER);
|
|
|
|
if (is_labeled) {
|
|
RlBlock new_block;
|
|
new_block.parent = parent_block;
|
|
new_block.label_token = lbrace - 2;
|
|
new_block.is_loop = false;
|
|
new_block.ri = ri;
|
|
new_block.consumes_res_ptr = false;
|
|
for (uint32_t i = 0; i < count; i++)
|
|
(void)rlExpr(ag, stmts[i], &new_block, RL_RI_NONE);
|
|
if (new_block.consumes_res_ptr)
|
|
nodesNeedRlAdd(ag, node);
|
|
return new_block.consumes_res_ptr;
|
|
} else {
|
|
for (uint32_t i = 0; i < count; i++)
|
|
(void)rlExpr(ag, stmts[i], parent_block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
}
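// Example: `blk: { ... break :blk v; }` is labeled, so its statements are
// annotated in a fresh RlBlock; if a break through the label ends up
// consuming the block's result pointer, the block node is recorded in
// nodes_need_rl.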
|
|
|
|
// builtinCall (AstRlAnnotate.zig:816-1100).
// Simplified: no builtin currently consumes its result location,
// so we just recurse into all args with RL_RI_NONE.
static bool rlBuiltinCall(AstGenCtx* ag, RlBlock* block, uint32_t node,
    const uint32_t* args, uint32_t nargs) {
    (void)node;
    for (uint32_t i = 0; i < nargs; i++)
        (void)rlExpr(ag, args[i], block, RL_RI_NONE);
    return false;
}

// expr (AstRlAnnotate.zig:130-771).
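// Returns true when `node` consumed the result pointer described by `ri`
// (the have_ptr case). Callers that provide a result location record such
// nodes through nodesNeedRlAdd so the main AstGen pass knows a result
// location must actually be materialized there; leaves and operators that
// never forward a result location return false.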
|
|
static bool rlExpr(
|
|
AstGenCtx* ag, uint32_t node, RlBlock* block, RlResultInfo ri) {
|
|
const Ast* tree = ag->tree;
|
|
AstNodeTag tag = tree->nodes.tags[node];
|
|
AstData nd = tree->nodes.datas[node];
|
|
|
|
switch (tag) {
|
|
// Unreachable nodes (AstRlAnnotate.zig:133-142).
|
|
case AST_NODE_ROOT:
|
|
case AST_NODE_SWITCH_CASE_ONE:
|
|
case AST_NODE_SWITCH_CASE_INLINE_ONE:
|
|
case AST_NODE_SWITCH_CASE:
|
|
case AST_NODE_SWITCH_CASE_INLINE:
|
|
case AST_NODE_SWITCH_RANGE:
|
|
case AST_NODE_FOR_RANGE:
|
|
case AST_NODE_ASM_OUTPUT:
|
|
case AST_NODE_ASM_INPUT:
|
|
return false; // unreachable in upstream
|
|
|
|
// errdefer (AstRlAnnotate.zig:144-147).
|
|
case AST_NODE_ERRDEFER:
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// defer (AstRlAnnotate.zig:148-151).
|
|
case AST_NODE_DEFER:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// container_field (AstRlAnnotate.zig:153-167).
|
|
case AST_NODE_CONTAINER_FIELD_INIT: {
|
|
// lhs = type_expr, rhs = value_expr
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
if (nd.rhs != 0)
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
case AST_NODE_CONTAINER_FIELD_ALIGN: {
|
|
// lhs = type_expr, rhs = align_expr
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
if (nd.rhs != 0)
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
case AST_NODE_CONTAINER_FIELD: {
|
|
// lhs = type_expr, rhs = extra index to {align_expr, value_expr}
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
if (nd.rhs != 0) {
|
|
uint32_t align_node = tree->extra_data.arr[nd.rhs];
|
|
uint32_t value_node = tree->extra_data.arr[nd.rhs + 1];
|
|
if (align_node != 0)
|
|
(void)rlExpr(ag, align_node, block, RL_RI_TYPE_ONLY);
|
|
if (value_node != 0)
|
|
(void)rlExpr(ag, value_node, block, RL_RI_TYPE_ONLY);
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// test_decl (AstRlAnnotate.zig:168-171).
|
|
case AST_NODE_TEST_DECL:
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// var_decl (AstRlAnnotate.zig:172-202).
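// Example: for `const s: S = .{ .x = 1 };` the type expression is visited
// with type_only and the init with a typed-pointer result info; the struct
// init consumes that pointer, so the whole declaration lands in
// nodes_need_rl. A `var` declaration never records itself here.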
|
|
case AST_NODE_GLOBAL_VAR_DECL:
|
|
case AST_NODE_LOCAL_VAR_DECL:
|
|
case AST_NODE_SIMPLE_VAR_DECL:
|
|
case AST_NODE_ALIGNED_VAR_DECL: {
|
|
uint32_t type_node = 0;
|
|
uint32_t init_node = 0;
|
|
uint32_t mut_token = tree->nodes.main_tokens[node];
|
|
if (tag == AST_NODE_SIMPLE_VAR_DECL) {
|
|
type_node = nd.lhs;
|
|
init_node = nd.rhs;
|
|
} else if (tag == AST_NODE_LOCAL_VAR_DECL
|
|
|| tag == AST_NODE_GLOBAL_VAR_DECL) {
|
|
type_node = tree->extra_data.arr[nd.lhs];
|
|
init_node = nd.rhs;
|
|
} else { // ALIGNED_VAR_DECL
|
|
init_node = nd.rhs;
|
|
}
|
|
RlResultInfo init_ri;
|
|
if (type_node != 0) {
|
|
(void)rlExpr(ag, type_node, block, RL_RI_TYPE_ONLY);
|
|
init_ri = RL_RI_TYPED_PTR;
|
|
} else {
|
|
init_ri = RL_RI_INFERRED_PTR;
|
|
}
|
|
if (init_node == 0)
|
|
return false;
|
|
bool is_const = (tree->source[tree->tokens.starts[mut_token]] == 'c');
|
|
if (is_const) {
|
|
bool init_consumes_rl = rlExpr(ag, init_node, block, init_ri);
|
|
if (init_consumes_rl)
|
|
nodesNeedRlAdd(ag, node);
|
|
return false;
|
|
} else {
|
|
(void)rlExpr(ag, init_node, block, init_ri);
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// assign (AstRlAnnotate.zig:212-217).
|
|
case AST_NODE_ASSIGN:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPED_PTR);
|
|
return false;
|
|
|
|
// compound assign (AstRlAnnotate.zig:218-240).
|
|
case AST_NODE_ASSIGN_SHL:
|
|
case AST_NODE_ASSIGN_SHL_SAT:
|
|
case AST_NODE_ASSIGN_SHR:
|
|
case AST_NODE_ASSIGN_BIT_AND:
|
|
case AST_NODE_ASSIGN_BIT_OR:
|
|
case AST_NODE_ASSIGN_BIT_XOR:
|
|
case AST_NODE_ASSIGN_DIV:
|
|
case AST_NODE_ASSIGN_SUB:
|
|
case AST_NODE_ASSIGN_SUB_WRAP:
|
|
case AST_NODE_ASSIGN_SUB_SAT:
|
|
case AST_NODE_ASSIGN_MOD:
|
|
case AST_NODE_ASSIGN_ADD:
|
|
case AST_NODE_ASSIGN_ADD_WRAP:
|
|
case AST_NODE_ASSIGN_ADD_SAT:
|
|
case AST_NODE_ASSIGN_MUL:
|
|
case AST_NODE_ASSIGN_MUL_WRAP:
|
|
case AST_NODE_ASSIGN_MUL_SAT:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// shl/shr (AstRlAnnotate.zig:241-246).
|
|
case AST_NODE_SHL:
|
|
case AST_NODE_SHR:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// binary arithmetic/comparison (AstRlAnnotate.zig:247-274).
|
|
case AST_NODE_ADD:
|
|
case AST_NODE_ADD_WRAP:
|
|
case AST_NODE_ADD_SAT:
|
|
case AST_NODE_SUB:
|
|
case AST_NODE_SUB_WRAP:
|
|
case AST_NODE_SUB_SAT:
|
|
case AST_NODE_MUL:
|
|
case AST_NODE_MUL_WRAP:
|
|
case AST_NODE_MUL_SAT:
|
|
case AST_NODE_DIV:
|
|
case AST_NODE_MOD:
|
|
case AST_NODE_SHL_SAT:
|
|
case AST_NODE_BIT_AND:
|
|
case AST_NODE_BIT_OR:
|
|
case AST_NODE_BIT_XOR:
|
|
case AST_NODE_BANG_EQUAL:
|
|
case AST_NODE_EQUAL_EQUAL:
|
|
case AST_NODE_GREATER_THAN:
|
|
case AST_NODE_GREATER_OR_EQUAL:
|
|
case AST_NODE_LESS_THAN:
|
|
case AST_NODE_LESS_OR_EQUAL:
|
|
case AST_NODE_ARRAY_CAT:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// array_mult (AstRlAnnotate.zig:276-281).
|
|
case AST_NODE_ARRAY_MULT:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// error_union, merge_error_sets (AstRlAnnotate.zig:282-287).
|
|
case AST_NODE_ERROR_UNION:
|
|
case AST_NODE_MERGE_ERROR_SETS:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// bool_and, bool_or (AstRlAnnotate.zig:288-295).
|
|
case AST_NODE_BOOL_AND:
|
|
case AST_NODE_BOOL_OR:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// bool_not (AstRlAnnotate.zig:296-299).
|
|
case AST_NODE_BOOL_NOT:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// bit_not, negation, negation_wrap (AstRlAnnotate.zig:300-303).
|
|
case AST_NODE_BIT_NOT:
|
|
case AST_NODE_NEGATION:
|
|
case AST_NODE_NEGATION_WRAP:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// Leaves (AstRlAnnotate.zig:305-320).
|
|
case AST_NODE_IDENTIFIER:
|
|
case AST_NODE_STRING_LITERAL:
|
|
case AST_NODE_MULTILINE_STRING_LITERAL:
|
|
case AST_NODE_NUMBER_LITERAL:
|
|
case AST_NODE_UNREACHABLE_LITERAL:
|
|
case AST_NODE_ASM_SIMPLE:
|
|
case AST_NODE_ASM:
|
|
case AST_NODE_ASM_LEGACY:
|
|
case AST_NODE_ENUM_LITERAL:
|
|
case AST_NODE_ERROR_VALUE:
|
|
case AST_NODE_ANYFRAME_LITERAL:
|
|
case AST_NODE_CONTINUE:
|
|
case AST_NODE_CHAR_LITERAL:
|
|
case AST_NODE_ERROR_SET_DECL:
|
|
return false;
|
|
|
|
// builtin_call (AstRlAnnotate.zig:322-330).
|
|
case AST_NODE_BUILTIN_CALL_TWO:
|
|
case AST_NODE_BUILTIN_CALL_TWO_COMMA: {
|
|
uint32_t args[2];
|
|
uint32_t nargs = 0;
|
|
if (nd.lhs != 0)
|
|
args[nargs++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
args[nargs++] = nd.rhs;
|
|
return rlBuiltinCall(ag, block, node, args, nargs);
|
|
}
|
|
case AST_NODE_BUILTIN_CALL:
|
|
case AST_NODE_BUILTIN_CALL_COMMA: {
|
|
uint32_t start = nd.lhs;
|
|
uint32_t end = nd.rhs;
|
|
return rlBuiltinCall(
|
|
ag, block, node, tree->extra_data.arr + start, end - start);
|
|
}
|
|
|
|
// call (AstRlAnnotate.zig:332-351).
|
|
case AST_NODE_CALL_ONE:
|
|
case AST_NODE_CALL_ONE_COMMA: {
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
if (nd.rhs != 0)
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
case AST_NODE_CALL:
|
|
case AST_NODE_CALL_COMMA: {
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
uint32_t start = tree->extra_data.arr[nd.rhs];
|
|
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
|
|
for (uint32_t i = start; i < end; i++)
|
|
(void)rlExpr(ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
|
|
// return (AstRlAnnotate.zig:353-361).
|
|
case AST_NODE_RETURN:
|
|
if (nd.lhs != 0) {
|
|
bool ret_consumes_rl = rlExpr(ag, nd.lhs, block, RL_RI_TYPED_PTR);
|
|
if (ret_consumes_rl)
|
|
nodesNeedRlAdd(ag, node);
|
|
}
|
|
return false;
|
|
|
|
// field_access (AstRlAnnotate.zig:363-367).
|
|
case AST_NODE_FIELD_ACCESS:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// if_simple, if (AstRlAnnotate.zig:369-387).
|
|
case AST_NODE_IF_SIMPLE:
|
|
case AST_NODE_IF: {
|
|
uint32_t cond_node = nd.lhs;
|
|
uint32_t then_node = 0, else_node = 0;
|
|
if (tag == AST_NODE_IF_SIMPLE) {
|
|
then_node = nd.rhs;
|
|
} else {
|
|
then_node = tree->extra_data.arr[nd.rhs];
|
|
else_node = tree->extra_data.arr[nd.rhs + 1];
|
|
}
|
|
// Detect payload/error token.
|
|
uint32_t last_cond_tok = lastToken(tree, cond_node);
|
|
uint32_t pipe_tok = last_cond_tok + 2;
|
|
bool has_payload = (pipe_tok < tree->tokens.len
|
|
&& tree->tokens.tags[pipe_tok] == TOKEN_PIPE);
|
|
bool has_error = false;
|
|
if (else_node != 0) {
|
|
uint32_t else_tok = lastToken(tree, then_node) + 1;
|
|
has_error = (else_tok + 1 < tree->tokens.len
|
|
&& tree->tokens.tags[else_tok + 1] == TOKEN_PIPE);
|
|
}
|
|
if (has_error || has_payload)
|
|
(void)rlExpr(ag, cond_node, block, RL_RI_NONE);
|
|
else
|
|
(void)rlExpr(ag, cond_node, block, RL_RI_TYPE_ONLY);
|
|
|
|
if (else_node != 0) {
|
|
bool then_uses = rlExpr(ag, then_node, block, ri);
|
|
bool else_uses = rlExpr(ag, else_node, block, ri);
|
|
bool uses_rl = then_uses || else_uses;
|
|
if (uses_rl)
|
|
nodesNeedRlAdd(ag, node);
|
|
return uses_rl;
|
|
} else {
|
|
(void)rlExpr(ag, then_node, block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// while (AstRlAnnotate.zig:389-419).
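// The loop body never receives the surrounding result location; only the
// `else` branch and labeled breaks can consume it. Label detection first
// steps back over an optional `inline` keyword, as in
// `outer: inline while (it.next()) |x| { ... }`.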
|
|
case AST_NODE_WHILE_SIMPLE:
|
|
case AST_NODE_WHILE_CONT:
|
|
case AST_NODE_WHILE: {
|
|
uint32_t cond_node = nd.lhs;
|
|
uint32_t body_node = 0, cont_node = 0, else_node = 0;
|
|
if (tag == AST_NODE_WHILE_SIMPLE) {
|
|
body_node = nd.rhs;
|
|
} else if (tag == AST_NODE_WHILE_CONT) {
|
|
cont_node = tree->extra_data.arr[nd.rhs];
|
|
body_node = tree->extra_data.arr[nd.rhs + 1];
|
|
} else {
|
|
cont_node = tree->extra_data.arr[nd.rhs];
|
|
body_node = tree->extra_data.arr[nd.rhs + 1];
|
|
else_node = tree->extra_data.arr[nd.rhs + 2];
|
|
}
|
|
uint32_t main_tok = tree->nodes.main_tokens[node];
|
|
uint32_t tok_i = main_tok;
|
|
if (tok_i >= 1 && tree->tokens.tags[tok_i - 1] == TOKEN_KEYWORD_INLINE)
|
|
tok_i = tok_i - 1;
|
|
bool is_labeled
|
|
= (tok_i >= 2 && tree->tokens.tags[tok_i - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[tok_i - 2] == TOKEN_IDENTIFIER);
|
|
uint32_t label_token = is_labeled ? tok_i - 2 : UINT32_MAX;
|
|
|
|
// Detect payload/error.
|
|
uint32_t last_cond_tok = lastToken(tree, cond_node);
|
|
uint32_t pipe_tok = last_cond_tok + 2;
|
|
bool has_payload = (pipe_tok < tree->tokens.len
|
|
&& tree->tokens.tags[pipe_tok] == TOKEN_PIPE);
|
|
// Error token detection for while: check for else |err|.
|
|
bool has_error = false;
|
|
if (else_node != 0) {
|
|
uint32_t else_tok = lastToken(tree, body_node) + 1;
|
|
has_error = (else_tok + 1 < tree->tokens.len
|
|
&& tree->tokens.tags[else_tok + 1] == TOKEN_PIPE);
|
|
}
|
|
if (has_error || has_payload)
|
|
(void)rlExpr(ag, cond_node, block, RL_RI_NONE);
|
|
else
|
|
(void)rlExpr(ag, cond_node, block, RL_RI_TYPE_ONLY);
|
|
|
|
RlBlock new_block;
|
|
new_block.parent = block;
|
|
new_block.label_token = label_token;
|
|
new_block.is_loop = true;
|
|
new_block.ri = ri;
|
|
new_block.consumes_res_ptr = false;
|
|
|
|
if (cont_node != 0)
|
|
(void)rlExpr(ag, cont_node, &new_block, RL_RI_NONE);
|
|
(void)rlExpr(ag, body_node, &new_block, RL_RI_NONE);
|
|
bool else_consumes = false;
|
|
if (else_node != 0)
|
|
else_consumes = rlExpr(ag, else_node, block, ri);
|
|
if (new_block.consumes_res_ptr || else_consumes) {
|
|
nodesNeedRlAdd(ag, node);
|
|
return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// for (AstRlAnnotate.zig:421-454).
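// Each input is either a plain expression (visited with none) or an
// `a..b` range whose bounds are visited with type_only, e.g.
// `for (items, 0..) |item, i| { ... }`. The full `for` form packs the
// input count and has_else flag into nd.rhs (AstFor).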
|
|
case AST_NODE_FOR_SIMPLE:
|
|
case AST_NODE_FOR: {
|
|
uint32_t input_buf[16];
|
|
const uint32_t* inputs = NULL;
|
|
uint32_t num_inputs = 0;
|
|
uint32_t body_node = 0;
|
|
uint32_t else_node = 0;
|
|
|
|
if (tag == AST_NODE_FOR_SIMPLE) {
|
|
input_buf[0] = nd.lhs;
|
|
inputs = input_buf;
|
|
num_inputs = 1;
|
|
body_node = nd.rhs;
|
|
} else {
|
|
AstFor for_data;
memcpy(&for_data, &nd.rhs, sizeof(AstFor));
// Use the true input count to locate body/else; only the locally
// buffered inputs are capped by the size of input_buf.
uint32_t total_inputs = for_data.inputs;
num_inputs = total_inputs > 16 ? 16 : total_inputs;
for (uint32_t i = 0; i < num_inputs; i++)
input_buf[i] = tree->extra_data.arr[nd.lhs + i];
inputs = input_buf;
body_node = tree->extra_data.arr[nd.lhs + total_inputs];
if (for_data.has_else)
else_node = tree->extra_data.arr[nd.lhs + total_inputs + 1];
}
|
|
|
|
uint32_t main_tok = tree->nodes.main_tokens[node];
|
|
uint32_t for_tok_i = main_tok;
|
|
if (for_tok_i >= 1
|
|
&& tree->tokens.tags[for_tok_i - 1] == TOKEN_KEYWORD_INLINE)
|
|
for_tok_i = for_tok_i - 1;
|
|
bool is_labeled = (for_tok_i >= 2
|
|
&& tree->tokens.tags[for_tok_i - 1] == TOKEN_COLON
|
|
&& tree->tokens.tags[for_tok_i - 2] == TOKEN_IDENTIFIER);
|
|
uint32_t label_token = is_labeled ? for_tok_i - 2 : UINT32_MAX;
|
|
|
|
for (uint32_t i = 0; i < num_inputs; i++) {
|
|
uint32_t input = inputs[i];
|
|
if (tree->nodes.tags[input] == AST_NODE_FOR_RANGE) {
|
|
AstData range_nd = tree->nodes.datas[input];
|
|
(void)rlExpr(ag, range_nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
if (range_nd.rhs != 0)
|
|
(void)rlExpr(ag, range_nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
} else {
|
|
(void)rlExpr(ag, input, block, RL_RI_NONE);
|
|
}
|
|
}
|
|
|
|
RlBlock new_block;
|
|
new_block.parent = block;
|
|
new_block.label_token = label_token;
|
|
new_block.is_loop = true;
|
|
new_block.ri = ri;
|
|
new_block.consumes_res_ptr = false;
|
|
|
|
(void)rlExpr(ag, body_node, &new_block, RL_RI_NONE);
|
|
bool else_consumes = false;
|
|
if (else_node != 0)
|
|
else_consumes = rlExpr(ag, else_node, block, ri);
|
|
if (new_block.consumes_res_ptr || else_consumes) {
|
|
nodesNeedRlAdd(ag, node);
|
|
return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// slice (AstRlAnnotate.zig:456-480).
|
|
case AST_NODE_SLICE_OPEN:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
case AST_NODE_SLICE: {
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
uint32_t start = tree->extra_data.arr[nd.rhs];
|
|
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
|
|
(void)rlExpr(ag, start, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, end, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
case AST_NODE_SLICE_SENTINEL: {
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
AstSliceSentinel ss;
|
|
ss.start = tree->extra_data.arr[nd.rhs];
|
|
ss.end = tree->extra_data.arr[nd.rhs + 1];
|
|
ss.sentinel = tree->extra_data.arr[nd.rhs + 2];
|
|
(void)rlExpr(ag, ss.start, block, RL_RI_TYPE_ONLY);
|
|
if (ss.end != 0)
|
|
(void)rlExpr(ag, ss.end, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, ss.sentinel, block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
|
|
// deref (AstRlAnnotate.zig:481-484).
|
|
case AST_NODE_DEREF:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// address_of (AstRlAnnotate.zig:485-488).
|
|
case AST_NODE_ADDRESS_OF:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// optional_type (AstRlAnnotate.zig:489-492).
|
|
case AST_NODE_OPTIONAL_TYPE:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// try, nosuspend (AstRlAnnotate.zig:493-495).
|
|
case AST_NODE_TRY:
|
|
case AST_NODE_NOSUSPEND:
|
|
return rlExpr(ag, nd.lhs, block, ri);
|
|
|
|
// grouped_expression, unwrap_optional (AstRlAnnotate.zig:496-498).
|
|
case AST_NODE_GROUPED_EXPRESSION:
|
|
case AST_NODE_UNWRAP_OPTIONAL:
|
|
return rlExpr(ag, nd.lhs, block, ri);
|
|
|
|
// block (AstRlAnnotate.zig:500-508).
|
|
case AST_NODE_BLOCK_TWO:
|
|
case AST_NODE_BLOCK_TWO_SEMICOLON: {
|
|
uint32_t stmts[2];
|
|
uint32_t count = 0;
|
|
if (nd.lhs != 0)
|
|
stmts[count++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
stmts[count++] = nd.rhs;
|
|
return rlBlockExpr(ag, block, ri, node, stmts, count);
|
|
}
|
|
case AST_NODE_BLOCK:
|
|
case AST_NODE_BLOCK_SEMICOLON:
|
|
return rlBlockExpr(ag, block, ri, node, tree->extra_data.arr + nd.lhs,
|
|
nd.rhs - nd.lhs);
|
|
|
|
// anyframe_type (AstRlAnnotate.zig:509-513).
|
|
case AST_NODE_ANYFRAME_TYPE:
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// catch/orelse (AstRlAnnotate.zig:514-522).
|
|
case AST_NODE_CATCH:
|
|
case AST_NODE_ORELSE: {
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
bool rhs_consumes = rlExpr(ag, nd.rhs, block, ri);
|
|
if (rhs_consumes)
|
|
nodesNeedRlAdd(ag, node);
|
|
return rhs_consumes;
|
|
}
|
|
|
|
// ptr_type (AstRlAnnotate.zig:524-546).
|
|
case AST_NODE_PTR_TYPE_ALIGNED:
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
case AST_NODE_PTR_TYPE_SENTINEL:
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
case AST_NODE_PTR_TYPE: {
|
|
const AstPtrType* pt
|
|
= (const AstPtrType*)(tree->extra_data.arr + nd.lhs);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
if (pt->sentinel != UINT32_MAX)
|
|
(void)rlExpr(ag, pt->sentinel, block, RL_RI_TYPE_ONLY);
|
|
if (pt->align_node != UINT32_MAX)
|
|
(void)rlExpr(ag, pt->align_node, block, RL_RI_TYPE_ONLY);
|
|
if (pt->addrspace_node != UINT32_MAX)
|
|
(void)rlExpr(ag, pt->addrspace_node, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
case AST_NODE_PTR_TYPE_BIT_RANGE: {
|
|
const AstPtrTypeBitRange* pt
|
|
= (const AstPtrTypeBitRange*)(tree->extra_data.arr + nd.lhs);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
if (pt->sentinel != UINT32_MAX)
|
|
(void)rlExpr(ag, pt->sentinel, block, RL_RI_TYPE_ONLY);
|
|
// align_node is always present for bit_range.
|
|
(void)rlExpr(ag, pt->align_node, block, RL_RI_TYPE_ONLY);
|
|
if (pt->addrspace_node != UINT32_MAX)
|
|
(void)rlExpr(ag, pt->addrspace_node, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, pt->bit_range_start, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, pt->bit_range_end, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
|
|
// container_decl (AstRlAnnotate.zig:548-564).
|
|
case AST_NODE_CONTAINER_DECL:
|
|
case AST_NODE_CONTAINER_DECL_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_ARG:
|
|
case AST_NODE_CONTAINER_DECL_ARG_TRAILING:
|
|
case AST_NODE_CONTAINER_DECL_TWO:
|
|
case AST_NODE_CONTAINER_DECL_TWO_TRAILING:
|
|
case AST_NODE_TAGGED_UNION:
|
|
case AST_NODE_TAGGED_UNION_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG:
|
|
case AST_NODE_TAGGED_UNION_ENUM_TAG_TRAILING:
|
|
case AST_NODE_TAGGED_UNION_TWO:
|
|
case AST_NODE_TAGGED_UNION_TWO_TRAILING:
|
|
rlContainerDecl(ag, block, node);
|
|
return false;
|
|
|
|
// break (AstRlAnnotate.zig:566-596).
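// A break value is annotated with the result info of the block or loop it
// targets; if the value consumes that result pointer, the target block's
// consumes_res_ptr flag is set so the owning construct (labeled block,
// while, for) records itself in nodes_need_rl.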
|
|
case AST_NODE_BREAK: {
|
|
uint32_t opt_label_tok = nd.lhs; // 0 = no label
|
|
uint32_t rhs_node = nd.rhs; // 0 = void break
|
|
if (rhs_node == 0)
|
|
return false;
|
|
|
|
RlBlock* opt_cur_block = block;
|
|
if (opt_label_tok != 0) {
|
|
// Labeled break: find matching block.
|
|
while (opt_cur_block != NULL) {
|
|
if (opt_cur_block->label_token != UINT32_MAX
|
|
&& rlTokenIdentEqual(
|
|
tree, opt_cur_block->label_token, opt_label_tok))
|
|
break;
|
|
opt_cur_block = opt_cur_block->parent;
|
|
}
|
|
} else {
|
|
// No label: breaking from innermost loop.
|
|
while (opt_cur_block != NULL) {
|
|
if (opt_cur_block->is_loop)
|
|
break;
|
|
opt_cur_block = opt_cur_block->parent;
|
|
}
|
|
}
|
|
|
|
if (opt_cur_block != NULL) {
|
|
bool consumes = rlExpr(ag, rhs_node, block, opt_cur_block->ri);
|
|
if (consumes)
|
|
opt_cur_block->consumes_res_ptr = true;
|
|
} else {
|
|
(void)rlExpr(ag, rhs_node, block, RL_RI_NONE);
|
|
}
|
|
return false;
|
|
}
|
|
|
|
// array_type (AstRlAnnotate.zig:598-611).
|
|
case AST_NODE_ARRAY_TYPE:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
case AST_NODE_ARRAY_TYPE_SENTINEL: {
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
uint32_t elem_type = tree->extra_data.arr[nd.rhs + 1];
|
|
uint32_t sentinel = tree->extra_data.arr[nd.rhs];
|
|
(void)rlExpr(ag, elem_type, block, RL_RI_TYPE_ONLY);
|
|
(void)rlExpr(ag, sentinel, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
|
|
// array_access (AstRlAnnotate.zig:612-617).
|
|
case AST_NODE_ARRAY_ACCESS:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
|
|
// comptime (AstRlAnnotate.zig:618-623).
|
|
case AST_NODE_COMPTIME:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// switch (AstRlAnnotate.zig:624-650).
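// Case items (including `a...b` ranges) never get a result location; each
// case body inherits the switch's own `ri`, and if any prong consumes the
// result pointer the whole switch is added to nodes_need_rl.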
|
|
case AST_NODE_SWITCH:
|
|
case AST_NODE_SWITCH_COMMA: {
|
|
uint32_t cond_node = nd.lhs;
|
|
uint32_t extra_idx = nd.rhs;
|
|
uint32_t cases_start = tree->extra_data.arr[extra_idx];
|
|
uint32_t cases_end = tree->extra_data.arr[extra_idx + 1];
|
|
|
|
(void)rlExpr(ag, cond_node, block, RL_RI_NONE);
|
|
|
|
bool any_consumed = false;
|
|
for (uint32_t ci = cases_start; ci < cases_end; ci++) {
|
|
uint32_t case_node = tree->extra_data.arr[ci];
|
|
AstNodeTag ct = tree->nodes.tags[case_node];
|
|
AstData cd = tree->nodes.datas[case_node];
|
|
|
|
// Process case values.
|
|
if (ct == AST_NODE_SWITCH_CASE_ONE
|
|
|| ct == AST_NODE_SWITCH_CASE_INLINE_ONE) {
|
|
if (cd.lhs != 0) {
|
|
if (tree->nodes.tags[cd.lhs] == AST_NODE_SWITCH_RANGE) {
|
|
AstData rd = tree->nodes.datas[cd.lhs];
|
|
(void)rlExpr(ag, rd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, rd.rhs, block, RL_RI_NONE);
|
|
} else {
|
|
(void)rlExpr(ag, cd.lhs, block, RL_RI_NONE);
|
|
}
|
|
}
|
|
} else {
|
|
// SWITCH_CASE / SWITCH_CASE_INLINE: SubRange[lhs]
|
|
uint32_t items_start = tree->extra_data.arr[cd.lhs];
|
|
uint32_t items_end = tree->extra_data.arr[cd.lhs + 1];
|
|
for (uint32_t ii = items_start; ii < items_end; ii++) {
|
|
uint32_t item = tree->extra_data.arr[ii];
|
|
if (tree->nodes.tags[item] == AST_NODE_SWITCH_RANGE) {
|
|
AstData rd = tree->nodes.datas[item];
|
|
(void)rlExpr(ag, rd.lhs, block, RL_RI_NONE);
|
|
(void)rlExpr(ag, rd.rhs, block, RL_RI_NONE);
|
|
} else {
|
|
(void)rlExpr(ag, item, block, RL_RI_NONE);
|
|
}
|
|
}
|
|
}
|
|
// Process case target expr.
|
|
if (rlExpr(ag, cd.rhs, block, ri))
|
|
any_consumed = true;
|
|
}
|
|
if (any_consumed)
|
|
nodesNeedRlAdd(ag, node);
|
|
return any_consumed;
|
|
}
|
|
|
|
// suspend (AstRlAnnotate.zig:651-654).
|
|
case AST_NODE_SUSPEND:
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// resume (AstRlAnnotate.zig:655-658).
|
|
case AST_NODE_RESUME:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
|
|
// array_init (AstRlAnnotate.zig:660-695).
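// Typed inits such as `[2]u8{ 1, 2 }` evaluate elements with type_only and
// never forward the caller's result location. Anonymous inits (`.{ 1, 2 }`)
// forward `ri` into every element when a result type is known, and consume
// the result pointer exactly when one was offered (ri.have_ptr).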
|
|
case AST_NODE_ARRAY_INIT_ONE:
|
|
case AST_NODE_ARRAY_INIT_ONE_COMMA:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA:
|
|
case AST_NODE_ARRAY_INIT_DOT:
|
|
case AST_NODE_ARRAY_INIT_DOT_COMMA:
|
|
case AST_NODE_ARRAY_INIT:
|
|
case AST_NODE_ARRAY_INIT_COMMA: {
|
|
// Extract type_expr and elements.
|
|
uint32_t type_expr = 0;
|
|
uint32_t elem_buf[2];
|
|
const uint32_t* elems = NULL;
|
|
uint32_t nelem = 0;
|
|
switch (tag) {
|
|
case AST_NODE_ARRAY_INIT_ONE:
|
|
case AST_NODE_ARRAY_INIT_ONE_COMMA:
|
|
type_expr = nd.lhs;
|
|
if (nd.rhs != 0) {
|
|
elem_buf[0] = nd.rhs;
|
|
elems = elem_buf;
|
|
nelem = 1;
|
|
}
|
|
break;
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO:
|
|
case AST_NODE_ARRAY_INIT_DOT_TWO_COMMA: {
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
elem_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
elem_buf[idx++] = nd.rhs;
|
|
elems = elem_buf;
|
|
nelem = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_ARRAY_INIT_DOT:
|
|
case AST_NODE_ARRAY_INIT_DOT_COMMA:
|
|
elems = tree->extra_data.arr + nd.lhs;
|
|
nelem = nd.rhs - nd.lhs;
|
|
break;
|
|
case AST_NODE_ARRAY_INIT:
|
|
case AST_NODE_ARRAY_INIT_COMMA: {
|
|
type_expr = nd.lhs;
|
|
uint32_t start = tree->extra_data.arr[nd.rhs];
|
|
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
|
|
elems = tree->extra_data.arr + start;
|
|
nelem = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
break;
|
|
}
|
|
if (type_expr != 0) {
|
|
(void)rlExpr(ag, type_expr, block, RL_RI_NONE);
|
|
for (uint32_t i = 0; i < nelem; i++)
|
|
(void)rlExpr(ag, elems[i], block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
if (ri.have_type) {
|
|
for (uint32_t i = 0; i < nelem; i++)
|
|
(void)rlExpr(ag, elems[i], block, ri);
|
|
return ri.have_ptr;
|
|
} else {
|
|
for (uint32_t i = 0; i < nelem; i++)
|
|
(void)rlExpr(ag, elems[i], block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// struct_init (AstRlAnnotate.zig:697-732).
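// Same shape as the array_init case above: anonymous `.{ .field = ... }`
// inits forward `ri` into each field initializer and consume the result
// pointer when one was offered.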
|
|
case AST_NODE_STRUCT_INIT_ONE:
|
|
case AST_NODE_STRUCT_INIT_ONE_COMMA:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA:
|
|
case AST_NODE_STRUCT_INIT_DOT:
|
|
case AST_NODE_STRUCT_INIT_DOT_COMMA:
|
|
case AST_NODE_STRUCT_INIT:
|
|
case AST_NODE_STRUCT_INIT_COMMA: {
|
|
uint32_t type_expr = 0;
|
|
uint32_t field_buf[2];
|
|
const uint32_t* fields = NULL;
|
|
uint32_t nfields = 0;
|
|
switch (tag) {
|
|
case AST_NODE_STRUCT_INIT_ONE:
|
|
case AST_NODE_STRUCT_INIT_ONE_COMMA:
|
|
type_expr = nd.lhs;
|
|
if (nd.rhs != 0) {
|
|
field_buf[0] = nd.rhs;
|
|
fields = field_buf;
|
|
nfields = 1;
|
|
}
|
|
break;
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO:
|
|
case AST_NODE_STRUCT_INIT_DOT_TWO_COMMA: {
|
|
uint32_t idx = 0;
|
|
if (nd.lhs != 0)
|
|
field_buf[idx++] = nd.lhs;
|
|
if (nd.rhs != 0)
|
|
field_buf[idx++] = nd.rhs;
|
|
fields = field_buf;
|
|
nfields = idx;
|
|
break;
|
|
}
|
|
case AST_NODE_STRUCT_INIT_DOT:
|
|
case AST_NODE_STRUCT_INIT_DOT_COMMA:
|
|
fields = tree->extra_data.arr + nd.lhs;
|
|
nfields = nd.rhs - nd.lhs;
|
|
break;
|
|
case AST_NODE_STRUCT_INIT:
|
|
case AST_NODE_STRUCT_INIT_COMMA: {
|
|
type_expr = nd.lhs;
|
|
uint32_t start = tree->extra_data.arr[nd.rhs];
|
|
uint32_t end = tree->extra_data.arr[nd.rhs + 1];
|
|
fields = tree->extra_data.arr + start;
|
|
nfields = end - start;
|
|
break;
|
|
}
|
|
default:
|
|
break;
|
|
}
|
|
if (type_expr != 0) {
|
|
(void)rlExpr(ag, type_expr, block, RL_RI_NONE);
|
|
for (uint32_t i = 0; i < nfields; i++)
|
|
(void)rlExpr(ag, fields[i], block, RL_RI_TYPE_ONLY);
|
|
return false;
|
|
}
|
|
if (ri.have_type) {
|
|
for (uint32_t i = 0; i < nfields; i++)
|
|
(void)rlExpr(ag, fields[i], block, ri);
|
|
return ri.have_ptr;
|
|
} else {
|
|
for (uint32_t i = 0; i < nfields; i++)
|
|
(void)rlExpr(ag, fields[i], block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
}
|
|
|
|
// fn_proto, fn_decl (AstRlAnnotate.zig:734-770).
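// Every prototype component (parameter types, align/addrspace/section/
// callconv expressions, and the return type) is a type position and is
// visited with type_only; only a fn_decl body is visited with none.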
|
|
case AST_NODE_FN_PROTO_SIMPLE:
|
|
case AST_NODE_FN_PROTO_MULTI:
|
|
case AST_NODE_FN_PROTO_ONE:
|
|
case AST_NODE_FN_PROTO:
|
|
case AST_NODE_FN_DECL: {
|
|
// Extract return type and body.
|
|
uint32_t return_type = 0;
|
|
uint32_t body_node = 0;
|
|
|
|
if (tag == AST_NODE_FN_DECL) {
|
|
body_node = nd.rhs;
|
|
// fn_proto is nd.lhs
|
|
uint32_t proto = nd.lhs;
|
|
AstNodeTag ptag = tree->nodes.tags[proto];
|
|
AstData pnd = tree->nodes.datas[proto];
|
|
if (ptag == AST_NODE_FN_PROTO_SIMPLE) {
|
|
return_type = pnd.rhs;
|
|
if (pnd.lhs != 0)
|
|
(void)rlExpr(ag, pnd.lhs, block, RL_RI_TYPE_ONLY);
|
|
} else if (ptag == AST_NODE_FN_PROTO_MULTI) {
|
|
return_type = pnd.rhs;
|
|
uint32_t ps = tree->extra_data.arr[pnd.lhs];
|
|
uint32_t pe = tree->extra_data.arr[pnd.lhs + 1];
|
|
for (uint32_t i = ps; i < pe; i++)
|
|
(void)rlExpr(
|
|
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
|
|
} else if (ptag == AST_NODE_FN_PROTO_ONE) {
|
|
return_type = pnd.rhs;
|
|
AstFnProtoOne fp;
|
|
fp.param = tree->extra_data.arr[pnd.lhs];
|
|
fp.align_expr = tree->extra_data.arr[pnd.lhs + 1];
|
|
fp.addrspace_expr = tree->extra_data.arr[pnd.lhs + 2];
|
|
fp.section_expr = tree->extra_data.arr[pnd.lhs + 3];
|
|
fp.callconv_expr = tree->extra_data.arr[pnd.lhs + 4];
|
|
if (fp.param != 0)
|
|
(void)rlExpr(ag, fp.param, block, RL_RI_TYPE_ONLY);
|
|
if (fp.align_expr != 0)
|
|
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.addrspace_expr != 0)
|
|
(void)rlExpr(
|
|
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.section_expr != 0)
|
|
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.callconv_expr != 0)
|
|
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
|
|
} else if (ptag == AST_NODE_FN_PROTO) {
|
|
return_type = pnd.rhs;
|
|
AstFnProto fp;
|
|
fp.params_start = tree->extra_data.arr[pnd.lhs];
|
|
fp.params_end = tree->extra_data.arr[pnd.lhs + 1];
|
|
fp.align_expr = tree->extra_data.arr[pnd.lhs + 2];
|
|
fp.addrspace_expr = tree->extra_data.arr[pnd.lhs + 3];
|
|
fp.section_expr = tree->extra_data.arr[pnd.lhs + 4];
|
|
fp.callconv_expr = tree->extra_data.arr[pnd.lhs + 5];
|
|
for (uint32_t i = fp.params_start; i < fp.params_end; i++)
|
|
(void)rlExpr(
|
|
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
|
|
if (fp.align_expr != 0)
|
|
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.addrspace_expr != 0)
|
|
(void)rlExpr(
|
|
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.section_expr != 0)
|
|
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.callconv_expr != 0)
|
|
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
|
|
}
|
|
} else {
|
|
// Standalone fn_proto (no body).
|
|
if (tag == AST_NODE_FN_PROTO_SIMPLE) {
|
|
return_type = nd.rhs;
|
|
if (nd.lhs != 0)
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_TYPE_ONLY);
|
|
} else if (tag == AST_NODE_FN_PROTO_MULTI) {
|
|
return_type = nd.rhs;
|
|
uint32_t ps = tree->extra_data.arr[nd.lhs];
|
|
uint32_t pe = tree->extra_data.arr[nd.lhs + 1];
|
|
for (uint32_t i = ps; i < pe; i++)
|
|
(void)rlExpr(
|
|
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
|
|
} else if (tag == AST_NODE_FN_PROTO_ONE) {
|
|
return_type = nd.rhs;
|
|
AstFnProtoOne fp;
|
|
fp.param = tree->extra_data.arr[nd.lhs];
|
|
fp.align_expr = tree->extra_data.arr[nd.lhs + 1];
|
|
fp.addrspace_expr = tree->extra_data.arr[nd.lhs + 2];
|
|
fp.section_expr = tree->extra_data.arr[nd.lhs + 3];
|
|
fp.callconv_expr = tree->extra_data.arr[nd.lhs + 4];
|
|
if (fp.param != 0)
|
|
(void)rlExpr(ag, fp.param, block, RL_RI_TYPE_ONLY);
|
|
if (fp.align_expr != 0)
|
|
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.addrspace_expr != 0)
|
|
(void)rlExpr(
|
|
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.section_expr != 0)
|
|
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.callconv_expr != 0)
|
|
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
|
|
} else if (tag == AST_NODE_FN_PROTO) {
|
|
return_type = nd.rhs;
|
|
AstFnProto fp;
|
|
fp.params_start = tree->extra_data.arr[nd.lhs];
|
|
fp.params_end = tree->extra_data.arr[nd.lhs + 1];
|
|
fp.align_expr = tree->extra_data.arr[nd.lhs + 2];
|
|
fp.addrspace_expr = tree->extra_data.arr[nd.lhs + 3];
|
|
fp.section_expr = tree->extra_data.arr[nd.lhs + 4];
|
|
fp.callconv_expr = tree->extra_data.arr[nd.lhs + 5];
|
|
for (uint32_t i = fp.params_start; i < fp.params_end; i++)
|
|
(void)rlExpr(
|
|
ag, tree->extra_data.arr[i], block, RL_RI_TYPE_ONLY);
|
|
if (fp.align_expr != 0)
|
|
(void)rlExpr(ag, fp.align_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.addrspace_expr != 0)
|
|
(void)rlExpr(
|
|
ag, fp.addrspace_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.section_expr != 0)
|
|
(void)rlExpr(ag, fp.section_expr, block, RL_RI_TYPE_ONLY);
|
|
if (fp.callconv_expr != 0)
|
|
(void)rlExpr(ag, fp.callconv_expr, block, RL_RI_TYPE_ONLY);
|
|
}
|
|
}
|
|
|
|
if (return_type != 0)
|
|
(void)rlExpr(ag, return_type, block, RL_RI_TYPE_ONLY);
|
|
if (body_node != 0)
|
|
(void)rlExpr(ag, body_node, block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
|
|
// Remaining: usingnamespace, await, assign_destructure, async calls.
|
|
case AST_NODE_USINGNAMESPACE:
|
|
return false;
|
|
case AST_NODE_AWAIT:
|
|
(void)rlExpr(ag, nd.lhs, block, RL_RI_NONE);
|
|
return false;
|
|
case AST_NODE_ASSIGN_DESTRUCTURE: {
|
|
uint32_t extra_start = nd.lhs;
|
|
uint32_t variable_count = tree->extra_data.arr[extra_start];
|
|
for (uint32_t i = 0; i < variable_count; i++)
|
|
(void)rlExpr(ag, tree->extra_data.arr[extra_start + 1 + i], block,
|
|
RL_RI_NONE);
|
|
(void)rlExpr(ag, nd.rhs, block, RL_RI_NONE);
|
|
return false;
|
|
}
|
|
case AST_NODE_ASYNC_CALL_ONE:
|
|
case AST_NODE_ASYNC_CALL_ONE_COMMA:
|
|
case AST_NODE_ASYNC_CALL:
|
|
case AST_NODE_ASYNC_CALL_COMMA:
|
|
return false; // async not relevant
|
|
|
|
default:
|
|
return false;
|
|
}
|
|
}
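
// The pre-pass below mirrors AstRlAnnotate: it records, via nodesNeedRlAdd,
// the nodes whose sub-expressions consume a result pointer, so the main
// AstGen pass can decide where a result location has to be materialized.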
// astRlAnnotate (AstRlAnnotate.zig:64-83).
// Entry point: run the RL annotation pre-pass.
static void astRlAnnotate(AstGenCtx* ag) {
    const Ast* tree = ag->tree;
    if (tree->has_error)
        return;

    // Get root container members (same as in astGen).
    AstData root_data = tree->nodes.datas[0];
    uint32_t members_start = root_data.lhs;
    uint32_t members_end = root_data.rhs;
    const uint32_t* members = tree->extra_data.arr + members_start;
    uint32_t members_len = members_end - members_start;

    for (uint32_t i = 0; i < members_len; i++)
        (void)rlExpr(ag, members[i], NULL, RL_RI_NONE);
}

// --- Public API: astGen (AstGen.zig:144) ---
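// Typical call sequence (sketch; the parser entry point's real name depends
// on the surrounding project and is shown here as parse()):
//
//   Ast ast = parse(source);
//   Zir zir = astGen(&ast);
//   if (zir.has_compile_errors) { /* errors are encoded in zir.extra */ }
//   /* caller owns zir.inst_tags / inst_datas / extra / string_bytes */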

Zir astGen(const Ast* ast) {
    AstGenCtx ag;
    memset(&ag, 0, sizeof(ag));
    ag.tree = ast;

    // Initial allocations (AstGen.zig:162-172).
    uint32_t nodes_len = ast->nodes.len;
    uint32_t init_cap = nodes_len > 8 ? nodes_len : 8;

    ag.inst_cap = init_cap;
    ag.inst_tags = ARR_INIT(ZirInstTag, ag.inst_cap);
    ag.inst_datas = ARR_INIT(ZirInstData, ag.inst_cap);

    ag.extra_cap = init_cap + ZIR_EXTRA_RESERVED_COUNT;
    ag.extra = ARR_INIT(uint32_t, ag.extra_cap);

    ag.string_bytes_cap = 16;
    ag.string_bytes = ARR_INIT(uint8_t, ag.string_bytes_cap);

    // String table index 0 is reserved for NullTerminatedString.empty
    // (AstGen.zig:163).
    ag.string_bytes[0] = 0;
    ag.string_bytes_len = 1;

    // Reserve extra[0..1] (AstGen.zig:170-172).
    ag.extra[ZIR_EXTRA_COMPILE_ERRORS] = 0;
    ag.extra[ZIR_EXTRA_IMPORTS] = 0;
    ag.extra_len = ZIR_EXTRA_RESERVED_COUNT;

    // Run AstRlAnnotate pre-pass (AstGen.zig:150-151).
    astRlAnnotate(&ag);

    // Set up root GenZir scope (AstGen.zig:176-185).
    GenZir gen_scope;
    memset(&gen_scope, 0, sizeof(gen_scope));
    gen_scope.base.tag = SCOPE_GEN_ZIR;
    gen_scope.parent = NULL;
    gen_scope.astgen = &ag;
    gen_scope.is_comptime = true;
    gen_scope.decl_node_index = 0; // root
    gen_scope.decl_line = 0;
    gen_scope.break_block = UINT32_MAX;
    gen_scope.any_defer_node = UINT32_MAX;

    // Get root container members: containerDeclRoot (AstGen.zig:191-195).
    AstData root_data = ast->nodes.datas[0];
    uint32_t members_start = root_data.lhs;
    uint32_t members_end = root_data.rhs;
    const uint32_t* members = ast->extra_data.arr + members_start;
    uint32_t members_len = members_end - members_start;

    structDeclInner(
        &ag, &gen_scope, 0, members, members_len, 0, 0, 0 /* parent */);

    // Write imports list (AstGen.zig:227-244).
    writeImports(&ag);

    // Build output Zir (AstGen.zig:211-239).
    Zir zir;
    zir.inst_len = ag.inst_len;
    zir.inst_cap = ag.inst_cap;
    zir.inst_tags = ag.inst_tags;
    zir.inst_datas = ag.inst_datas;
    zir.extra_len = ag.extra_len;
    zir.extra_cap = ag.extra_cap;
    zir.extra = ag.extra;
    zir.string_bytes_len = ag.string_bytes_len;
    zir.string_bytes_cap = ag.string_bytes_cap;
    zir.string_bytes = ag.string_bytes;
    zir.has_compile_errors = ag.has_compile_errors;

    free(ag.imports);
    free(ag.scratch_instructions);
    free(ag.scratch_extra);
    free(ag.ref_table_keys);
    free(ag.ref_table_vals);
    free(ag.nodes_need_rl);
    free(ag.string_table);

    return zir;
}