astgen: fix structDeclInner layout, coercion, fn_proto handling, and error diagnostics

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-14 01:01:21 +00:00
parent d08206471b
commit 6204bb245b
2 changed files with 373 additions and 36 deletions

astgen.c

@@ -2331,7 +2331,11 @@ static bool tokenIsUnderscore(const Ast* tree, uint32_t ident_token);
static uint32_t containerDecl(
GenZir* gz, Scope* scope, uint32_t node, uint8_t name_strategy);
static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
- const uint32_t* members, uint32_t members_len, uint8_t name_strategy);
+ const uint32_t* members, uint32_t members_len, uint8_t layout,
+ uint32_t backing_int_node, uint8_t name_strategy);
+ static uint32_t tupleDecl(AstGenCtx* ag, GenZir* gz, uint32_t node,
+ const uint32_t* members, uint32_t members_len, uint8_t layout,
+ uint32_t backing_int_node);
static uint32_t enumDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
const uint32_t* members, uint32_t members_len, uint32_t arg_node,
uint8_t name_strategy);
@@ -2473,6 +2477,7 @@ static uint32_t tryResolvePrimitiveIdent(GenZir* gz, uint32_t node);
#define COMPTIME_REASON_ADDRSPACE 51
#define COMPTIME_REASON_COMPTIME_KEYWORD 53
#define COMPTIME_REASON_SWITCH_ITEM 56
+ #define COMPTIME_REASON_TUPLE_FIELD_DEFAULT_VALUE 57

// Mirrors comptimeExpr2 (AstGen.zig:1982).
// Evaluates a node in a comptime block_comptime scope.
@@ -9628,16 +9633,27 @@ static void testDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
}

// --- fnDecl (AstGen.zig:4067) / fnDeclInner (AstGen.zig:4228) ---
- // Handles non-extern function declarations with bodies, including params.
+ // Handles function declarations with bodies (fn_decl) and
+ // function prototypes without bodies (fn_proto*).
static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
uint32_t* decl_idx, uint32_t node) {
const Ast* tree = ag->tree;
+ AstNodeTag node_tag = tree->nodes.tags[node];
AstData nd = tree->nodes.datas[node];
// For fn_decl: data.lhs = fn_proto node, data.rhs = body node.
- uint32_t proto_node = nd.lhs;
- uint32_t body_node = nd.rhs;
+ // For fn_proto*: the node itself IS the proto, no body.
+ uint32_t proto_node;
+ uint32_t body_node;
+ if (node_tag == AST_NODE_FN_DECL) {
+ proto_node = nd.lhs;
+ body_node = nd.rhs;
+ } else {
+ // fn_proto_simple, fn_proto_multi, fn_proto_one, fn_proto
+ proto_node = node;
+ body_node = 0;
+ }

// Get function name token (main_token of proto + 1 = fn name).
uint32_t fn_token = tree->nodes.main_tokens[proto_node];
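
A minimal standalone sketch of the dispatch above, using hypothetical tag and data types rather than the project's real Ast structures; it shows how a bodiless prototype (e.g. `extern fn f() void;`, which the parser turns into an fn_proto* node) reaches fnDecl with body_node == 0, while `fn f() void {}` arrives as fn_decl with lhs = proto and rhs = body:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the parser's node kinds and payload;
   the real code uses AstNodeTag / AstData from astgen.c. */
typedef enum { NODE_FN_DECL, NODE_FN_PROTO_SIMPLE } NodeTag;
typedef struct { uint32_t lhs, rhs; } NodeData;

/* Mirrors the dispatch above: fn_decl carries the proto in lhs and the
   body in rhs; a bare prototype IS the proto and has no body. */
static void resolve(NodeTag tag, NodeData d, uint32_t node,
    uint32_t* proto_node, uint32_t* body_node) {
    if (tag == NODE_FN_DECL) {
        *proto_node = d.lhs;
        *body_node = d.rhs;
    } else {
        *proto_node = node;
        *body_node = 0; /* no body: takes the extern/prototype path */
    }
}

int main(void) {
    uint32_t proto, body;
    resolve(NODE_FN_DECL, (NodeData) { 5, 6 }, 7, &proto, &body);
    printf("fn_decl:  proto=%u body=%u\n", proto, body);
    resolve(NODE_FN_PROTO_SIMPLE, (NodeData) { 0, 0 }, 9, &proto, &body);
    printf("fn_proto: proto=%u body=%u\n", proto, body);
    return 0;
}
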
@@ -10037,6 +10053,24 @@ static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
}
gzUnstack(&cc_gz);

// --- Body handling ---
// For fn_proto* (no body): extern function, emit type only
// (AstGen.zig:4136-4164).
// For fn_decl (has body): emit function value (AstGen.zig:4197-4201).
uint32_t func_ref;
if (body_node == 0) {
// fn_proto without body: extern function type.
// Upstream emits fnProtoExprInner; we SET_ERROR for now as
// fnProtoExprInner is not yet ported.
// TODO: implement fnProtoExprInner for extern fn support.
SET_ERROR(ag);
free(ret_body);
free(cc_body);
gzUnstack(&decl_gz);
ag->within_fn = prev_within_fn;
return;
}

// --- Body (AstGen.zig:4415-4424) ---
GenZir body_gz;
memset(&body_gz, 0, sizeof(body_gz));
@@ -10125,7 +10159,6 @@ static void fnDecl(AstGenCtx* ag, GenZir* gz, uint32_t* wip_decl_insts,
// 12112-12173).
bool need_fancy = cc_ref != ZIR_REF_NONE || is_var_args
|| noalias_bits != 0 || is_noinline;
- uint32_t func_ref;
if (need_fancy) {
func_ref = addFuncFancy(&decl_gz, node, body_node, decl_inst, ret_ref,
ret_body, ret_body_len, cc_ref, cc_body, cc_body_len, fn_body,
@@ -11033,10 +11066,21 @@ static uint32_t containerDecl(
TokenizerTag kw_tag = tree->tokens.tags[main_token];
uint32_t decl_inst;
switch (kw_tag) {
- case TOKEN_KEYWORD_STRUCT:
-     decl_inst = structDeclInner(
-         ag, gz, node, members, members_len, name_strategy);
+ case TOKEN_KEYWORD_STRUCT: {
+ // Extract layout from token before main_token (AstGen.zig:5489-5493).
+ // auto=0, extern=1, packed=2.
uint8_t layout = 0; // auto
if (main_token > 0) {
TokenizerTag prev_tag = tree->tokens.tags[main_token - 1];
if (prev_tag == TOKEN_KEYWORD_PACKED)
layout = 2;
else if (prev_tag == TOKEN_KEYWORD_EXTERN)
layout = 1;
}
decl_inst = structDeclInner(ag, gz, node, members, members_len, layout,
arg_node, name_strategy);
break;
+ }
case TOKEN_KEYWORD_ENUM:
decl_inst = enumDeclInner(
ag, gz, node, members, members_len, arg_node, name_strategy);
@@ -11044,7 +11088,7 @@ static uint32_t containerDecl(
default:
// union/opaque: fall back to struct for now.
decl_inst = structDeclInner(
- ag, gz, node, members, members_len, name_strategy);
+ ag, gz, node, members, members_len, 0, 0, name_strategy);
break;
}
(void)scope;
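
The layout extraction above keys solely off the token immediately preceding the `struct` keyword. A standalone sketch of the same mapping with a made-up token enum (the real code reads TokenizerTag values out of tree->tokens.tags):

#include <stdint.h>
#include <stdio.h>

/* Made-up token tags; the real code reads tree->tokens.tags[]. */
typedef enum { TOK_IDENTIFIER, TOK_KEYWORD_STRUCT, TOK_KEYWORD_PACKED,
    TOK_KEYWORD_EXTERN } Tok;

/* Same mapping as above: auto = 0, extern = 1, packed = 2. */
static int layout_from_prev_token(const Tok* toks, uint32_t struct_tok) {
    if (struct_tok == 0)
        return 0; /* nothing before "struct": auto layout */
    if (toks[struct_tok - 1] == TOK_KEYWORD_PACKED)
        return 2;
    if (toks[struct_tok - 1] == TOK_KEYWORD_EXTERN)
        return 1;
    return 0;
}

int main(void) {
    Tok plain[] = { TOK_IDENTIFIER, TOK_KEYWORD_STRUCT };         /* struct {}        */
    Tok packed_kw[] = { TOK_KEYWORD_PACKED, TOK_KEYWORD_STRUCT }; /* packed struct {} */
    Tok extern_kw[] = { TOK_KEYWORD_EXTERN, TOK_KEYWORD_STRUCT }; /* extern struct {} */
    printf("%d %d %d\n", layout_from_prev_token(plain, 1),
        layout_from_prev_token(packed_kw, 1),
        layout_from_prev_token(extern_kw, 1));
    return 0;
}
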
@@ -11312,18 +11356,210 @@ static uint32_t enumDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
return decl_inst;
}

// --- tupleDecl (AstGen.zig:5192) ---
static uint32_t tupleDecl(AstGenCtx* ag, GenZir* gz, uint32_t node,
const uint32_t* members, uint32_t members_len, uint8_t layout,
uint32_t backing_int_node) {
const Ast* tree = ag->tree;
// layout must be auto for tuples (AstGen.zig:5204-5207).
if (layout != 0) {
SET_ERROR(ag);
return reserveInstructionIndex(ag);
}
// tuples don't support backing int (AstGen.zig:5209-5211).
if (backing_int_node != 0) {
SET_ERROR(ag);
return reserveInstructionIndex(ag);
}
// Collect (type ref, init ref) pairs for each field. Upstream uses
// astgen.scratch as scratch space; here we use a temporary heap buffer.
uint32_t* tuple_scratch = NULL;
uint32_t tuple_scratch_len = 0;
uint32_t tuple_scratch_cap = 0;
for (uint32_t i = 0; i < members_len; i++) {
uint32_t member_node = members[i];
AstNodeTag mtag = tree->nodes.tags[member_node];
// Non-field nodes are errors in tuples (AstGen.zig:5224-5238).
if (mtag != AST_NODE_CONTAINER_FIELD_INIT
&& mtag != AST_NODE_CONTAINER_FIELD_ALIGN
&& mtag != AST_NODE_CONTAINER_FIELD) {
SET_ERROR(ag);
free(tuple_scratch);
return reserveInstructionIndex(ag);
}
// Extract field info.
uint32_t main_token = tree->nodes.main_tokens[member_node];
AstData nd = tree->nodes.datas[member_node];
uint32_t type_node = nd.lhs;
uint32_t align_node = 0;
uint32_t value_node = 0;
bool has_comptime_token = false;
switch (mtag) {
case AST_NODE_CONTAINER_FIELD_INIT:
value_node = nd.rhs;
break;
case AST_NODE_CONTAINER_FIELD_ALIGN:
align_node = nd.rhs;
break;
case AST_NODE_CONTAINER_FIELD:
if (nd.rhs != 0) {
align_node = tree->extra_data.arr[nd.rhs];
value_node = tree->extra_data.arr[nd.rhs + 1];
}
break;
default:
break;
}
if (main_token > 0
&& tree->tokens.tags[main_token - 1] == TOKEN_KEYWORD_COMPTIME) {
has_comptime_token = true;
}
// Check tuple_like: must be tuple-like (AstGen.zig:5240-5241).
bool tuple_like = tree->tokens.tags[main_token] != TOKEN_IDENTIFIER
|| tree->tokens.tags[main_token + 1] != TOKEN_COLON;
if (!tuple_like) {
// Named field in tuple: error (AstGen.zig:5241).
SET_ERROR(ag);
free(tuple_scratch);
return reserveInstructionIndex(ag);
}
// Tuple fields cannot have alignment (AstGen.zig:5244-5246).
if (align_node != 0) {
SET_ERROR(ag);
free(tuple_scratch);
return reserveInstructionIndex(ag);
}
// Non-comptime tuple field with default init: error
// (AstGen.zig:5248-5250).
if (value_node != 0 && !has_comptime_token) {
SET_ERROR(ag);
free(tuple_scratch);
return reserveInstructionIndex(ag);
}
// Comptime field without default init: error
// (AstGen.zig:5252-5254).
if (value_node == 0 && has_comptime_token) {
SET_ERROR(ag);
free(tuple_scratch);
return reserveInstructionIndex(ag);
}
// Type expression (AstGen.zig:5256).
uint32_t field_type_ref = typeExpr(gz, &gz->base, type_node);
// Grow scratch buffer.
if (tuple_scratch_len + 2 > tuple_scratch_cap) {
uint32_t new_cap
= tuple_scratch_cap == 0 ? 16 : tuple_scratch_cap * 2;
tuple_scratch = realloc(tuple_scratch, new_cap * sizeof(uint32_t));
if (!tuple_scratch)
exit(1);
tuple_scratch_cap = new_cap;
}
tuple_scratch[tuple_scratch_len++] = field_type_ref;
// Default init (AstGen.zig:5259-5264).
if (value_node != 0) {
ResultLoc init_rl = { .tag = RL_COERCED_TY,
.data = field_type_ref,
.src_node = 0,
.ctx = RI_CTX_NONE };
uint32_t field_init_ref = comptimeExpr(gz, &gz->base, init_rl,
value_node, COMPTIME_REASON_TUPLE_FIELD_DEFAULT_VALUE);
tuple_scratch[tuple_scratch_len++] = field_init_ref;
} else {
tuple_scratch[tuple_scratch_len++] = ZIR_REF_NONE;
}
}
if (members_len > 65535) {
SET_ERROR(ag);
free(tuple_scratch);
return reserveInstructionIndex(ag);
}
uint16_t fields_len = (uint16_t)members_len;
// Write TupleDecl payload (AstGen.zig:5274-5286).
ensureExtraCapacity(ag, 1 + tuple_scratch_len);
uint32_t payload_index = ag->extra_len;
// src_node as node offset relative to gz->decl_node_index.
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
for (uint32_t i = 0; i < tuple_scratch_len; i++)
ag->extra[ag->extra_len++] = tuple_scratch[i];
free(tuple_scratch);
// Emit extended instruction (AstGen.zig:5279-5286).
ensureInstCapacity(ag, 1);
uint32_t idx = ag->inst_len;
ag->inst_tags[idx] = ZIR_INST_EXTENDED;
ZirInstData data;
memset(&data, 0, sizeof(data));
data.extended.opcode = (uint16_t)ZIR_EXT_TUPLE_DECL;
data.extended.small = fields_len;
data.extended.operand = payload_index;
ag->inst_datas[idx] = data;
ag->inst_len++;
gzAppendInstruction(gz, idx);
return idx;
}

// --- structDeclInner (AstGen.zig:4926) ---
static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
- const uint32_t* members, uint32_t members_len, uint8_t name_strategy) {
+ const uint32_t* members, uint32_t members_len, uint8_t layout,
+ uint32_t backing_int_node, uint8_t name_strategy) {
const Ast* tree = ag->tree;

// Tuple detection (AstGen.zig:4939-4950).
// Scan for tuple-like fields; if any found, dispatch to tupleDecl.
for (uint32_t i = 0; i < members_len; i++) {
uint32_t member_node = members[i];
AstNodeTag mtag = tree->nodes.tags[member_node];
if (mtag != AST_NODE_CONTAINER_FIELD_INIT
&& mtag != AST_NODE_CONTAINER_FIELD_ALIGN
&& mtag != AST_NODE_CONTAINER_FIELD)
continue;
uint32_t main_token = tree->nodes.main_tokens[member_node];
bool tuple_like = tree->tokens.tags[main_token] != TOKEN_IDENTIFIER
|| tree->tokens.tags[main_token + 1] != TOKEN_COLON;
if (tuple_like) {
if (node == 0) {
// Root node: file cannot be a tuple
// (AstGen.zig:4946).
SET_ERROR(ag);
return reserveInstructionIndex(ag);
}
return tupleDecl(
ag, gz, node, members, members_len, layout, backing_int_node);
}
}
uint32_t decl_inst = reserveInstructionIndex(ag);
gzAppendInstruction(gz, decl_inst);

// Fast path: no members, no backing int (AstGen.zig:4954-4970).
- if (members_len == 0) {
+ if (members_len == 0 && backing_int_node == 0) {
StructDeclSmall small;
memset(&small, 0, sizeof(small));
+ small.layout = layout;
small.name_strategy = name_strategy;
setStruct(ag, decl_inst, node, small, 0, 0, 0);
return decl_inst;
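
Both tupleDecl and the detection loop above use the same two-token test: a field is named only when its main token is an identifier immediately followed by a colon; anything else makes the container a tuple. A standalone sketch with hypothetical token tags:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Made-up token tags; the real check reads tree->tokens.tags[]. */
typedef enum { T_IDENTIFIER, T_COLON, T_KEYWORD_U32, T_COMMA } Tok;

/* A container field is tuple-like unless it begins with `name:`. */
static bool is_tuple_like(const Tok* toks, uint32_t main_token) {
    return toks[main_token] != T_IDENTIFIER
        || toks[main_token + 1] != T_COLON;
}

int main(void) {
    Tok named[] = { T_IDENTIFIER, T_COLON, T_KEYWORD_U32, T_COMMA }; /* "x: u32," */
    Tok bare[] = { T_KEYWORD_U32, T_COMMA };                         /* "u32,"    */
    printf("named: %d, bare: %d\n",
        is_tuple_like(named, 0), is_tuple_like(bare, 0));
    return 0;
}
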
@@ -11338,18 +11574,47 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
WipMembers wm = wipMembersInit(decl_count, field_count);

// Set up block_scope for field type/align/init expressions.
- // (AstGen.zig:4983-4992)
+ // (AstGen.zig:4986-4994)
GenZir block_scope;
memset(&block_scope, 0, sizeof(block_scope));
block_scope.base.tag = SCOPE_GEN_ZIR;
block_scope.parent = NULL;
block_scope.astgen = ag;
block_scope.decl_node_index = node;
- block_scope.decl_line = ag->source_line;
+ block_scope.decl_line = gz->decl_line; // Fix #7: use gz->decl_line
block_scope.is_comptime = true;
block_scope.instructions_top = ag->scratch_inst_len;
block_scope.any_defer_node = UINT32_MAX;
// Handle backing_int_node for packed structs (AstGen.zig:5000-5024).
// We store the raw body instructions and apply fixups at the final append.
uint32_t backing_int_body_raw_len = 0;
uint32_t backing_int_ref = ZIR_REF_NONE;
uint32_t* backing_int_body_raw = NULL;
if (backing_int_node != 0) {
if (layout != 2) { // not packed
SET_ERROR(ag); // non-packed struct with backing int
} else {
backing_int_ref
= typeExpr(&block_scope, &block_scope.base, backing_int_node);
if (gzInstructionsLen(&block_scope) > 0) {
if (!endsWithNoReturn(&block_scope)) {
makeBreakInline(&block_scope, decl_inst, backing_int_ref,
AST_NODE_OFFSET_NONE);
}
backing_int_body_raw_len = gzInstructionsLen(&block_scope);
const uint32_t* body = gzInstructionsSlice(&block_scope);
backing_int_body_raw
= malloc(backing_int_body_raw_len * sizeof(uint32_t));
if (!backing_int_body_raw)
exit(1);
memcpy(backing_int_body_raw, body,
backing_int_body_raw_len * sizeof(uint32_t));
ag->scratch_inst_len = block_scope.instructions_top;
}
}
}

bool known_non_opv = false;
bool known_comptime_only = false;
bool any_comptime_fields = false;
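
The backing-int handling above follows a save/copy/restore pattern on the shared scratch instruction stack: remember the top, let typeExpr append, copy the appended slice to a heap buffer, then pop back so field bodies reuse the same space. A simplified sketch with made-up names (the real code tracks ag->scratch_inst_len against block_scope.instructions_top):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Made-up scratch stack standing in for the astgen scratch buffer. */
typedef struct { uint32_t buf[64]; uint32_t len; } Scratch;

/* Copy everything pushed above `top` to the heap, then pop back to `top`
   so the next sub-body starts from the same mark. */
static uint32_t* capture_body(Scratch* s, uint32_t top, uint32_t* out_len) {
    *out_len = s->len - top;
    uint32_t* body = malloc(*out_len * sizeof(uint32_t));
    if (!body)
        exit(1);
    memcpy(body, s->buf + top, *out_len * sizeof(uint32_t));
    s->len = top;
    return body;
}

int main(void) {
    Scratch s = { .len = 0 };
    uint32_t top = s.len;  /* instructions_top for this sub-body */
    s.buf[s.len++] = 101;  /* pretend typeExpr appended these    */
    s.buf[s.len++] = 102;
    uint32_t n = 0;
    uint32_t* body = capture_body(&s, top, &n);
    printf("captured %u insts, scratch back to %u\n", (unsigned)n, (unsigned)s.len);
    free(body);
    return 0;
}
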
@@ -11373,6 +11638,13 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
case AST_NODE_FN_DECL:
fnDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
break;
// fn_proto* dispatch (AstGen.zig:5809-5813, issue #9).
case AST_NODE_FN_PROTO_SIMPLE:
case AST_NODE_FN_PROTO_MULTI:
case AST_NODE_FN_PROTO_ONE:
case AST_NODE_FN_PROTO:
fnDecl(ag, gz, wm.payload, &wm.decl_index, member_node);
break;
case AST_NODE_USINGNAMESPACE:
case AST_NODE_GLOBAL_VAR_DECL:
case AST_NODE_LOCAL_VAR_DECL:
@@ -11418,34 +11690,44 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
has_comptime_token = true;
}

- // Field name (AstGen.zig:5080).
+ // Field name (AstGen.zig:5068).
+ // convertToNonTupleLike: for struct fields, if type_expr is
+ // an identifier node, it's actually a named field with the
+ // identifier as the name (AstGen.zig:5069-5070).
uint32_t field_name = identAsString(ag, main_token);
wipMembersAppendToField(&wm, field_name);

- // Type expression (AstGen.zig:5089-5109).
+ // Type expression: struct field missing type is an error
+ // (AstGen.zig:5073-5075, issue #12).
+ if (type_node == 0) {
+ SET_ERROR(ag);
+ break;
+ }
bool have_type_body = false;
uint32_t field_type = 0;
- if (type_node != 0) {
-     field_type
-         = typeExpr(&block_scope, &block_scope.base, type_node);
-     have_type_body = (gzInstructionsLen(&block_scope) > 0);
- }
+ field_type = typeExpr(&block_scope, &block_scope.base, type_node);
+ have_type_body = (gzInstructionsLen(&block_scope) > 0);
bool have_align = (align_node != 0);
bool have_value = (value_node != 0);
bool is_comptime = has_comptime_token;

+ // Packed/extern struct comptime field error
+ // (AstGen.zig:5083-5087, issue #15).
if (is_comptime) {
+ if (layout == 2 || layout == 1) {
+ // packed or extern struct fields cannot be comptime.
+ SET_ERROR(ag);
+ break;
+ }
any_comptime_fields = true;
} else {
- // (AstGen.zig:5106-5109)
- if (type_node != 0) {
-     known_non_opv = known_non_opv
-         || nodeImpliesMoreThanOnePossibleValue(tree, type_node);
-     known_comptime_only = known_comptime_only
-         || nodeImpliesComptimeOnly(tree, type_node);
- }
+ // (AstGen.zig:5089-5093)
+ known_non_opv = known_non_opv
+     || nodeImpliesMoreThanOnePossibleValue(tree, type_node);
+ known_comptime_only = known_comptime_only
+     || nodeImpliesComptimeOnly(tree, type_node);
}

bool field_bits[4]
@@ -11473,9 +11755,21 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
}

if (have_align) {
// Packed struct fields cannot have alignment overrides
// (AstGen.zig:5111-5113, issue #15).
if (layout == 2) { // packed
SET_ERROR(ag);
break;
}
any_aligned_fields = true;
- uint32_t align_ref
-     = expr(&block_scope, &block_scope.base, align_node);
+ // Use coerced_align_ri: RL_COERCED_TY with u29_type
+ // (AstGen.zig:5115, issue #14).
ResultLoc align_rl = { .tag = RL_COERCED_TY,
.data = ZIR_REF_U29_TYPE,
.src_node = 0,
.ctx = RI_CTX_NONE };
uint32_t align_ref = exprRl(
&block_scope, &block_scope.base, align_rl, align_node);
if (!endsWithNoReturn(&block_scope)) {
makeBreakInline(&block_scope, decl_inst, align_ref,
AST_NODE_OFFSET_NONE);
@@ -11490,8 +11784,20 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
if (have_value) {
any_default_inits = true;
- uint32_t default_ref
-     = expr(&block_scope, &block_scope.base, value_node);
+ // Use coerced_ty with decl_inst when field type is present
+ // (AstGen.zig:5132, issue #11).
ResultLoc value_rl;
if (field_type == 0) {
value_rl = RL_NONE_VAL;
} else {
uint32_t dref = decl_inst + ZIR_REF_START_INDEX;
value_rl = (ResultLoc) { .tag = RL_COERCED_TY,
.data = dref,
.src_node = 0,
.ctx = RI_CTX_NONE };
}
uint32_t default_ref = exprRl(
&block_scope, &block_scope.base, value_rl, value_node);
if (!endsWithNoReturn(&block_scope)) {
makeBreakInline(&block_scope, decl_inst, default_ref,
AST_NODE_OFFSET_NONE);
@@ -11502,6 +11808,10 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
wipMembersBodiesAppendWithFixups(&wm, ag, body, raw_len);
wipMembersAppendToField(&wm, body_len);
ag->scratch_inst_len = block_scope.instructions_top;
} else if (has_comptime_token) {
// Comptime field without default init: error
// (AstGen.zig:5144-5145, issue #13).
SET_ERROR(ag);
}
break;
}
@@ -11518,22 +11828,43 @@ static uint32_t structDeclInner(AstGenCtx* ag, GenZir* gz, uint32_t node,
memset(&small, 0, sizeof(small));
small.has_decls_len = (decl_count > 0);
small.has_fields_len = (field_count > 0);
+ small.has_backing_int = (backing_int_ref != ZIR_REF_NONE);
small.known_non_opv = known_non_opv;
small.known_comptime_only = known_comptime_only;
small.any_comptime_fields = any_comptime_fields;
small.any_default_inits = any_default_inits;
small.any_aligned_fields = any_aligned_fields;
+ small.layout = layout;
small.name_strategy = name_strategy;
setStruct(ag, decl_inst, node, small, 0, field_count, decl_count);

- // Append: captures (none), backing_int (none), decls, fields, bodies
- // (AstGen.zig:5176-5189).
+ // Append: captures (none), backing_int, decls, fields, bodies
+ // (AstGen.zig:5172-5186).
uint32_t decls_len;
const uint32_t* decls_slice = wipMembersDeclsSlice(&wm, &decls_len);
uint32_t fields_len;
const uint32_t* fields_slice = wipMembersFieldsSlice(&wm, &fields_len);
- ensureExtraCapacity(ag, decls_len + fields_len + wm.bodies_len);
+ // Compute backing_int_body_len (with fixups) for capacity estimation.
uint32_t backing_int_body_len = 0;
if (backing_int_body_raw_len > 0) {
backing_int_body_len = countBodyLenAfterFixups(
ag, backing_int_body_raw, backing_int_body_raw_len);
}
ensureExtraCapacity(ag,
(backing_int_ref != ZIR_REF_NONE ? backing_int_body_len + 2 : 0)
+ decls_len + fields_len + wm.bodies_len);
// backing_int (AstGen.zig:5176-5183).
if (backing_int_ref != ZIR_REF_NONE) {
ag->extra[ag->extra_len++] = backing_int_body_len;
if (backing_int_body_len == 0) {
ag->extra[ag->extra_len++] = backing_int_ref;
} else {
for (uint32_t j = 0; j < backing_int_body_raw_len; j++)
appendPossiblyRefdBodyInst(ag, backing_int_body_raw[j]);
}
}
free(backing_int_body_raw);
for (uint32_t i = 0; i < decls_len; i++)
ag->extra[ag->extra_len++] = decls_slice[i];
for (uint32_t i = 0; i < fields_len; i++)
@@ -12807,7 +13138,8 @@ Zir astGen(const Ast* ast) {
const uint32_t* members = ast->extra_data.arr + members_start;
uint32_t members_len = members_end - members_start;
- structDeclInner(&ag, &gen_scope, 0, members, members_len, 0 /* parent */);
+ structDeclInner(
+     &ag, &gen_scope, 0, members, members_len, 0, 0, 0 /* parent */);

// Write imports list (AstGen.zig:227-244).
writeImports(&ag);


@@ -325,6 +325,11 @@ fn expectEqualData(
.shl_with_overflow,
.restore_err_ret_index,
.branch_hint,
// Container decls: the packed `Small` operand has undefined padding bits.
.struct_decl,
.enum_decl,
.union_decl,
.opaque_decl,
=> true,
else => false,
};
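
This exemption exists because the container-decl operand is a packed flag struct (the `Small` referred to in the comment above) whose unused padding bits are never written, so two otherwise-equal instructions can differ byte-for-byte. A standalone C illustration of the general hazard, using a made-up bit-field struct rather than the real ZIR layout:

#include <stdio.h>
#include <string.h>

/* Made-up flag word with unused padding bits, standing in for the real
   packed "Small" operand; not the project's actual layout. */
typedef struct {
    unsigned has_fields : 1;
    unsigned layout : 2;
    /* remaining bits are padding and never assigned */
} Small;

int main(void) {
    Small a, b;
    memset(&a, 0x00, sizeof(a)); /* padding bits all zero */
    memset(&b, 0xFF, sizeof(b)); /* padding bits all one  */
    a.has_fields = 1; a.layout = 2;
    b.has_fields = 1; b.layout = 2;
    /* Field-wise equal, but the raw bytes can differ, so a byte-for-byte
       compare has to skip these instruction tags. */
    printf("fields equal: %d, bytes equal: %d\n",
        a.has_fields == b.has_fields && a.layout == b.layout,
        memcmp(&a, &b, sizeof(a)) == 0);
    return 0;
}
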