astgen: implement unified ptrCastBuiltin with nested cast collapsing

Port the upstream ptrCast() function (AstGen.zig:8969-9087) which handles
nested pointer cast collapsing. All five pointer cast builtins (@ptrCast,
@alignCast, @addrSpaceCast, @constCast, @volatileCast) now route through
a single ptrCastBuiltin() function that:

- Walks inward through nested builtin calls accumulating flags
- Handles @fieldParentPtr nesting (with accumulated outer flags)
- Emits ptr_cast_full, ptr_cast_no_dest, or simple ptr_cast based on
  combined flags and whether a result type is needed

This fixes compile errors in field_parent_ptr.zig and switch.zig where
@alignCast(@fieldParentPtr(...)) needed nested cast support.

Also adds @addrSpaceCast support (previously missing).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in: (branch not recorded)
Date:   2026-02-14 19:02:57 +00:00
Parent: 95044f9932
Commit: 9322d3feb0

View File

@@ -3150,6 +3150,171 @@ static uint32_t simpleCBuiltin(GenZir* gz, Scope* scope, ResultLoc rl,
return rvalue(gz, rl, ZIR_REF_VOID_VALUE, node);
}
// FullPtrCastFlags bits (Zir.zig:3461).
#define PTR_CAST_FLAG_PTR_CAST 0x01
#define PTR_CAST_FLAG_ALIGN_CAST 0x02
#define PTR_CAST_FLAG_ADDRSPACE 0x04
#define PTR_CAST_FLAG_CONST_CAST 0x08
#define PTR_CAST_FLAG_VOLATILE_CAST 0x10
// Flags that don't need a result type.
#define PTR_CAST_NO_RESULT_TY_MASK \
(PTR_CAST_FLAG_CONST_CAST | PTR_CAST_FLAG_VOLATILE_CAST)
// Mirrors ptrCast (AstGen.zig:8969-9087).
// Handles nested pointer cast collapsing for @ptrCast, @alignCast,
// @addrSpaceCast, @constCast, @volatileCast, and nested @fieldParentPtr.
static uint32_t ptrCastBuiltin(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t root_node) {
AstGenCtx* ag = gz->astgen;
const Ast* tree = ag->tree;
uint16_t flags = 0;
uint32_t node = root_node;
// Walk inward through nested builtin calls (AstGen.zig:8984-9047).
while (1) {
AstNodeTag ntag = tree->nodes.tags[node];
if (ntag == AST_NODE_BUILTIN_CALL_TWO
|| ntag == AST_NODE_BUILTIN_CALL_TWO_COMMA) {
// ok
} else if (ntag == AST_NODE_GROUPED_EXPRESSION) {
// Handle chaining through parentheses (AstGen.zig:8987-8990).
node = tree->nodes.datas[node].lhs;
continue;
} else {
break;
}
AstData nd = tree->nodes.datas[node];
// Count args: lhs is arg0, rhs is arg1 (0 = absent).
uint32_t arg_count = (nd.lhs != 0 ? 1u : 0u) + (nd.rhs != 0 ? 1u : 0u);
if (arg_count == 0)
break;
// Get builtin name from source.
uint32_t bt = tree->nodes.main_tokens[node];
uint32_t bstart = tree->tokens.starts[bt] + 1; // skip '@'
uint32_t bend = bstart;
while (bend < tree->source_len
&& ((tree->source[bend] >= 'a' && tree->source[bend] <= 'z')
|| (tree->source[bend] >= 'A' && tree->source[bend] <= 'Z')
|| tree->source[bend] == '_'))
bend++;
uint32_t blen = bend - bstart;
if (arg_count == 1) {
// 1-arg builtin: check if it's a pointer cast.
uint16_t flag = 0;
if (blen == 7 && memcmp(tree->source + bstart, "ptrCast", 7) == 0)
flag = PTR_CAST_FLAG_PTR_CAST;
else if (blen == 9
&& memcmp(tree->source + bstart, "alignCast", 9) == 0)
flag = PTR_CAST_FLAG_ALIGN_CAST;
else if (blen == 13
&& memcmp(tree->source + bstart, "addrSpaceCast", 13) == 0)
flag = PTR_CAST_FLAG_ADDRSPACE;
else if (blen == 9
&& memcmp(tree->source + bstart, "constCast", 9) == 0)
flag = PTR_CAST_FLAG_CONST_CAST;
else if (blen == 12
&& memcmp(tree->source + bstart, "volatileCast", 12) == 0)
flag = PTR_CAST_FLAG_VOLATILE_CAST;
else
break;
if (flags & flag) {
// Redundant cast (AstGen.zig:9015-9016).
SET_ERROR(ag);
return ZIR_REF_VOID_VALUE;
}
flags |= flag;
node = nd.lhs; // Advance to inner operand.
} else {
// 2-arg builtin: check for @fieldParentPtr (AstGen.zig:9027-9046).
if (blen != 14
|| memcmp(tree->source + bstart, "fieldParentPtr", 14) != 0)
break;
if (flags & PTR_CAST_FLAG_PTR_CAST)
break;
// Emit field_parent_ptr with accumulated flags.
advanceSourceCursorToMainToken(ag, gz, root_node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t parent_ptr_type = rlResultTypeForCast(gz, rl, root_node);
ResultLoc field_name_rl = { .tag = RL_COERCED_TY,
.data = ZIR_REF_SLICE_CONST_U8_TYPE,
.src_node = 0,
.ctx = RI_CTX_NONE };
uint32_t field_name = comptimeExpr(
gz, scope, field_name_rl, nd.lhs, COMPTIME_REASON_FIELD_NAME);
uint32_t field_ptr = expr(gz, scope, nd.rhs);
emitDbgStmt(gz, saved_line, saved_col);
ensureExtraCapacity(ag, 4);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
ag->extra[ag->extra_len++] = parent_ptr_type;
ag->extra[ag->extra_len++] = field_name;
ag->extra[ag->extra_len++] = field_ptr;
uint32_t result = addExtendedPayloadSmall(
gz, (uint16_t)ZIR_EXT_FIELD_PARENT_PTR, flags, payload_index);
return rvalue(gz, rl, result, root_node);
}
}
// After loop: flags must be non-zero (AstGen.zig:9051).
// Path A: pure @ptrCast (AstGen.zig:9053-9056).
if (flags == PTR_CAST_FLAG_PTR_CAST) {
advanceSourceCursorToMainToken(ag, gz, root_node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultTypeForCast(gz, rl, root_node);
uint32_t operand = expr(gz, scope, node);
emitDbgStmt(gz, saved_line, saved_col);
return rvalue(gz, rl,
addPlNodeBin(
gz, ZIR_INST_PTR_CAST, root_node, result_type, operand),
root_node);
}
// Path B: only const_cast/volatile_cast — no result type needed
// (AstGen.zig:9059-9072).
if ((flags & ~PTR_CAST_NO_RESULT_TY_MASK) == 0) {
advanceSourceCursorToMainToken(ag, gz, root_node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t operand = expr(gz, scope, node);
emitDbgStmt(gz, saved_line, saved_col);
ensureExtraCapacity(ag, 2);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)root_node - (int32_t)gz->decl_node_index);
ag->extra[ag->extra_len++] = operand;
uint32_t result = addExtendedPayloadSmall(
gz, (uint16_t)ZIR_EXT_PTR_CAST_NO_DEST, flags, payload_index);
return rvalue(gz, rl, result, root_node);
}
// Path C: full cast with result type (AstGen.zig:9077-9086).
advanceSourceCursorToMainToken(ag, gz, root_node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultTypeForCast(gz, rl, root_node);
uint32_t operand = expr(gz, scope, node);
emitDbgStmt(gz, saved_line, saved_col);
ensureExtraCapacity(ag, 3);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)root_node - (int32_t)gz->decl_node_index);
ag->extra[ag->extra_len++] = result_type;
ag->extra[ag->extra_len++] = operand;
uint32_t result = addExtendedPayloadSmall(
gz, (uint16_t)ZIR_EXT_PTR_CAST_FULL, flags, payload_index);
return rvalue(gz, rl, result, root_node);
}
// Mirrors builtinCall (AstGen.zig:9191) dispatch.
static uint32_t builtinCall(
GenZir* gz, Scope* scope, ResultLoc rl, uint32_t node) {
@@ -3244,20 +3409,13 @@ static uint32_t builtinCall(
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_TRUNCATE, node,
result_type, operand), node);
}
// @ptrCast — typeCast pattern (AstGen.zig:9056, 9807-9826).
// TODO: Issue 14 — upstream routes through ptrCast() for nested
// pointer cast collapsing. Currently uses simple typeCast path.
if (name_len == 7 && memcmp(source + name_start, "ptrCast", 7) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
return rvalue(gz, rl, addPlNodeBin(gz, ZIR_INST_PTR_CAST, node,
result_type, operand), node);
}
// @ptrCast — routes through ptrCastBuiltin (AstGen.zig:9464-9469).
if (name_len == 7 && memcmp(source + name_start, "ptrCast", 7) == 0)
return ptrCastBuiltin(gz, scope, rl, node);
// @addrSpaceCast — routes through ptrCastBuiltin (AstGen.zig:9464-9469).
if (name_len == 13
&& memcmp(source + name_start, "addrSpaceCast", 13) == 0)
return ptrCastBuiltin(gz, scope, rl, node);
// @enumFromInt — typeCast pattern (AstGen.zig:9414, 9807-9826).
if (name_len == 11 && memcmp(source + name_start, "enumFromInt", 11) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
@@ -3518,69 +3676,18 @@ static uint32_t builtinCall(
gz, (uint16_t)ZIR_EXT_SHL_WITH_OVERFLOW, payload_index);
return rvalue(gz, rl, result, node);
}
// @alignCast — ptrCast family (AstGen.zig:9464-9469, 8969-9087).
// Simplified: standalone @alignCast uses ptr_cast_full with align_cast flag.
// @alignCast — routes through ptrCastBuiltin (AstGen.zig:9464-9469).
if (name_len == 9
&& memcmp(source + name_start, "alignCast", 9) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
uint32_t result_type = rlResultTypeForCast(gz, rl, node);
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
// align_cast flag = bit 1 (FullPtrCastFlags: ptr_cast=0, align_cast=1)
uint16_t flags = 0x02;
ensureExtraCapacity(ag, 3);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
ag->extra[ag->extra_len++] = result_type;
ag->extra[ag->extra_len++] = operand;
uint32_t result = addExtendedPayloadSmall(
gz, (uint16_t)ZIR_EXT_PTR_CAST_FULL, flags, payload_index);
return rvalue(gz, rl, result, node);
}
// @constCast (AstGen.zig:9464-9469, 8969-9087).
&& memcmp(source + name_start, "alignCast", 9) == 0)
return ptrCastBuiltin(gz, scope, rl, node);
// @constCast — routes through ptrCastBuiltin (AstGen.zig:9464-9469).
if (name_len == 9
&& memcmp(source + name_start, "constCast", 9) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
// const_cast flag = bit 3 (FullPtrCastFlags packed u5)
uint16_t flags = 0x08;
ensureExtraCapacity(ag, 2);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
ag->extra[ag->extra_len++] = operand;
uint32_t result = addExtendedPayloadSmall(
gz, (uint16_t)ZIR_EXT_PTR_CAST_NO_DEST, flags, payload_index);
return rvalue(gz, rl, result, node);
}
// @volatileCast (AstGen.zig:9464-9469, 8969-9087).
&& memcmp(source + name_start, "constCast", 9) == 0)
return ptrCastBuiltin(gz, scope, rl, node);
// @volatileCast — routes through ptrCastBuiltin (AstGen.zig:9464-9469).
if (name_len == 12
&& memcmp(source + name_start, "volatileCast", 12) == 0) {
advanceSourceCursorToMainToken(ag, gz, node);
uint32_t saved_line = ag->source_line - gz->decl_line;
uint32_t saved_col = ag->source_column;
AstData nd = tree->nodes.datas[node];
uint32_t operand = expr(gz, scope, nd.lhs);
emitDbgStmt(gz, saved_line, saved_col);
// volatile_cast flag = bit 4 (FullPtrCastFlags packed u5)
uint16_t flags = 0x10;
ensureExtraCapacity(ag, 2);
uint32_t payload_index = ag->extra_len;
ag->extra[ag->extra_len++]
= (uint32_t)((int32_t)node - (int32_t)gz->decl_node_index);
ag->extra[ag->extra_len++] = operand;
uint32_t result = addExtendedPayloadSmall(
gz, (uint16_t)ZIR_EXT_PTR_CAST_NO_DEST, flags, payload_index);
return rvalue(gz, rl, result, node);
}
&& memcmp(source + name_start, "volatileCast", 12) == 0)
return ptrCastBuiltin(gz, scope, rl, node);
// @Type (reify) (AstGen.zig:9426-9428, 9747-9781).
if (name_len == 4 && memcmp(source + name_start, "Type", 4) == 0) {
AstData nd = tree->nodes.datas[node];