stage2: support nested structs and arrays and sret
* Add AIR instructions: ret_ptr, ret_load
- This allows Sema to be blissfully unaware of the backend's decision
to implement by-val/by-ref semantics for struct/union/array types.
Backends can lower these simply as alloc, load, ret instructions,
or they can take advantage of them to use a result pointer.
* Add AIR instruction: array_elem_val
- Allows for better codegen for `Sema.elemVal`.
* Implement calculation of ABI alignment and ABI size for unions.
* Before appending the following AIR instructions to a block,
resolveTypeLayout is called on the type:
- call - return type
- ret - return type
- store_ptr - elem type
* Sema: fix a memory leak in `zirArrayInit` and apply other cleanups to
  this function.
* x86_64: implement the full x86_64 C ABI according to the spec
* Type: implement `intInfo` for error sets.
* Type: implement `intTagType` for tagged unions.
The Zig type tag `Fn` is now used exclusively for function bodies.
Function pointers are modeled as `*const T` where `T` is a `Fn` type.
* The `call` AIR instruction now allows a function pointer operand as
well as a function operand.
* Sema now has a coercion from function body to function pointer.
* Function type syntax, e.g. `fn()void`, now returns zig tag type of
Pointer with child Fn, rather than Fn directly.
- I think this should probably be reverted. I will discuss the language
  specification before doing this. The idea is that function pointers
  would need to be specified as `*const fn()void` rather than `fn() void`.
LLVM backend:
* Enable calling the panic handler (previously this just
emitted `@breakpoint()` since the backend could not handle the panic
function).
* Implement sret
* Introduce `isByRef` and implement it for structs and arrays. Types
that are `isByRef` are now passed as pointers to functions, and e.g.
`elem_val` will return a pointer instead of doing a load.
* Move the function type creating code from `resolveLlvmFunction` to
`llvmType` where it belongs; now there is only 1 instance of this
logic instead of two.
* Add the `nonnull` attribute to non-optional pointer parameters.
* Fix `resolveGlobalDecl` not using fully-qualified names and not using
the `decl_map`.
* Implement `genTypedValue` for pointer-like optionals.
* Fix memory leak when lowering `block` instruction and OOM occurs.
* Implement volatile checks where relevant.
This commit is contained in:
@@ -785,7 +785,7 @@ pub const Struct = struct {
|
||||
/// The Decl that corresponds to the struct itself.
|
||||
owner_decl: *Decl,
|
||||
/// Set of field names in declaration order.
|
||||
fields: std.StringArrayHashMapUnmanaged(Field),
|
||||
fields: Fields,
|
||||
/// Represents the declarations inside this struct.
|
||||
namespace: Namespace,
|
||||
/// Offset from `owner_decl`, points to the struct AST node.
|
||||
@@ -805,6 +805,8 @@ pub const Struct = struct {
|
||||
/// is necessary to determine whether it has bits at runtime.
|
||||
known_has_bits: bool,
|
||||
|
||||
pub const Fields = std.StringArrayHashMapUnmanaged(Field);
|
||||
|
||||
/// The `Type` and `Value` memory is owned by the arena of the Struct's owner_decl.
|
||||
pub const Field = struct {
|
||||
/// Uses `noreturn` to indicate `anytype`.
|
||||
@@ -935,7 +937,7 @@ pub const Union = struct {
|
||||
/// This will be set to the null type until status is `have_field_types`.
|
||||
tag_ty: Type,
|
||||
/// Set of field names in declaration order.
|
||||
fields: std.StringArrayHashMapUnmanaged(Field),
|
||||
fields: Fields,
|
||||
/// Represents the declarations inside this union.
|
||||
namespace: Namespace,
|
||||
/// Offset from `owner_decl`, points to the union decl AST node.
|
||||
@@ -958,6 +960,8 @@ pub const Union = struct {
|
||||
abi_align: Value,
|
||||
};
|
||||
|
||||
pub const Fields = std.StringArrayHashMapUnmanaged(Field);
|
||||
|
||||
/// Returns the fully-qualified name of the union's owner Decl.
/// Caller owns the returned slice; allocation failure is propagated.
pub fn getFullyQualifiedName(s: *Union, gpa: *Allocator) ![]u8 {
    const owner = s.owner_decl;
    return owner.getFullyQualifiedName(gpa);
}
|
||||
@@ -992,14 +996,18 @@ pub const Union = struct {
|
||||
|
||||
/// Returns the index of the field with the greatest ABI alignment.
/// Fields without runtime bits are skipped. Asserts that field types
/// have been resolved. Result is undefined if no field has runtime bits.
pub fn mostAlignedField(u: Union, target: Target) u32 {
    assert(u.haveFieldTypes());
    var best_align: u32 = 0;
    var best_index: usize = undefined;
    for (u.fields.values()) |field, i| {
        if (!field.ty.hasCodeGenBits()) continue;
        // An explicit `align(N)` on the field overrides the type's
        // natural ABI alignment.
        const field_align = if (field.abi_align.tag() == .abi_align_default)
            field.ty.abiAlignment(target)
        else
            @intCast(u32, field.abi_align.toUnsignedInt());
        if (field_align > best_align) {
            best_align = field_align;
            best_index = i;
        }
    }
    return @intCast(u32, best_index);
}
|
||||
|
||||
/// Computes the ABI alignment of the union: the maximum alignment over
/// all fields with runtime bits, and over the tag type when `have_tag`
/// is set. Asserts that the result is nonzero, i.e. at least one of the
/// tag or the fields contributes runtime bits.
pub fn abiAlignment(u: Union, target: Target, have_tag: bool) u32 {
    var result: u32 = if (have_tag) u.tag_ty.abiAlignment(target) else 0;
    for (u.fields.values()) |field| {
        if (!field.ty.hasCodeGenBits()) continue;
        // An explicit `align(N)` on the field overrides the type's
        // natural ABI alignment.
        const field_align = if (field.abi_align.tag() == .abi_align_default)
            field.ty.abiAlignment(target)
        else
            @intCast(u32, field.abi_align.toUnsignedInt());
        result = @maximum(result, field_align);
    }
    assert(result != 0);
    return result;
}
|
||||
|
||||
/// Computes the ABI size of the union, optionally including the tag.
/// The payload size/alignment is the maximum over all fields with
/// runtime bits. When a tag is present, it is placed before or after
/// the payload depending on which has the stricter alignment, and the
/// total is padded up to the overall (larger) alignment.
/// Asserts that field types have been resolved.
pub fn abiSize(u: Union, target: Target, have_tag: bool) u64 {
    assert(u.haveFieldTypes());
    const is_packed = u.layout == .Packed;
    if (is_packed) @panic("TODO packed unions");

    var payload_size: u64 = 0;
    var payload_align: u32 = 0;
    for (u.fields.values()) |field| {
        if (!field.ty.hasCodeGenBits()) continue;
        // An explicit `align(N)` on the field overrides the type's
        // natural ABI alignment.
        const field_align = if (field.abi_align.tag() == .abi_align_default)
            field.ty.abiAlignment(target)
        else
            @intCast(u32, field.abi_align.toUnsignedInt());
        payload_size = @maximum(payload_size, field.ty.abiSize(target));
        payload_align = @maximum(payload_align, field_align);
    }
    if (!have_tag) {
        return std.mem.alignForwardGeneric(u64, payload_size, payload_align);
    }

    const tag_size = u.tag_ty.abiSize(target);
    const tag_align = u.tag_ty.abiAlignment(target);
    var total: u64 = 0;
    if (tag_align >= payload_align) {
        // Layout is {Tag, Payload}; pad the tag up to the payload's
        // alignment, then pad the whole thing to the tag's (larger)
        // alignment.
        total = std.mem.alignForwardGeneric(u64, tag_size, payload_align);
        total += payload_size;
        total = std.mem.alignForwardGeneric(u64, total, tag_align);
    } else {
        // Layout is {Payload, Tag}; pad the payload up to the tag's
        // alignment, then pad the whole thing to the payload's (larger)
        // alignment.
        total = std.mem.alignForwardGeneric(u64, payload_size, tag_align);
        total += tag_size;
        total = std.mem.alignForwardGeneric(u64, total, payload_align);
    }
    return total;
}
|
||||
};
|
||||
|
||||
/// Some Fn struct memory is owned by the Decl's TypedValue.Managed arena allocator.
|
||||
|
||||
Reference in New Issue
Block a user