Merge pull request #9517 from ziglang/generic-functions

stage2 generic functions
This commit is contained in:
Andrew Kelley
2021-08-05 23:32:42 -07:00
committed by GitHub
18 changed files with 1828 additions and 931 deletions

View File

@@ -563,7 +563,7 @@ pub fn HashMap(
return self.unmanaged.getPtrContext(key, self.ctx);
}
pub fn getPtrAdapted(self: Self, key: anytype, ctx: anytype) ?*V {
return self.unmanaged.getPtrAdapted(key, self.ctx);
return self.unmanaged.getPtrAdapted(key, ctx);
}
/// Finds the key and value associated with a key in the map

View File

@@ -2198,6 +2198,9 @@ pub const full = struct {
.type_expr = param_type,
};
}
if (token_tags[it.tok_i] == .comma) {
it.tok_i += 1;
}
if (token_tags[it.tok_i] == .r_paren) {
return null;
}

View File

@@ -42,7 +42,7 @@ const InnerError = error{ OutOfMemory, AnalysisFail };
fn addExtra(astgen: *AstGen, extra: anytype) Allocator.Error!u32 {
const fields = std.meta.fields(@TypeOf(extra));
try astgen.extra.ensureCapacity(astgen.gpa, astgen.extra.items.len + fields.len);
try astgen.extra.ensureUnusedCapacity(astgen.gpa, fields.len);
return addExtraAssumeCapacity(astgen, extra);
}
@@ -195,6 +195,9 @@ pub const ResultLoc = union(enum) {
none_or_ref,
/// The expression will be coerced into this type, but it will be evaluated as an rvalue.
ty: Zir.Inst.Ref,
/// Same as `ty` but it is guaranteed that Sema will additionally perform the coercion,
/// so no `as` instruction needs to be emitted.
coerced_ty: Zir.Inst.Ref,
/// The expression must store its result into this typed pointer. The result instruction
/// from the expression must be ignored.
ptr: Zir.Inst.Ref,
@@ -225,7 +228,7 @@ pub const ResultLoc = union(enum) {
fn strategy(rl: ResultLoc, block_scope: *GenZir) Strategy {
switch (rl) {
// In this branch there will not be any store_to_block_ptr instructions.
.discard, .none, .none_or_ref, .ty, .ref => return .{
.discard, .none, .none_or_ref, .ty, .coerced_ty, .ref => return .{
.tag = .break_operand,
.elide_store_to_block_ptr_instructions = false,
},
@@ -259,13 +262,15 @@ pub const ResultLoc = union(enum) {
pub const align_rl: ResultLoc = .{ .ty = .u16_type };
pub const bool_rl: ResultLoc = .{ .ty = .bool_type };
pub const type_rl: ResultLoc = .{ .ty = .type_type };
pub const coerced_type_rl: ResultLoc = .{ .coerced_ty = .type_type };
fn typeExpr(gz: *GenZir, scope: *Scope, type_node: ast.Node.Index) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
gz.force_comptime = true;
defer gz.force_comptime = prev_force_comptime;
return expr(gz, scope, .{ .ty = .type_type }, type_node);
return expr(gz, scope, coerced_type_rl, type_node);
}
/// Same as `expr` but fails with a compile error if the result type is `noreturn`.
@@ -1046,71 +1051,55 @@ fn fnProtoExpr(
};
assert(!is_extern);
// The AST params array does not contain anytype and ... parameters.
// We must iterate to count how many param types to allocate.
const param_count = blk: {
var count: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| {
if (param.anytype_ellipsis3) |token| switch (token_tags[token]) {
.ellipsis3 => break,
.keyword_anytype => {},
else => unreachable,
};
count += 1;
}
break :blk count;
};
const param_types = try gpa.alloc(Zir.Inst.Ref, param_count);
defer gpa.free(param_types);
const bits_per_param = 1;
const params_per_u32 = 32 / bits_per_param;
// We only need this if there are greater than params_per_u32 fields.
var bit_bag = ArrayListUnmanaged(u32){};
defer bit_bag.deinit(gpa);
var cur_bit_bag: u32 = 0;
var is_var_args = false;
{
const is_var_args = is_var_args: {
var param_type_i: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| : (param_type_i += 1) {
if (param_type_i % params_per_u32 == 0 and param_type_i != 0) {
try bit_bag.append(gpa, cur_bit_bag);
cur_bit_bag = 0;
}
const is_comptime = if (param.comptime_noalias) |token|
token_tags[token] == .keyword_comptime
else
false;
cur_bit_bag = (cur_bit_bag >> bits_per_param) |
(@as(u32, @boolToInt(is_comptime)) << 31);
if (param.anytype_ellipsis3) |token| {
const is_anytype = if (param.anytype_ellipsis3) |token| blk: {
switch (token_tags[token]) {
.keyword_anytype => {
param_types[param_type_i] = .none;
continue;
},
.ellipsis3 => {
is_var_args = true;
break;
},
.keyword_anytype => break :blk true,
.ellipsis3 => break :is_var_args true,
else => unreachable,
}
}
const param_type_node = param.type_expr;
assert(param_type_node != 0);
param_types[param_type_i] =
try expr(gz, scope, .{ .ty = .type_type }, param_type_node);
}
assert(param_type_i == param_count);
} else false;
const empty_slot_count = params_per_u32 - (param_type_i % params_per_u32);
if (empty_slot_count < params_per_u32) {
cur_bit_bag >>= @intCast(u5, empty_slot_count * bits_per_param);
const param_name: u32 = if (param.name_token) |name_token| blk: {
if (mem.eql(u8, "_", tree.tokenSlice(name_token)))
break :blk 0;
break :blk try astgen.identAsString(name_token);
} else 0;
if (is_anytype) {
const name_token = param.name_token orelse param.anytype_ellipsis3.?;
const tag: Zir.Inst.Tag = if (is_comptime)
.param_anytype_comptime
else
.param_anytype;
_ = try gz.addStrTok(tag, param_name, name_token);
} else {
const param_type_node = param.type_expr;
assert(param_type_node != 0);
var param_gz = gz.makeSubBlock(scope);
defer param_gz.instructions.deinit(gpa);
const param_type = try expr(&param_gz, scope, coerced_type_rl, param_type_node);
const param_inst_expected = @intCast(u32, astgen.instructions.len + 1);
_ = try param_gz.addBreak(.break_inline, param_inst_expected, param_type);
const main_tokens = tree.nodes.items(.main_token);
const name_token = param.name_token orelse main_tokens[param_type_node];
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
const param_inst = try gz.addParam(tag, name_token, param_name, param_gz.instructions.items);
assert(param_inst_expected == param_inst);
}
}
}
break :is_var_args false;
};
const align_inst: Zir.Inst.Ref = if (fn_proto.ast.align_expr == 0) .none else inst: {
break :inst try expr(gz, scope, align_rl, fn_proto.ast.align_expr);
@@ -1124,15 +1113,13 @@ fn fnProtoExpr(
if (is_inferred_error) {
return astgen.failTok(maybe_bang, "function prototype may not have inferred error set", .{});
}
const return_type_inst = try AstGen.expr(
gz,
scope,
.{ .ty = .type_type },
fn_proto.ast.return_type,
);
var ret_gz = gz.makeSubBlock(scope);
defer ret_gz.instructions.deinit(gpa);
const ret_ty = try expr(&ret_gz, scope, coerced_type_rl, fn_proto.ast.return_type);
const ret_br = try ret_gz.addBreak(.break_inline, 0, ret_ty);
const cc: Zir.Inst.Ref = if (fn_proto.ast.callconv_expr != 0)
try AstGen.expr(
try expr(
gz,
scope,
.{ .ty = .calling_convention_type },
@@ -1143,8 +1130,9 @@ fn fnProtoExpr(
const result = try gz.addFunc(.{
.src_node = fn_proto.ast.proto_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.param_block = 0,
.ret_ty = ret_gz.instructions.items,
.ret_br = ret_br,
.body = &[0]Zir.Inst.Index{},
.cc = cc,
.align_inst = align_inst,
@@ -1153,8 +1141,6 @@ fn fnProtoExpr(
.is_inferred_error = false,
.is_test = false,
.is_extern = false,
.cur_bit_bag = cur_bit_bag,
.bit_bag = bit_bag.items,
});
return rvalue(gz, rl, result, fn_proto.ast.proto_node);
}
@@ -1239,7 +1225,7 @@ fn arrayInitExpr(
return arrayInitExprRlNone(gz, scope, node, array_init.ast.elements, .array_init_anon);
}
},
.ty => |ty_inst| {
.ty, .coerced_ty => |ty_inst| {
if (types.array != .none) {
const result = try arrayInitExprRlTy(gz, scope, node, array_init.ast.elements, types.elem, .array_init);
return rvalue(gz, rl, result, node);
@@ -1408,7 +1394,7 @@ fn structInitExpr(
return structInitExprRlNone(gz, scope, node, struct_init, .struct_init_anon);
}
},
.ty => |ty_inst| {
.ty, .coerced_ty => |ty_inst| {
if (struct_init.ast.type_expr == 0) {
return structInitExprRlTy(gz, scope, node, struct_init, ty_inst, .struct_init);
}
@@ -1447,8 +1433,8 @@ fn structInitExprRlNone(
const init_inst = try gz.addPlNode(tag, node, Zir.Inst.StructInitAnon{
.fields_len = @intCast(u32, fields_list.len),
});
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
fields_list.len * @typeInfo(Zir.Inst.StructInitAnon.Item).Struct.fields.len);
try astgen.extra.ensureUnusedCapacity(gpa, fields_list.len *
@typeInfo(Zir.Inst.StructInitAnon.Item).Struct.fields.len);
for (fields_list) |field| {
_ = gz.astgen.addExtraAssumeCapacity(field);
}
@@ -1520,8 +1506,8 @@ fn structInitExprRlTy(
const init_inst = try gz.addPlNode(tag, node, Zir.Inst.StructInit{
.fields_len = @intCast(u32, fields_list.len),
});
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
fields_list.len * @typeInfo(Zir.Inst.StructInit.Item).Struct.fields.len);
try astgen.extra.ensureUnusedCapacity(gpa, fields_list.len *
@typeInfo(Zir.Inst.StructInit.Item).Struct.fields.len);
for (fields_list) |field| {
_ = gz.astgen.addExtraAssumeCapacity(field);
}
@@ -1918,7 +1904,10 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
// ZIR instructions that might be a type other than `noreturn` or `void`.
.add,
.addwrap,
.arg,
.param,
.param_comptime,
.param_anytype,
.param_anytype_comptime,
.alloc,
.alloc_mut,
.alloc_comptime,
@@ -2488,7 +2477,7 @@ fn varDecl(
// Move the init_scope instructions into the parent scope, swapping
// store_to_block_ptr for store_to_inferred_ptr.
const expected_len = parent_zir.items.len + init_scope.instructions.items.len;
try parent_zir.ensureCapacity(gpa, expected_len);
try parent_zir.ensureTotalCapacity(gpa, expected_len);
for (init_scope.instructions.items) |src_inst| {
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == init_scope.rl_ptr) {
@@ -2634,7 +2623,7 @@ fn assignOp(
const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs);
const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node);
const lhs_type = try gz.addUnNode(.typeof, lhs, infix_node);
const rhs = try expr(gz, scope, .{ .ty = lhs_type }, node_datas[infix_node].rhs);
const rhs = try expr(gz, scope, .{ .coerced_ty = lhs_type }, node_datas[infix_node].rhs);
const result = try gz.addPlNode(op_inst_tag, infix_node, Zir.Inst.Bin{
.lhs = lhs,
@@ -2750,10 +2739,10 @@ fn ptrType(
}
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.PtrType).Struct.fields.len + trailing_count);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.PtrType).Struct.fields.len +
trailing_count);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.PtrType{ .elem_type = elem_type });
if (sentinel_ref != .none) {
@@ -2899,6 +2888,16 @@ fn fnDecl(
};
defer decl_gz.instructions.deinit(gpa);
var fn_gz: GenZir = .{
.force_comptime = false,
.in_defer = false,
.decl_node_index = fn_proto.ast.proto_node,
.decl_line = decl_gz.decl_line,
.parent = &decl_gz.base,
.astgen = astgen,
};
defer fn_gz.instructions.deinit(gpa);
// TODO: support noinline
const is_pub = fn_proto.visib_token != null;
const is_export = blk: {
@@ -2913,80 +2912,82 @@ fn fnDecl(
const maybe_inline_token = fn_proto.extern_export_inline_token orelse break :blk false;
break :blk token_tags[maybe_inline_token] == .keyword_inline;
};
const align_inst: Zir.Inst.Ref = if (fn_proto.ast.align_expr == 0) .none else inst: {
break :inst try expr(&decl_gz, &decl_gz.base, align_rl, fn_proto.ast.align_expr);
};
const section_inst: Zir.Inst.Ref = if (fn_proto.ast.section_expr == 0) .none else inst: {
break :inst try comptimeExpr(&decl_gz, &decl_gz.base, .{ .ty = .const_slice_u8_type }, fn_proto.ast.section_expr);
};
try wip_decls.next(gpa, is_pub, is_export, fn_proto.ast.align_expr != 0, fn_proto.ast.section_expr != 0);
try wip_decls.next(gpa, is_pub, is_export, align_inst != .none, section_inst != .none);
// The AST params array does not contain anytype and ... parameters.
// We must iterate to count how many param types to allocate.
const param_count = blk: {
var count: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| {
if (param.anytype_ellipsis3) |token| switch (token_tags[token]) {
.ellipsis3 => break,
.keyword_anytype => {},
else => unreachable,
};
count += 1;
}
break :blk count;
};
const param_types = try gpa.alloc(Zir.Inst.Ref, param_count);
defer gpa.free(param_types);
const bits_per_param = 1;
const params_per_u32 = 32 / bits_per_param;
// We only need this if there are greater than params_per_u32 fields.
var bit_bag = ArrayListUnmanaged(u32){};
defer bit_bag.deinit(gpa);
var cur_bit_bag: u32 = 0;
var is_var_args = false;
{
var params_scope = &fn_gz.base;
const is_var_args = is_var_args: {
var param_type_i: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| : (param_type_i += 1) {
if (param_type_i % params_per_u32 == 0 and param_type_i != 0) {
try bit_bag.append(gpa, cur_bit_bag);
cur_bit_bag = 0;
}
const is_comptime = if (param.comptime_noalias) |token|
token_tags[token] == .keyword_comptime
else
false;
cur_bit_bag = (cur_bit_bag >> bits_per_param) |
(@as(u32, @boolToInt(is_comptime)) << 31);
if (param.anytype_ellipsis3) |token| {
const is_anytype = if (param.anytype_ellipsis3) |token| blk: {
switch (token_tags[token]) {
.keyword_anytype => {
param_types[param_type_i] = .none;
continue;
},
.ellipsis3 => {
is_var_args = true;
break;
},
.keyword_anytype => break :blk true,
.ellipsis3 => break :is_var_args true,
else => unreachable,
}
}
const param_type_node = param.type_expr;
assert(param_type_node != 0);
param_types[param_type_i] =
try expr(&decl_gz, &decl_gz.base, .{ .ty = .type_type }, param_type_node);
}
assert(param_type_i == param_count);
} else false;
const empty_slot_count = params_per_u32 - (param_type_i % params_per_u32);
if (empty_slot_count < params_per_u32) {
cur_bit_bag >>= @intCast(u5, empty_slot_count * bits_per_param);
const param_name: u32 = if (param.name_token) |name_token| blk: {
if (mem.eql(u8, "_", tree.tokenSlice(name_token)))
break :blk 0;
const param_name = try astgen.identAsString(name_token);
if (!is_extern) {
try astgen.detectLocalShadowing(params_scope, param_name, name_token);
}
break :blk param_name;
} else if (!is_extern) {
if (param.anytype_ellipsis3) |tok| {
return astgen.failTok(tok, "missing parameter name", .{});
} else {
return astgen.failNode(param.type_expr, "missing parameter name", .{});
}
} else 0;
const param_inst = if (is_anytype) param: {
const name_token = param.name_token orelse param.anytype_ellipsis3.?;
const tag: Zir.Inst.Tag = if (is_comptime)
.param_anytype_comptime
else
.param_anytype;
break :param try decl_gz.addStrTok(tag, param_name, name_token);
} else param: {
const param_type_node = param.type_expr;
assert(param_type_node != 0);
var param_gz = decl_gz.makeSubBlock(scope);
defer param_gz.instructions.deinit(gpa);
const param_type = try expr(&param_gz, params_scope, coerced_type_rl, param_type_node);
const param_inst_expected = @intCast(u32, astgen.instructions.len + 1);
_ = try param_gz.addBreak(.break_inline, param_inst_expected, param_type);
const main_tokens = tree.nodes.items(.main_token);
const name_token = param.name_token orelse main_tokens[param_type_node];
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
const param_inst = try decl_gz.addParam(tag, name_token, param_name, param_gz.instructions.items);
assert(param_inst_expected == param_inst);
break :param indexToRef(param_inst);
};
if (param_name == 0) continue;
const sub_scope = try astgen.arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &decl_gz,
.name = param_name,
.inst = param_inst,
.token_src = param.name_token.?,
.id_cat = .@"function parameter",
};
params_scope = &sub_scope.base;
}
}
break :is_var_args false;
};
const lib_name: u32 = if (fn_proto.lib_name) |lib_name_token| blk: {
const lib_name_str = try astgen.strLitAsString(lib_name_token);
@@ -2996,12 +2997,17 @@ fn fnDecl(
const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
const is_inferred_error = token_tags[maybe_bang] == .bang;
const return_type_inst = try AstGen.expr(
&decl_gz,
&decl_gz.base,
.{ .ty = .type_type },
fn_proto.ast.return_type,
);
const align_inst: Zir.Inst.Ref = if (fn_proto.ast.align_expr == 0) .none else inst: {
break :inst try expr(&decl_gz, params_scope, align_rl, fn_proto.ast.align_expr);
};
const section_inst: Zir.Inst.Ref = if (fn_proto.ast.section_expr == 0) .none else inst: {
break :inst try comptimeExpr(&decl_gz, params_scope, .{ .ty = .const_slice_u8_type }, fn_proto.ast.section_expr);
};
var ret_gz = decl_gz.makeSubBlock(params_scope);
defer ret_gz.instructions.deinit(gpa);
const ret_ty = try expr(&ret_gz, params_scope, coerced_type_rl, fn_proto.ast.return_type);
const ret_br = try ret_gz.addBreak(.break_inline, 0, ret_ty);
const cc: Zir.Inst.Ref = blk: {
if (fn_proto.ast.callconv_expr != 0) {
@@ -3012,9 +3018,9 @@ fn fnDecl(
.{},
);
}
break :blk try AstGen.expr(
break :blk try expr(
&decl_gz,
&decl_gz.base,
params_scope,
.{ .ty = .calling_convention_type },
fn_proto.ast.callconv_expr,
);
@@ -3037,8 +3043,9 @@ fn fnDecl(
}
break :func try decl_gz.addFunc(.{
.src_node = decl_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.ret_ty = ret_gz.instructions.items,
.ret_br = ret_br,
.param_block = block_inst,
.body = &[0]Zir.Inst.Index{},
.cc = cc,
.align_inst = .none, // passed in the per-decl data
@@ -3047,75 +3054,18 @@ fn fnDecl(
.is_inferred_error = false,
.is_test = false,
.is_extern = true,
.cur_bit_bag = cur_bit_bag,
.bit_bag = bit_bag.items,
});
} else func: {
if (is_var_args) {
return astgen.failTok(fn_proto.ast.fn_token, "non-extern function is variadic", .{});
}
var fn_gz: GenZir = .{
.force_comptime = false,
.in_defer = false,
.decl_node_index = fn_proto.ast.proto_node,
.decl_line = decl_gz.decl_line,
.parent = &decl_gz.base,
.astgen = astgen,
};
defer fn_gz.instructions.deinit(gpa);
const prev_fn_block = astgen.fn_block;
astgen.fn_block = &fn_gz;
defer astgen.fn_block = prev_fn_block;
// Iterate over the parameters. We put the param names as the first N
// items inside `extra` so that debug info later can refer to the parameter names
// even while the respective source code is unloaded.
try astgen.extra.ensureUnusedCapacity(gpa, param_count);
{
var params_scope = &fn_gz.base;
var i: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| : (i += 1) {
const name_token = param.name_token orelse {
if (param.anytype_ellipsis3) |tok| {
return astgen.failTok(tok, "missing parameter name", .{});
} else {
return astgen.failNode(param.type_expr, "missing parameter name", .{});
}
};
if (param.type_expr != 0)
_ = try typeExpr(&fn_gz, params_scope, param.type_expr);
if (mem.eql(u8, "_", tree.tokenSlice(name_token)))
continue;
const param_name = try astgen.identAsString(name_token);
// Create an arg instruction. This is needed to emit a semantic analysis
// error for shadowing decls.
try astgen.detectLocalShadowing(params_scope, param_name, name_token);
const arg_inst = try fn_gz.addStrTok(.arg, param_name, name_token);
const sub_scope = try astgen.arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &fn_gz,
.name = param_name,
.inst = arg_inst,
.token_src = name_token,
.id_cat = .@"function parameter",
};
params_scope = &sub_scope.base;
// Additionally put the param name into `string_bytes` and reference it with
// `extra` so that we have access to the data in codegen, for debug info.
const str_index = try astgen.identAsString(name_token);
try astgen.extra.append(astgen.gpa, str_index);
}
_ = try typeExpr(&fn_gz, params_scope, fn_proto.ast.return_type);
_ = try expr(&fn_gz, params_scope, .none, body_node);
try checkUsed(gz, &fn_gz.base, params_scope);
}
_ = try expr(&fn_gz, params_scope, .none, body_node);
try checkUsed(gz, &fn_gz.base, params_scope);
const need_implicit_ret = blk: {
if (fn_gz.instructions.items.len == 0)
@@ -3132,8 +3082,9 @@ fn fnDecl(
break :func try decl_gz.addFunc(.{
.src_node = decl_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.param_block = block_inst,
.ret_ty = ret_gz.instructions.items,
.ret_br = ret_br,
.body = fn_gz.instructions.items,
.cc = cc,
.align_inst = .none, // passed in the per-decl data
@@ -3142,8 +3093,6 @@ fn fnDecl(
.is_inferred_error = is_inferred_error,
.is_test = false,
.is_extern = false,
.cur_bit_bag = cur_bit_bag,
.bit_bag = bit_bag.items,
});
};
@@ -3479,8 +3428,9 @@ fn testDecl(
const func_inst = try decl_block.addFunc(.{
.src_node = node,
.ret_ty = .void_type,
.param_types = &[0]Zir.Inst.Ref{},
.param_block = block_inst,
.ret_ty = &.{},
.ret_br = 0,
.body = fn_block.instructions.items,
.cc = .none,
.align_inst = .none,
@@ -3489,8 +3439,6 @@ fn testDecl(
.is_inferred_error = true,
.is_test = true,
.is_extern = false,
.cur_bit_bag = 0,
.bit_bag = &.{},
});
_ = try decl_block.addBreak(.break_inline, block_inst, func_inst);
@@ -4238,7 +4186,7 @@ fn containerDecl(
var fields_data = ArrayListUnmanaged(u32){};
defer fields_data.deinit(gpa);
try fields_data.ensureCapacity(gpa, counts.total_fields + counts.values);
try fields_data.ensureTotalCapacity(gpa, counts.total_fields + counts.values);
// We only need this if there are greater than 32 fields.
var bit_bag = ArrayListUnmanaged(u32){};
@@ -5184,8 +5132,7 @@ fn setCondBrPayload(
) !void {
const astgen = then_scope.astgen;
try astgen.extra.ensureCapacity(astgen.gpa, astgen.extra.items.len +
@typeInfo(Zir.Inst.CondBr).Struct.fields.len +
try astgen.extra.ensureUnusedCapacity(astgen.gpa, @typeInfo(Zir.Inst.CondBr).Struct.fields.len +
then_scope.instructions.items.len + else_scope.instructions.items.len);
const zir_datas = astgen.instructions.items(.data);
@@ -5476,7 +5423,7 @@ fn forExpr(
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
const array_ptr = try expr(parent_gz, scope, .ref, for_full.ast.cond_expr);
const array_ptr = try expr(parent_gz, scope, .none_or_ref, for_full.ast.cond_expr);
const len = try parent_gz.addUnNode(.indexable_ptr_len, array_ptr, for_full.ast.cond_expr);
const index_ptr = blk: {
@@ -5839,10 +5786,9 @@ fn switchExpr(
_ = try case_scope.addBreak(.@"break", switch_block, case_result);
}
// Documentation for this: `Zir.Inst.SwitchBlock` and `Zir.Inst.SwitchBlockMulti`.
try scalar_cases_payload.ensureCapacity(gpa, scalar_cases_payload.items.len +
try scalar_cases_payload.ensureUnusedCapacity(gpa, case_scope.instructions.items.len +
3 + // operand, scalar_cases_len, else body len
@boolToInt(multi_cases_len != 0) +
case_scope.instructions.items.len);
@boolToInt(multi_cases_len != 0));
scalar_cases_payload.appendAssumeCapacity(@enumToInt(operand));
scalar_cases_payload.appendAssumeCapacity(scalar_cases_len);
if (multi_cases_len != 0) {
@@ -5852,9 +5798,11 @@ fn switchExpr(
scalar_cases_payload.appendSliceAssumeCapacity(case_scope.instructions.items);
} else {
// Documentation for this: `Zir.Inst.SwitchBlock` and `Zir.Inst.SwitchBlockMulti`.
try scalar_cases_payload.ensureCapacity(gpa, scalar_cases_payload.items.len +
2 + // operand, scalar_cases_len
@boolToInt(multi_cases_len != 0));
try scalar_cases_payload.ensureUnusedCapacity(
gpa,
@as(usize, 2) + // operand, scalar_cases_len
@boolToInt(multi_cases_len != 0),
);
scalar_cases_payload.appendAssumeCapacity(@enumToInt(operand));
scalar_cases_payload.appendAssumeCapacity(scalar_cases_len);
if (multi_cases_len != 0) {
@@ -5975,8 +5923,8 @@ fn switchExpr(
block_scope.break_count += 1;
_ = try case_scope.addBreak(.@"break", switch_block, case_result);
}
try scalar_cases_payload.ensureCapacity(gpa, scalar_cases_payload.items.len +
2 + case_scope.instructions.items.len);
try scalar_cases_payload.ensureUnusedCapacity(gpa, 2 +
case_scope.instructions.items.len);
scalar_cases_payload.appendAssumeCapacity(@enumToInt(item_inst));
scalar_cases_payload.appendAssumeCapacity(@intCast(u32, case_scope.instructions.items.len));
scalar_cases_payload.appendSliceAssumeCapacity(case_scope.instructions.items);
@@ -6012,8 +5960,8 @@ fn switchExpr(
const payload_index = astgen.extra.items.len;
const zir_datas = astgen.instructions.items(.data);
zir_datas[switch_block].pl_node.payload_index = @intCast(u32, payload_index);
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
scalar_cases_payload.items.len + multi_cases_payload.items.len);
try astgen.extra.ensureUnusedCapacity(gpa, scalar_cases_payload.items.len +
multi_cases_payload.items.len);
const strat = rl.strategy(&block_scope);
switch (strat.tag) {
.break_operand => {
@@ -6821,7 +6769,7 @@ fn as(
) InnerError!Zir.Inst.Ref {
const dest_type = try typeExpr(gz, scope, lhs);
switch (rl) {
.none, .none_or_ref, .discard, .ref, .ty => {
.none, .none_or_ref, .discard, .ref, .ty, .coerced_ty => {
const result = try reachableExpr(gz, scope, .{ .ty = dest_type }, rhs, node);
return rvalue(gz, rl, result, node);
},
@@ -6844,7 +6792,7 @@ fn unionInit(
const union_type = try typeExpr(gz, scope, params[0]);
const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
switch (rl) {
.none, .none_or_ref, .discard, .ref, .ty, .inferred_ptr => {
.none, .none_or_ref, .discard, .ref, .ty, .coerced_ty, .inferred_ptr => {
_ = try gz.addPlNode(.field_type_ref, params[1], Zir.Inst.FieldTypeRef{
.container_type = union_type,
.field_name = field_name,
@@ -6930,7 +6878,7 @@ fn bitCast(
const astgen = gz.astgen;
const dest_type = try typeExpr(gz, scope, lhs);
switch (rl) {
.none, .none_or_ref, .discard, .ty => {
.none, .none_or_ref, .discard, .ty, .coerced_ty => {
const operand = try expr(gz, scope, .none, rhs);
const result = try gz.addPlNode(.bitcast, node, Zir.Inst.Bin{
.lhs = dest_type,
@@ -7740,7 +7688,7 @@ fn callExpr(
.param_index = @intCast(u32, i),
} },
});
args[i] = try expr(gz, scope, .{ .ty = param_type }, param_node);
args[i] = try expr(gz, scope, .{ .coerced_ty = param_type }, param_node);
}
const modifier: std.builtin.CallOptions.Modifier = blk: {
@@ -8433,7 +8381,7 @@ fn rvalue(
src_node: ast.Node.Index,
) InnerError!Zir.Inst.Ref {
switch (rl) {
.none, .none_or_ref => return result,
.none, .none_or_ref, .coerced_ty => return result,
.discard => {
// Emit a compile error for discarding error values.
_ = try gz.addUnNode(.ensure_result_non_error, result, src_node);
@@ -8659,7 +8607,7 @@ fn failNodeNotes(
}
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureCapacity(astgen.gpa, notes_start + 1 + notes.len);
try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len));
astgen.extra.appendSliceAssumeCapacity(notes);
break :blk @intCast(u32, notes_start);
@@ -8700,7 +8648,7 @@ fn failTokNotes(
}
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureCapacity(astgen.gpa, notes_start + 1 + notes.len);
try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len));
astgen.extra.appendSliceAssumeCapacity(notes);
break :blk @intCast(u32, notes_start);
@@ -8864,7 +8812,7 @@ fn strLitNodeAsString(astgen: *AstGen, node: ast.Node.Index) !IndexSlice {
while (tok_i <= end) : (tok_i += 1) {
const slice = tree.tokenSlice(tok_i);
const line_bytes = slice[2 .. slice.len - 1];
try string_bytes.ensureCapacity(gpa, string_bytes.items.len + line_bytes.len + 1);
try string_bytes.ensureUnusedCapacity(gpa, line_bytes.len + 1);
string_bytes.appendAssumeCapacity('\n');
string_bytes.appendSliceAssumeCapacity(line_bytes);
}
@@ -9105,7 +9053,7 @@ const GenZir = struct {
// we emit ZIR for the block break instructions to have the result values,
// and then rvalue() on that to pass the value to the result location.
switch (parent_rl) {
.ty => |ty_inst| {
.ty, .coerced_ty => |ty_inst| {
gz.rl_ty_inst = ty_inst;
gz.break_result_loc = parent_rl;
},
@@ -9131,8 +9079,8 @@ const GenZir = struct {
fn setBoolBrBody(gz: GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
gz.instructions.items.len);
const zir_datas = gz.astgen.instructions.items(.data);
zir_datas[inst].bool_br.payload_index = gz.astgen.addExtraAssumeCapacity(
Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
@@ -9142,8 +9090,8 @@ const GenZir = struct {
fn setBlockBody(gz: GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
gz.instructions.items.len);
const zir_datas = gz.astgen.instructions.items(.data);
zir_datas[inst].pl_node.payload_index = gz.astgen.addExtraAssumeCapacity(
Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
@@ -9155,8 +9103,8 @@ const GenZir = struct {
/// `store_to_block_ptr` instructions with lhs set to .none.
fn setBlockBodyEliding(gz: GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
gz.instructions.items.len);
const zir_datas = gz.astgen.instructions.items(.data);
const zir_tags = gz.astgen.instructions.items(.tag);
const block_pl_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Block{
@@ -9177,9 +9125,10 @@ const GenZir = struct {
fn addFunc(gz: *GenZir, args: struct {
src_node: ast.Node.Index,
param_types: []const Zir.Inst.Ref,
body: []const Zir.Inst.Index,
ret_ty: Zir.Inst.Ref,
param_block: Zir.Inst.Index,
ret_ty: []const Zir.Inst.Index,
ret_br: Zir.Inst.Index,
cc: Zir.Inst.Ref,
align_inst: Zir.Inst.Ref,
lib_name: u32,
@@ -9187,11 +9136,8 @@ const GenZir = struct {
is_inferred_error: bool,
is_test: bool,
is_extern: bool,
cur_bit_bag: u32,
bit_bag: []const u32,
}) !Zir.Inst.Ref {
assert(args.src_node != 0);
assert(args.ret_ty != .none);
const astgen = gz.astgen;
const gpa = astgen.gpa;
@@ -9226,27 +9172,22 @@ const GenZir = struct {
src_locs = &src_locs_buffer;
}
const any_are_comptime = args.cur_bit_bag != 0 or for (args.bit_bag) |x| {
if (x != 0) break true;
} else false;
if (args.cc != .none or args.lib_name != 0 or
args.is_var_args or args.is_test or args.align_inst != .none or
args.is_extern or any_are_comptime)
args.is_extern)
{
try astgen.extra.ensureUnusedCapacity(
gpa,
@typeInfo(Zir.Inst.ExtendedFunc).Struct.fields.len +
@boolToInt(any_are_comptime) + args.bit_bag.len +
args.param_types.len + args.body.len + src_locs.len +
args.ret_ty.len + args.body.len + src_locs.len +
@boolToInt(args.lib_name != 0) +
@boolToInt(args.align_inst != .none) +
@boolToInt(args.cc != .none),
);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.ExtendedFunc{
.src_node = gz.nodeIndexToRelative(args.src_node),
.return_type = args.ret_ty,
.param_types_len = @intCast(u32, args.param_types.len),
.param_block = args.param_block,
.ret_body_len = @intCast(u32, args.ret_ty.len),
.body_len = @intCast(u32, args.body.len),
});
if (args.lib_name != 0) {
@@ -9258,15 +9199,14 @@ const GenZir = struct {
if (args.align_inst != .none) {
astgen.extra.appendAssumeCapacity(@enumToInt(args.align_inst));
}
if (any_are_comptime) {
astgen.extra.appendSliceAssumeCapacity(args.bit_bag); // Likely empty.
astgen.extra.appendAssumeCapacity(args.cur_bit_bag);
}
astgen.appendRefsAssumeCapacity(args.param_types);
astgen.extra.appendSliceAssumeCapacity(args.ret_ty);
astgen.extra.appendSliceAssumeCapacity(args.body);
astgen.extra.appendSliceAssumeCapacity(src_locs);
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
if (args.ret_br != 0) {
astgen.instructions.items(.data)[args.ret_br].@"break".block_inst = new_index;
}
astgen.instructions.appendAssumeCapacity(.{
.tag = .extended,
.data = .{ .extended = .{
@@ -9279,7 +9219,6 @@ const GenZir = struct {
.has_align = args.align_inst != .none,
.is_test = args.is_test,
.is_extern = args.is_extern,
.has_comptime_bits = any_are_comptime,
}),
.operand = payload_index,
} },
@@ -9287,24 +9226,27 @@ const GenZir = struct {
gz.instructions.appendAssumeCapacity(new_index);
return indexToRef(new_index);
} else {
try gz.astgen.extra.ensureUnusedCapacity(
try astgen.extra.ensureUnusedCapacity(
gpa,
@typeInfo(Zir.Inst.Func).Struct.fields.len +
args.param_types.len + args.body.len + src_locs.len,
args.ret_ty.len + args.body.len + src_locs.len,
);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Func{
.return_type = args.ret_ty,
.param_types_len = @intCast(u32, args.param_types.len),
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.Func{
.param_block = args.param_block,
.ret_body_len = @intCast(u32, args.ret_ty.len),
.body_len = @intCast(u32, args.body.len),
});
gz.astgen.appendRefsAssumeCapacity(args.param_types);
gz.astgen.extra.appendSliceAssumeCapacity(args.body);
gz.astgen.extra.appendSliceAssumeCapacity(src_locs);
astgen.extra.appendSliceAssumeCapacity(args.ret_ty);
astgen.extra.appendSliceAssumeCapacity(args.body);
astgen.extra.appendSliceAssumeCapacity(src_locs);
const tag: Zir.Inst.Tag = if (args.is_inferred_error) .func_inferred else .func;
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
if (args.ret_br != 0) {
astgen.instructions.items(.data)[args.ret_br].@"break".block_inst = new_index;
}
astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .pl_node = .{
.src_node = gz.nodeIndexToRelative(args.src_node),
@@ -9380,10 +9322,10 @@ const GenZir = struct {
assert(callee != .none);
assert(src_node != 0);
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Call).Struct.fields.len + args.len);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Call).Struct.fields.len +
args.len);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Call{
.callee = callee,
@@ -9412,8 +9354,8 @@ const GenZir = struct {
) !Zir.Inst.Index {
assert(lhs != .none);
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
@@ -9486,8 +9428,8 @@ const GenZir = struct {
extra: anytype,
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(extra);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
@@ -9502,6 +9444,38 @@ const GenZir = struct {
return indexToRef(new_index);
}
/// Appends a parameter-declaring ZIR instruction (tag uses the `pl_tok`
/// data form, with a `Zir.Inst.Param` payload followed by the parameter's
/// type-expression body in `extra`) to this scope, returning the new
/// instruction's index.
fn addParam(
    gz: *GenZir,
    tag: Zir.Inst.Tag,
    /// Absolute token index. This function does the conversion to Decl offset.
    abs_tok_index: ast.TokenIndex,
    name: u32,
    body: []const u32,
) !Zir.Inst.Index {
    const gpa = gz.astgen.gpa;
    // Reserve all capacity up front so the `AssumeCapacity` calls below
    // cannot fail partway through.
    try gz.instructions.ensureUnusedCapacity(gpa, 1);
    try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
    try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Param).Struct.fields.len +
        body.len);

    const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Param{
        .name = name,
        .body_len = @intCast(u32, body.len),
    });
    // The type-expression body trails the fixed-size `Param` payload.
    gz.astgen.extra.appendSliceAssumeCapacity(body);

    const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
    gz.astgen.instructions.appendAssumeCapacity(.{
        .tag = tag,
        .data = .{ .pl_tok = .{
            .src_tok = gz.tokenIndexToRelative(abs_tok_index),
            .payload_index = payload_index,
        } },
    });
    gz.instructions.appendAssumeCapacity(new_index);
    return new_index;
}
fn addExtendedPayload(
gz: *GenZir,
opcode: Zir.Inst.Extended,
@@ -9509,8 +9483,8 @@ const GenZir = struct {
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(extra);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
@@ -9566,8 +9540,8 @@ const GenZir = struct {
elem_type: Zir.Inst.Ref,
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(Zir.Inst.ArrayTypeSentinel{
.sentinel = sentinel,
@@ -9822,7 +9796,7 @@ const GenZir = struct {
/// Leaves the `payload_index` field undefined.
fn addCondBr(gz: *GenZir, tag: Zir.Inst.Tag, node: ast.Node.Index) !Zir.Inst.Index {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
try gz.astgen.instructions.append(gpa, .{
.tag = tag,

View File

@@ -2116,7 +2116,7 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
if (builtin.mode == .Debug and self.verbose_air) {
std.debug.print("# Begin Function AIR: {s}:\n", .{decl.name});
@import("print_air.zig").dump(gpa, air, decl.namespace.file_scope.zir, liveness);
std.debug.print("# End Function AIR: {s}:\n", .{decl.name});
std.debug.print("# End Function AIR: {s}\n\n", .{decl.name});
}
self.bin_file.updateFunc(module, func, air, liveness) catch |err| switch (err) {

View File

@@ -61,6 +61,11 @@ export_owners: std.AutoArrayHashMapUnmanaged(*Decl, []*Export) = .{},
/// Keys are fully resolved file paths. This table owns the keys and values.
import_table: std.StringArrayHashMapUnmanaged(*Scope.File) = .{},
/// The set of all the generic function instantiations. This is used so that when a generic
/// function is called twice with the same comptime parameter arguments, both calls dispatch
/// to the same function.
monomorphed_funcs: MonomorphedFuncsSet = .{},
/// We optimize memory usage for a compilation with no compile errors by storing the
/// error messages and mapping outside of `Decl`.
/// The ErrorMsg memory is owned by the decl, using Module's general purpose allocator.
@@ -114,6 +119,44 @@ emit_h: ?*GlobalEmitH,
test_functions: std.AutoArrayHashMapUnmanaged(*Decl, void) = .{},
/// Set of generic function instantiations, keyed by `*Fn` with
/// `MonomorphedFuncsContext` providing hashing and equality.
const MonomorphedFuncsSet = std.HashMapUnmanaged(
    *Fn,
    void,
    MonomorphedFuncsContext,
    std.hash_map.default_max_load_percentage,
);
/// Hash/equality context for `MonomorphedFuncsSet`.
const MonomorphedFuncsContext = struct {
    /// Two stored entries are equal only when they are the same `Fn`
    /// instance (pointer identity).
    pub fn eql(ctx: @This(), a: *Fn, b: *Fn) bool {
        _ = ctx; // context carries no state
        return a == b;
    }

    /// Must match `Sema.GenericCallAdapter.hash`.
    pub fn hash(ctx: @This(), key: *Fn) u64 {
        _ = ctx; // context carries no state
        var hasher = std.hash.Wyhash.init(0);

        // The generic function Decl is guaranteed to be the first dependency
        // of each of its instantiations.
        const generic_owner_decl = key.owner_decl.dependencies.keys()[0];
        const generic_func = generic_owner_decl.val.castTag(.function).?.data;
        // Seed with the identity of the generic function being instantiated.
        std.hash.autoHash(&hasher, @ptrToInt(generic_func));

        // This logic must be kept in sync with the logic in `analyzeCall` that
        // computes the hash.
        const comptime_args = key.comptime_args.?;
        const generic_ty_info = generic_owner_decl.ty.fnInfo();
        // Only comptime parameters with a known (non-poison) type contribute
        // their comptime argument value to the hash.
        for (generic_ty_info.param_types) |param_ty, i| {
            if (generic_ty_info.paramIsComptime(i) and param_ty.tag() != .generic_poison) {
                comptime_args[i].val.hash(param_ty, &hasher);
            }
        }
        return hasher.final();
    }
};
/// A `Module` has zero or one of these depending on whether `-femit-h` is enabled.
pub const GlobalEmitH = struct {
/// Where to put the output.
@@ -757,6 +800,10 @@ pub const Union = struct {
pub const Fn = struct {
/// The Decl that corresponds to the function itself.
owner_decl: *Decl,
/// If this is not null, this function is a generic function instantiation, and
/// there is a `Value` here for each parameter of the function. Non-comptime
/// parameters are marked with an `unreachable_value`.
comptime_args: ?[*]TypedValue = null,
/// The ZIR instruction that is a function instruction. Use this to find
/// the body. We store this rather than the body directly so that when ZIR
/// is regenerated on update(), we can map this to the new corresponding
@@ -795,6 +842,9 @@ pub const Fn = struct {
pub fn getInferredErrorSet(func: *Fn) ?*std.StringHashMapUnmanaged(void) {
const ret_ty = func.owner_decl.ty.fnReturnType();
if (ret_ty.tag() == .generic_poison) {
return null;
}
if (ret_ty.zigTypeTag() == .ErrorUnion) {
if (ret_ty.errorUnionSet().castTag(.error_set_inferred)) |payload| {
return &payload.data.map;
@@ -1169,6 +1219,8 @@ pub const Scope = struct {
/// for the one that will be the same for all Block instances.
src_decl: *Decl,
instructions: ArrayListUnmanaged(Air.Inst.Index),
// `param` instructions are collected here to be used by the `func` instruction.
params: std.ArrayListUnmanaged(Param) = .{},
label: ?*Label = null,
inlining: ?*Inlining,
/// If runtime_index is not 0 then one of these is guaranteed to be non null.
@@ -1183,6 +1235,12 @@ pub const Scope = struct {
/// when null, it is determined by build mode, changed by @setRuntimeSafety
want_safety: ?bool = null,
const Param = struct {
/// `noreturn` means `anytype`.
ty: Type,
is_comptime: bool,
};
/// This `Block` maps a block ZIR instruction to the corresponding
/// AIR instruction for break instruction analysis.
pub const Label = struct {
@@ -1630,8 +1688,11 @@ pub const SrcLoc = struct {
.@"asm" => tree.asmFull(node),
else => unreachable,
};
const asm_output = full.outputs[0];
const node_datas = tree.nodes.items(.data);
const ret_ty_node = node_datas[asm_output].lhs;
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[full.outputs[0]];
const tok_index = main_tokens[ret_ty_node];
const token_starts = tree.tokens.items(.start);
return token_starts[tok_index];
},
@@ -2095,7 +2156,20 @@ pub const LazySrcLoc = union(enum) {
};
pub const SemaError = error{ OutOfMemory, AnalysisFail };
pub const CompileError = error{ OutOfMemory, AnalysisFail, NeededSourceLocation };
pub const CompileError = error{
OutOfMemory,
/// When this is returned, the compile error for the failure has already been recorded.
AnalysisFail,
/// Returned when a compile error needed to be reported but a provided LazySrcLoc was set
/// to the `unneeded` tag. The source location was, in fact, needed. It is expected that
/// somewhere up the call stack, the operation will be retried after doing expensive work
/// to compute a source location.
NeededSourceLocation,
/// A Type or Value was needed to be used during semantic analysis, but it was not available
/// because the function is generic. This is only seen when analyzing the body of a param
/// instruction.
GenericPoison,
};
pub fn deinit(mod: *Module) void {
const gpa = mod.gpa;
@@ -2177,6 +2251,7 @@ pub fn deinit(mod: *Module) void {
mod.error_name_list.deinit(gpa);
mod.test_functions.deinit(gpa);
mod.monomorphed_funcs.deinit(gpa);
}
fn freeExportList(gpa: *Allocator, export_list: []*Export) void {
@@ -2792,14 +2867,16 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) SemaError!void {
}
return error.AnalysisFail;
},
else => {
error.NeededSourceLocation => unreachable,
error.GenericPoison => unreachable,
else => |e| {
decl.analysis = .sema_failure_retryable;
try mod.failed_decls.ensureUnusedCapacity(mod.gpa, 1);
mod.failed_decls.putAssumeCapacityNoClobber(decl, try ErrorMsg.create(
mod.gpa,
decl.srcLoc(),
"unable to analyze: {s}",
.{@errorName(err)},
.{@errorName(e)},
));
return error.AnalysisFail;
},
@@ -2899,7 +2976,6 @@ pub fn semaFile(mod: *Module, file: *Scope.File) SemaError!void {
.namespace = &struct_obj.namespace,
.func = null,
.owner_func = null,
.param_inst_list = &.{},
};
defer sema.deinit();
var block_scope: Scope.Block = .{
@@ -2954,7 +3030,6 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
.namespace = decl.namespace,
.func = null,
.owner_func = null,
.param_inst_list = &.{},
};
defer sema.deinit();
@@ -2980,7 +3055,10 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
.inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(gpa);
defer {
block_scope.instructions.deinit(gpa);
block_scope.params.deinit(gpa);
}
const zir_block_index = decl.zirBlockIndex();
const inst_data = zir_datas[zir_block_index].pl_node;
@@ -3625,8 +3703,6 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) SemaError!Air {
defer decl.value_arena.?.* = arena.state;
const fn_ty = decl.ty;
const param_inst_list = try gpa.alloc(Air.Inst.Ref, fn_ty.fnParamLen());
defer gpa.free(param_inst_list);
var sema: Sema = .{
.mod = mod,
@@ -3637,7 +3713,6 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) SemaError!Air {
.namespace = decl.namespace,
.func = func,
.owner_func = func,
.param_inst_list = param_inst_list,
};
defer sema.deinit();
@@ -3656,29 +3731,71 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) SemaError!Air {
};
defer inner_block.instructions.deinit(gpa);
// AIR requires the arg parameters to be the first N instructions.
try inner_block.instructions.ensureTotalCapacity(gpa, param_inst_list.len);
for (param_inst_list) |*param_inst, param_index| {
const param_type = fn_ty.fnParamType(param_index);
const fn_info = sema.code.getFnInfo(func.zir_body_inst);
const zir_tags = sema.code.instructions.items(.tag);
// Here we are performing "runtime semantic analysis" for a function body, which means
// we must map the parameter ZIR instructions to `arg` AIR instructions.
// AIR requires the `arg` parameters to be the first N instructions.
// This could be a generic function instantiation, however, in which case we need to
// map the comptime parameters to constant values and only emit arg AIR instructions
// for the runtime ones.
const runtime_params_len = @intCast(u32, fn_ty.fnParamLen());
try inner_block.instructions.ensureTotalCapacity(gpa, runtime_params_len);
try sema.air_instructions.ensureUnusedCapacity(gpa, fn_info.total_params_len * 2); // * 2 for the `addType`
try sema.inst_map.ensureUnusedCapacity(gpa, fn_info.total_params_len);
var runtime_param_index: usize = 0;
var total_param_index: usize = 0;
for (fn_info.param_body) |inst| {
const name = switch (zir_tags[inst]) {
.param, .param_comptime => blk: {
const inst_data = sema.code.instructions.items(.data)[inst].pl_tok;
const extra = sema.code.extraData(Zir.Inst.Param, inst_data.payload_index).data;
break :blk extra.name;
},
.param_anytype, .param_anytype_comptime => blk: {
const str_tok = sema.code.instructions.items(.data)[inst].str_tok;
break :blk str_tok.start;
},
else => continue,
};
if (func.comptime_args) |comptime_args| {
const arg_tv = comptime_args[total_param_index];
if (arg_tv.val.tag() != .unreachable_value) {
// We have a comptime value for this parameter.
const arg = try sema.addConstant(arg_tv.ty, arg_tv.val);
sema.inst_map.putAssumeCapacityNoClobber(inst, arg);
total_param_index += 1;
continue;
}
}
const param_type = fn_ty.fnParamType(runtime_param_index);
const ty_ref = try sema.addType(param_type);
const arg_index = @intCast(u32, sema.air_instructions.len);
inner_block.instructions.appendAssumeCapacity(arg_index);
param_inst.* = Air.indexToRef(arg_index);
try sema.air_instructions.append(gpa, .{
sema.air_instructions.appendAssumeCapacity(.{
.tag = .arg,
.data = .{
.ty_str = .{
.ty = ty_ref,
.str = undefined, // Set in the semantic analysis of the arg instruction.
},
},
.data = .{ .ty_str = .{
.ty = ty_ref,
.str = name,
} },
});
sema.inst_map.putAssumeCapacityNoClobber(inst, Air.indexToRef(arg_index));
total_param_index += 1;
runtime_param_index += 1;
}
func.state = .in_progress;
log.debug("set {s} to in_progress", .{decl.name});
try sema.analyzeFnBody(&inner_block, func.zir_body_inst);
_ = sema.analyzeBody(&inner_block, fn_info.body) catch |err| switch (err) {
error.NeededSourceLocation => @panic("zig compiler bug: NeededSourceLocation"),
error.GenericPoison => @panic("zig compiler bug: GenericPoison"),
else => |e| return e,
};
// Copy the block into place and mark that as the main block.
try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Block).Struct.fields.len +
@@ -3714,7 +3831,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
decl.analysis = .outdated;
}
fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node.Index) !*Decl {
pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node.Index) !*Decl {
// If we have emit-h then we must allocate a bigger structure to store the emit-h state.
const new_decl: *Decl = if (mod.emit_h != null) blk: {
const parent_struct = try mod.gpa.create(DeclPlusEmitH);
@@ -4330,7 +4447,6 @@ pub fn analyzeStructFields(mod: *Module, struct_obj: *Struct) CompileError!void
.namespace = &struct_obj.namespace,
.owner_func = null,
.func = null,
.param_inst_list = &.{},
};
defer sema.deinit();
@@ -4484,7 +4600,6 @@ pub fn analyzeUnionFields(mod: *Module, union_obj: *Union) CompileError!void {
.namespace = &union_obj.namespace,
.owner_func = null,
.func = null,
.param_inst_list = &.{},
};
defer sema.deinit();

File diff suppressed because it is too large Load Diff

View File

@@ -61,7 +61,7 @@ pub const ExtraIndex = enum(u32) {
_,
};
pub fn getMainStruct(zir: Zir) Zir.Inst.Index {
pub fn getMainStruct(zir: Zir) Inst.Index {
return zir.extra[@enumToInt(ExtraIndex.main_struct)] -
@intCast(u32, Inst.Ref.typed_value_map.len);
}
@@ -173,11 +173,22 @@ pub const Inst = struct {
/// Twos complement wrapping integer addition.
/// Uses the `pl_node` union field. Payload is `Bin`.
addwrap,
/// Declares a parameter of the current function. Used for debug info and
/// for checking shadowing against declarations in the current namespace.
/// Uses the `str_tok` field. Token is the parameter name, string is the
/// parameter name.
arg,
/// Declares a parameter of the current function. Used for:
/// * debug info
/// * checking shadowing against declarations in the current namespace
/// * parameter type expressions referencing other parameters
/// These occur in the block outside a function body (the same block as
/// contains the func instruction).
/// Uses the `pl_tok` field. Token is the parameter name, payload is a `Param`.
param,
/// Same as `param` except the parameter is marked comptime.
param_comptime,
/// Same as `param` except the parameter is marked anytype.
/// Uses the `str_tok` field. Token is the parameter name. String is the parameter name.
param_anytype,
/// Same as `param` except the parameter is marked both comptime and anytype.
/// Uses the `str_tok` field. Token is the parameter name. String is the parameter name.
param_anytype_comptime,
/// Array concatenation. `a ++ b`
/// Uses the `pl_node` union field. Payload is `Bin`.
array_cat,
@@ -971,7 +982,10 @@ pub const Inst = struct {
/// Function calls do not count.
pub fn isNoReturn(tag: Tag) bool {
return switch (tag) {
.arg,
.param,
.param_comptime,
.param_anytype,
.param_anytype_comptime,
.add,
.addwrap,
.alloc,
@@ -1233,7 +1247,10 @@ pub const Inst = struct {
break :list std.enums.directEnumArray(Tag, Data.FieldEnum, 0, .{
.add = .pl_node,
.addwrap = .pl_node,
.arg = .str_tok,
.param = .pl_tok,
.param_comptime = .pl_tok,
.param_anytype = .str_tok,
.param_anytype_comptime = .str_tok,
.array_cat = .pl_node,
.array_mul = .pl_node,
.array_type = .bin,
@@ -1687,6 +1704,8 @@ pub const Inst = struct {
fn_ccc_void_no_args_type,
single_const_pointer_to_comptime_int_type,
const_slice_u8_type,
anyerror_void_error_union_type,
generic_poison_type,
/// `undefined` (untyped)
undef,
@@ -1714,6 +1733,9 @@ pub const Inst = struct {
calling_convention_c,
/// `std.builtin.CallingConvention.Inline`
calling_convention_inline,
/// Used for generic parameters where the type and value
/// is not known until generic function instantiation.
generic_poison,
_,
@@ -1892,6 +1914,14 @@ pub const Inst = struct {
.ty = Type.initTag(.type),
.val = Value.initTag(.const_slice_u8_type),
},
.anyerror_void_error_union_type = .{
.ty = Type.initTag(.type),
.val = Value.initTag(.anyerror_void_error_union_type),
},
.generic_poison_type = .{
.ty = Type.initTag(.type),
.val = Value.initTag(.generic_poison_type),
},
.enum_literal_type = .{
.ty = Type.initTag(.type),
.val = Value.initTag(.enum_literal_type),
@@ -1989,6 +2019,10 @@ pub const Inst = struct {
.ty = Type.initTag(.calling_convention),
.val = .{ .ptr_otherwise = &calling_convention_inline_payload.base },
},
.generic_poison = .{
.ty = Type.initTag(.generic_poison),
.val = Value.initTag(.generic_poison),
},
});
};
@@ -2047,6 +2081,17 @@ pub const Inst = struct {
return .{ .node_offset = self.src_node };
}
},
pl_tok: struct {
/// Offset from Decl AST token index.
src_tok: ast.TokenIndex,
/// index into extra.
/// `Tag` determines what lives there.
payload_index: u32,
pub fn src(self: @This()) LazySrcLoc {
return .{ .token_offset = self.src_tok };
}
},
bin: Bin,
/// For strings which may contain null bytes.
str: struct {
@@ -2170,6 +2215,7 @@ pub const Inst = struct {
un_node,
un_tok,
pl_node,
pl_tok,
bin,
str,
str_tok,
@@ -2226,17 +2272,15 @@ pub const Inst = struct {
/// 0. lib_name: u32, // null terminated string index, if has_lib_name is set
/// 1. cc: Ref, // if has_cc is set
/// 2. align: Ref, // if has_align is set
/// 3. comptime_bits: u32 // for every 32 parameters, if has_comptime_bits is set
/// - sets of 1 bit:
/// 0bX: whether corresponding parameter is comptime
/// 4. param_type: Ref // for each param_types_len
/// - `none` indicates that the param type is `anytype`.
/// 5. body: Index // for each body_len
/// 6. src_locs: Func.SrcLocs // if body_len != 0
/// 3. return_type: Index // for each ret_body_len
/// 4. body: Index // for each body_len
/// 5. src_locs: Func.SrcLocs // if body_len != 0
pub const ExtendedFunc = struct {
src_node: i32,
return_type: Ref,
param_types_len: u32,
/// If this is 0 it means a void return type.
ret_body_len: u32,
/// Points to the block that contains the param instructions for this function.
param_block: Index,
body_len: u32,
pub const Small = packed struct {
@@ -2247,8 +2291,7 @@ pub const Inst = struct {
has_align: bool,
is_test: bool,
is_extern: bool,
has_comptime_bits: bool,
_: u8 = undefined,
_: u9 = undefined,
};
};
@@ -2271,13 +2314,14 @@ pub const Inst = struct {
};
/// Trailing:
/// 0. param_type: Ref // for each param_types_len
/// - `none` indicates that the param type is `anytype`.
/// 0. return_type: Index // for each ret_body_len
/// 1. body: Index // for each body_len
/// 2. src_locs: SrcLocs // if body_len != 0
pub const Func = struct {
return_type: Ref,
param_types_len: u32,
/// If this is 0 it means a void return type.
ret_body_len: u32,
/// Points to the block that contains the param instructions for this function.
param_block: Index,
body_len: u32,
pub const SrcLocs = struct {
@@ -2764,6 +2808,14 @@ pub const Inst = struct {
args: Ref,
};
/// Trailing: inst: Index // for every body_len
pub const Param = struct {
/// Null-terminated string index.
name: u32,
/// The body contains the type of the parameter.
body_len: u32,
};
/// Trailing:
/// 0. type_inst: Ref, // if small 0b000X is set
/// 1. align_inst: Ref, // if small 0b00X0 is set
@@ -3108,11 +3160,14 @@ const Writer = struct {
.decl_ref,
.decl_val,
.import,
.arg,
.ret_err_value,
.ret_err_value_code,
.param_anytype,
.param_anytype_comptime,
=> try self.writeStrTok(stream, inst),
.param, .param_comptime => try self.writeParam(stream, inst),
.func => try self.writeFunc(stream, inst, false),
.func_inferred => try self.writeFunc(stream, inst, true),
@@ -3314,6 +3369,22 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
fn writeParam(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_tok;
const extra = self.code.extraData(Inst.Param, inst_data.payload_index);
const body = self.code.extra[extra.end..][0..extra.data.body_len];
try stream.print("\"{}\", ", .{
std.zig.fmtEscapes(self.code.nullTerminatedString(extra.data.name)),
});
try stream.writeAll("{\n");
self.indent += 2;
try self.writeBody(stream, body);
self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll(") ");
try self.writeSrc(stream, inst_data.src());
}
fn writePlNodeBin(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Inst.Bin, inst_data.payload_index).data;
@@ -4277,17 +4348,21 @@ const Writer = struct {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = self.code.extraData(Inst.Func, inst_data.payload_index);
const param_types = self.code.refSlice(extra.end, extra.data.param_types_len);
const body = self.code.extra[extra.end + param_types.len ..][0..extra.data.body_len];
var extra_index = extra.end;
const ret_ty_body = self.code.extra[extra_index..][0..extra.data.ret_body_len];
extra_index += ret_ty_body.len;
const body = self.code.extra[extra_index..][0..extra.data.body_len];
extra_index += body.len;
var src_locs: Zir.Inst.Func.SrcLocs = undefined;
if (body.len != 0) {
const extra_index = extra.end + param_types.len + body.len;
src_locs = self.code.extraData(Zir.Inst.Func.SrcLocs, extra_index).data;
}
return self.writeFuncCommon(
stream,
param_types,
extra.data.return_type,
ret_ty_body,
inferred_error_set,
false,
false,
@@ -4296,7 +4371,6 @@ const Writer = struct {
body,
src,
src_locs,
&.{},
);
}
@@ -4323,15 +4397,8 @@ const Writer = struct {
break :blk align_inst;
};
const comptime_bits: []const u32 = if (!small.has_comptime_bits) &.{} else blk: {
const amt = (extra.data.param_types_len + 31) / 32;
const bit_bags = self.code.extra[extra_index..][0..amt];
extra_index += amt;
break :blk bit_bags;
};
const param_types = self.code.refSlice(extra_index, extra.data.param_types_len);
extra_index += param_types.len;
const ret_ty_body = self.code.extra[extra_index..][0..extra.data.ret_body_len];
extra_index += ret_ty_body.len;
const body = self.code.extra[extra_index..][0..extra.data.body_len];
extra_index += body.len;
@@ -4342,8 +4409,7 @@ const Writer = struct {
}
return self.writeFuncCommon(
stream,
param_types,
extra.data.return_type,
ret_ty_body,
small.is_inferred_error,
small.is_var_args,
small.is_extern,
@@ -4352,7 +4418,6 @@ const Writer = struct {
body,
src,
src_locs,
comptime_bits,
);
}
@@ -4426,8 +4491,7 @@ const Writer = struct {
fn writeFuncCommon(
self: *Writer,
stream: anytype,
param_types: []const Inst.Ref,
ret_ty: Inst.Ref,
ret_ty_body: []const Inst.Index,
inferred_error_set: bool,
var_args: bool,
is_extern: bool,
@@ -4436,20 +4500,18 @@ const Writer = struct {
body: []const Inst.Index,
src: LazySrcLoc,
src_locs: Zir.Inst.Func.SrcLocs,
comptime_bits: []const u32,
) !void {
try stream.writeAll("[");
for (param_types) |param_type, i| {
if (i != 0) try stream.writeAll(", ");
if (comptime_bits.len != 0) {
const bag = comptime_bits[i / 32];
const is_comptime = @truncate(u1, bag >> @intCast(u5, i % 32)) != 0;
try self.writeFlag(stream, "comptime ", is_comptime);
}
try self.writeInstRef(stream, param_type);
if (ret_ty_body.len == 0) {
try stream.writeAll("ret_ty=void");
} else {
try stream.writeAll("ret_ty={\n");
self.indent += 2;
try self.writeBody(stream, ret_ty_body);
self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}");
}
try stream.writeAll("], ");
try self.writeInstRef(stream, ret_ty);
try self.writeOptionalInstRef(stream, ", cc=", cc);
try self.writeOptionalInstRef(stream, ", align=", align_inst);
try self.writeFlag(stream, ", vargs", var_args);
@@ -4457,9 +4519,9 @@ const Writer = struct {
try self.writeFlag(stream, ", inferror", inferred_error_set);
if (body.len == 0) {
try stream.writeAll(", {}) ");
try stream.writeAll(", body={}) ");
} else {
try stream.writeAll(", {\n");
try stream.writeAll(", body={\n");
self.indent += 2;
try self.writeBody(stream, body);
self.indent -= 2;
@@ -4714,8 +4776,7 @@ fn findDeclsInner(
const inst_data = datas[inst].pl_node;
const extra = zir.extraData(Inst.Func, inst_data.payload_index);
const param_types_len = extra.data.param_types_len;
const body = zir.extra[extra.end + param_types_len ..][0..extra.data.body_len];
const body = zir.extra[extra.end..][0..extra.data.body_len];
return zir.findDeclsBody(list, body);
},
.extended => {
@@ -4730,7 +4791,6 @@ fn findDeclsInner(
extra_index += @boolToInt(small.has_lib_name);
extra_index += @boolToInt(small.has_cc);
extra_index += @boolToInt(small.has_align);
extra_index += extra.data.param_types_len;
const body = zir.extra[extra_index..][0..extra.data.body_len];
return zir.findDeclsBody(list, body);
},
@@ -4885,10 +4945,83 @@ fn findDeclsSwitchMulti(
fn findDeclsBody(
zir: Zir,
list: *std.ArrayList(Zir.Inst.Index),
body: []const Zir.Inst.Index,
list: *std.ArrayList(Inst.Index),
body: []const Inst.Index,
) Allocator.Error!void {
for (body) |member| {
try zir.findDeclsInner(list, member);
}
}
/// The decomposed pieces of a ZIR function instruction, as produced by
/// `getFnInfo`.
pub const FnInfo = struct {
    /// Body of the enclosing `block`/`block_inline` that contains the
    /// function's `param*` instructions (it may contain other
    /// instructions as well).
    param_body: []const Inst.Index,
    /// Instructions that evaluate the return type; empty means `void`.
    ret_ty_body: []const Inst.Index,
    /// The function body instructions.
    body: []const Inst.Index,
    /// Number of `param`, `param_comptime`, `param_anytype`, and
    /// `param_anytype_comptime` instructions found in `param_body`.
    total_params_len: u32,
};
/// Decodes the payload of a function instruction — `func`, `func_inferred`,
/// or `extended` with opcode `.func` — into its constituent bodies and
/// parameter count. Asserts that `fn_inst` is one of those tags, and that
/// its `param_block` refers to a `block` or `block_inline` instruction.
pub fn getFnInfo(zir: Zir, fn_inst: Inst.Index) FnInfo {
    const tags = zir.instructions.items(.tag);
    const datas = zir.instructions.items(.data);
    const info: struct {
        param_block: Inst.Index,
        body: []const Inst.Index,
        ret_ty_body: []const Inst.Index,
    } = switch (tags[fn_inst]) {
        .func, .func_inferred => blk: {
            const inst_data = datas[fn_inst].pl_node;
            const extra = zir.extraData(Inst.Func, inst_data.payload_index);
            var extra_index: usize = extra.end;
            // Trailing data order for `Func`: return-type body, then function body.
            const ret_ty_body = zir.extra[extra_index..][0..extra.data.ret_body_len];
            extra_index += ret_ty_body.len;
            const body = zir.extra[extra_index..][0..extra.data.body_len];
            extra_index += body.len;
            break :blk .{
                .param_block = extra.data.param_block,
                .ret_ty_body = ret_ty_body,
                .body = body,
            };
        },
        .extended => blk: {
            const extended = datas[fn_inst].extended;
            assert(extended.opcode == .func);
            const extra = zir.extraData(Inst.ExtendedFunc, extended.operand);
            const small = @bitCast(Inst.ExtendedFunc.Small, extended.small);
            var extra_index: usize = extra.end;
            // Skip the optional leading fields (lib_name, cc, align) that
            // precede the bodies in `ExtendedFunc` trailing data.
            extra_index += @boolToInt(small.has_lib_name);
            extra_index += @boolToInt(small.has_cc);
            extra_index += @boolToInt(small.has_align);
            const ret_ty_body = zir.extra[extra_index..][0..extra.data.ret_body_len];
            extra_index += ret_ty_body.len;
            const body = zir.extra[extra_index..][0..extra.data.body_len];
            extra_index += body.len;
            break :blk .{
                .param_block = extra.data.param_block,
                .ret_ty_body = ret_ty_body,
                .body = body,
            };
        },
        else => unreachable,
    };
    assert(tags[info.param_block] == .block or tags[info.param_block] == .block_inline);
    const param_block = zir.extraData(Inst.Block, datas[info.param_block].pl_node.payload_index);
    const param_body = zir.extra[param_block.end..][0..param_block.data.body_len];
    // Count only the parameter-declaring instructions; the param block may
    // contain other instructions too, which are skipped here.
    var total_params_len: u32 = 0;
    for (param_body) |inst| {
        switch (tags[inst]) {
            .param, .param_comptime, .param_anytype, .param_anytype_comptime => {
                total_params_len += 1;
            },
            else => continue,
        }
    }
    return .{
        .param_body = param_body,
        .ret_ty_body = info.ret_ty_body,
        .body = info.body,
        .total_params_len = total_params_len,
    };
}

View File

@@ -575,6 +575,14 @@ pub const DeclGen = struct {
const info = t.intInfo(self.module.getTarget());
return self.context.intType(info.bits);
},
.Float => switch (t.floatBits(self.module.getTarget())) {
16 => return self.context.halfType(),
32 => return self.context.floatType(),
64 => return self.context.doubleType(),
80 => return self.context.x86FP80Type(),
128 => return self.context.fp128Type(),
else => unreachable,
},
.Bool => return self.context.intType(1),
.Pointer => {
if (t.isSlice()) {
@@ -661,7 +669,6 @@ pub const DeclGen = struct {
.BoundFn => @panic("TODO remove BoundFn from the language"),
.Float,
.Enum,
.Union,
.Opaque,
@@ -699,13 +706,40 @@ pub const DeclGen = struct {
}
return llvm_int;
},
.Float => {
if (tv.ty.floatBits(self.module.getTarget()) <= 64) {
const llvm_ty = try self.llvmType(tv.ty);
return llvm_ty.constReal(tv.val.toFloat(f64));
}
return self.todo("bitcast to f128 from an integer", .{});
},
.Pointer => switch (tv.val.tag()) {
.decl_ref => {
const decl = tv.val.castTag(.decl_ref).?.data;
decl.alive = true;
const val = try self.resolveGlobalDecl(decl);
const llvm_type = try self.llvmType(tv.ty);
return val.constBitCast(llvm_type);
if (tv.ty.isSlice()) {
var buf: Type.Payload.ElemType = undefined;
const ptr_ty = tv.ty.slicePtrFieldType(&buf);
var slice_len: Value.Payload.U64 = .{
.base = .{ .tag = .int_u64 },
.data = tv.val.sliceLen(),
};
const fields: [2]*const llvm.Value = .{
try self.genTypedValue(.{
.ty = ptr_ty,
.val = tv.val,
}),
try self.genTypedValue(.{
.ty = Type.initTag(.usize),
.val = Value.initPayload(&slice_len.base),
}),
};
return self.context.constStruct(&fields, fields.len, .False);
} else {
const decl = tv.val.castTag(.decl_ref).?.data;
decl.alive = true;
const val = try self.resolveGlobalDecl(decl);
const llvm_type = try self.llvmType(tv.ty);
return val.constBitCast(llvm_type);
}
},
.variable => {
const decl = tv.val.castTag(.variable).?.data.owner_decl;
@@ -839,6 +873,10 @@ pub const DeclGen = struct {
.False,
);
},
.ComptimeInt => unreachable,
.ComptimeFloat => unreachable,
.Type => unreachable,
.EnumLiteral => unreachable,
else => return self.todo("implement const of type '{}'", .{tv.ty}),
}
}

View File

@@ -31,6 +31,21 @@ pub const Context = opaque {
pub const intType = LLVMIntTypeInContext;
extern fn LLVMIntTypeInContext(C: *const Context, NumBits: c_uint) *const Type;
pub const halfType = LLVMHalfTypeInContext;
extern fn LLVMHalfTypeInContext(C: *const Context) *const Type;
pub const floatType = LLVMFloatTypeInContext;
extern fn LLVMFloatTypeInContext(C: *const Context) *const Type;
pub const doubleType = LLVMDoubleTypeInContext;
extern fn LLVMDoubleTypeInContext(C: *const Context) *const Type;
pub const x86FP80Type = LLVMX86FP80TypeInContext;
extern fn LLVMX86FP80TypeInContext(C: *const Context) *const Type;
pub const fp128Type = LLVMFP128TypeInContext;
extern fn LLVMFP128TypeInContext(C: *const Context) *const Type;
pub const voidType = LLVMVoidTypeInContext;
extern fn LLVMVoidTypeInContext(C: *const Context) *const Type;
@@ -127,6 +142,9 @@ pub const Type = opaque {
pub const constInt = LLVMConstInt;
extern fn LLVMConstInt(IntTy: *const Type, N: c_ulonglong, SignExtend: Bool) *const Value;
pub const constReal = LLVMConstReal;
extern fn LLVMConstReal(RealTy: *const Type, N: f64) *const Value;
pub const constArray = LLVMConstArray;
extern fn LLVMConstArray(ElementTy: *const Type, ConstantVals: [*]*const Value, Length: c_uint) *const Value;

View File

@@ -222,7 +222,7 @@ const Writer = struct {
const extra = w.air.extraData(Air.Block, ty_pl.payload);
const body = w.air.extra[extra.end..][0..extra.data.body_len];
try s.writeAll("{\n");
try s.print("{}, {{\n", .{w.air.getRefType(ty_pl.ty)});
const old_indent = w.indent;
w.indent += 2;
try w.writeBody(s, body);

View File

@@ -21,8 +21,14 @@ pub const Type = extern union {
tag_if_small_enough: usize,
ptr_otherwise: *Payload,
pub fn zigTypeTag(self: Type) std.builtin.TypeId {
switch (self.tag()) {
pub fn zigTypeTag(ty: Type) std.builtin.TypeId {
return ty.zigTypeTagOrPoison() catch unreachable;
}
pub fn zigTypeTagOrPoison(ty: Type) error{GenericPoison}!std.builtin.TypeId {
switch (ty.tag()) {
.generic_poison => return error.GenericPoison,
.u1,
.u8,
.i8,
@@ -548,8 +554,13 @@ pub const Type = extern union {
pub fn hash(self: Type) u64 {
var hasher = std.hash.Wyhash.init(0);
self.hashWithHasher(&hasher);
return hasher.final();
}
pub fn hashWithHasher(self: Type, hasher: *std.hash.Wyhash) void {
const zig_type_tag = self.zigTypeTag();
std.hash.autoHash(&hasher, zig_type_tag);
std.hash.autoHash(hasher, zig_type_tag);
switch (zig_type_tag) {
.Type,
.Void,
@@ -567,34 +578,34 @@ pub const Type = extern union {
.Int => {
// Detect that e.g. u64 != usize, even if the bits match on a particular target.
if (self.isNamedInt()) {
std.hash.autoHash(&hasher, self.tag());
std.hash.autoHash(hasher, self.tag());
} else {
// Remaining cases are arbitrary sized integers.
// The target will not be branched upon, because we handled target-dependent cases above.
const info = self.intInfo(@as(Target, undefined));
std.hash.autoHash(&hasher, info.signedness);
std.hash.autoHash(&hasher, info.bits);
std.hash.autoHash(hasher, info.signedness);
std.hash.autoHash(hasher, info.bits);
}
},
.Array, .Vector => {
std.hash.autoHash(&hasher, self.arrayLen());
std.hash.autoHash(&hasher, self.elemType().hash());
std.hash.autoHash(hasher, self.arrayLen());
std.hash.autoHash(hasher, self.elemType().hash());
// TODO hash array sentinel
},
.Fn => {
std.hash.autoHash(&hasher, self.fnReturnType().hash());
std.hash.autoHash(&hasher, self.fnCallingConvention());
std.hash.autoHash(hasher, self.fnReturnType().hash());
std.hash.autoHash(hasher, self.fnCallingConvention());
const params_len = self.fnParamLen();
std.hash.autoHash(&hasher, params_len);
std.hash.autoHash(hasher, params_len);
var i: usize = 0;
while (i < params_len) : (i += 1) {
std.hash.autoHash(&hasher, self.fnParamType(i).hash());
std.hash.autoHash(hasher, self.fnParamType(i).hash());
}
std.hash.autoHash(&hasher, self.fnIsVarArgs());
std.hash.autoHash(hasher, self.fnIsVarArgs());
},
.Optional => {
var buf: Payload.ElemType = undefined;
std.hash.autoHash(&hasher, self.optionalChild(&buf).hash());
std.hash.autoHash(hasher, self.optionalChild(&buf).hash());
},
.Float,
.Struct,
@@ -611,7 +622,6 @@ pub const Type = extern union {
// TODO implement more type hashing
},
}
return hasher.final();
}
pub const HashContext64 = struct {
@@ -699,6 +709,7 @@ pub const Type = extern union {
.export_options,
.extern_options,
.@"anyframe",
.generic_poison,
=> unreachable,
.array_u8,
@@ -759,12 +770,15 @@ pub const Type = extern union {
for (payload.param_types) |param_type, i| {
param_types[i] = try param_type.copy(allocator);
}
const other_comptime_params = payload.comptime_params[0..payload.param_types.len];
const comptime_params = try allocator.dupe(bool, other_comptime_params);
return Tag.function.create(allocator, .{
.return_type = try payload.return_type.copy(allocator),
.param_types = param_types,
.cc = payload.cc,
.is_var_args = payload.is_var_args,
.is_generic = payload.is_generic,
.comptime_params = comptime_params.ptr,
});
},
.pointer => {
@@ -1080,11 +1094,118 @@ pub const Type = extern union {
},
.inferred_alloc_const => return writer.writeAll("(inferred_alloc_const)"),
.inferred_alloc_mut => return writer.writeAll("(inferred_alloc_mut)"),
.generic_poison => return writer.writeAll("(generic poison)"),
}
unreachable;
}
}
/// Anything that reports hasCodeGenBits() false returns false here as well.
/// `generic_poison` will return false.
pub fn requiresComptime(ty: Type) bool {
return switch (ty.tag()) {
.u1,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.u128,
.i128,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.anyerror,
.noreturn,
.@"anyframe",
.@"null",
.@"undefined",
.atomic_ordering,
.atomic_rmw_op,
.calling_convention,
.float_mode,
.reduce_op,
.call_options,
.export_options,
.extern_options,
.manyptr_u8,
.manyptr_const_u8,
.fn_noreturn_no_args,
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.anyerror_void_error_union,
.empty_struct_literal,
.function,
.empty_struct,
.error_set,
.error_set_single,
.error_set_inferred,
.@"opaque",
.generic_poison,
=> false,
.type,
.comptime_int,
.comptime_float,
.enum_literal,
=> true,
.var_args_param => unreachable,
.inferred_alloc_mut => unreachable,
.inferred_alloc_const => unreachable,
.array_u8,
.array_u8_sentinel_0,
.array,
.array_sentinel,
.vector,
.pointer,
.single_const_pointer,
.single_mut_pointer,
.many_const_pointer,
.many_mut_pointer,
.c_const_pointer,
.c_mut_pointer,
.const_slice,
.mut_slice,
.int_signed,
.int_unsigned,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
.error_union,
.anyframe_T,
.@"struct",
.@"union",
.union_tagged,
.enum_simple,
.enum_full,
.enum_nonexhaustive,
=> false, // TODO some of these should be `true` depending on their child types
};
}
pub fn toValue(self: Type, allocator: *Allocator) Allocator.Error!Value {
switch (self.tag()) {
.u1 => return Value.initTag(.u1_type),
@@ -1179,7 +1300,6 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.array_u8_sentinel_0,
@@ -1204,6 +1324,8 @@ pub const Type = extern union {
.anyframe_T,
=> true,
.function => !self.castTag(.function).?.data.is_generic,
.@"struct" => {
// TODO introduce lazy value mechanism
const struct_obj = self.castTag(.@"struct").?.data;
@@ -1283,6 +1405,7 @@ pub const Type = extern union {
.inferred_alloc_const => unreachable,
.inferred_alloc_mut => unreachable,
.var_args_param => unreachable,
.generic_poison => unreachable,
};
}
@@ -1505,6 +1628,8 @@ pub const Type = extern union {
.@"opaque",
.var_args_param,
=> unreachable,
.generic_poison => unreachable,
};
}
@@ -1532,6 +1657,7 @@ pub const Type = extern union {
.inferred_alloc_mut => unreachable,
.@"opaque" => unreachable,
.var_args_param => unreachable,
.generic_poison => unreachable,
.@"struct" => {
const s = self.castTag(.@"struct").?.data;
@@ -1698,6 +1824,7 @@ pub const Type = extern union {
.inferred_alloc_mut => unreachable,
.@"opaque" => unreachable,
.var_args_param => unreachable,
.generic_poison => unreachable,
.@"struct" => {
@panic("TODO bitSize struct");
@@ -2408,14 +2535,41 @@ pub const Type = extern union {
};
}
/// Asserts the type is a function.
pub fn fnIsGeneric(self: Type) bool {
return switch (self.tag()) {
.fn_noreturn_no_args => false,
.fn_void_no_args => false,
.fn_naked_noreturn_no_args => false,
.fn_ccc_void_no_args => false,
.function => self.castTag(.function).?.data.is_generic,
pub fn fnInfo(ty: Type) Payload.Function.Data {
return switch (ty.tag()) {
.fn_noreturn_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.noreturn),
.cc = .Unspecified,
.is_var_args = false,
.is_generic = false,
},
.fn_void_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.void),
.cc = .Unspecified,
.is_var_args = false,
.is_generic = false,
},
.fn_naked_noreturn_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.noreturn),
.cc = .Naked,
.is_var_args = false,
.is_generic = false,
},
.fn_ccc_void_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.void),
.cc = .C,
.is_var_args = false,
.is_generic = false,
},
.function => ty.castTag(.function).?.data,
else => unreachable,
};
@@ -2595,6 +2749,7 @@ pub const Type = extern union {
.inferred_alloc_const => unreachable,
.inferred_alloc_mut => unreachable,
.generic_poison => unreachable,
};
}
@@ -3008,6 +3163,7 @@ pub const Type = extern union {
single_const_pointer_to_comptime_int,
const_slice_u8,
anyerror_void_error_union,
generic_poison,
/// This is a special type for variadic parameters of a function call.
/// Casts to it will validate that the type can be passed to a c calling convetion function.
var_args_param,
@@ -3105,6 +3261,7 @@ pub const Type = extern union {
.single_const_pointer_to_comptime_int,
.anyerror_void_error_union,
.const_slice_u8,
.generic_poison,
.inferred_alloc_const,
.inferred_alloc_mut,
.var_args_param,
@@ -3223,13 +3380,23 @@ pub const Type = extern union {
pub const base_tag = Tag.function;
base: Payload = Payload{ .tag = base_tag },
data: struct {
data: Data,
// TODO look into optimizing this memory to take fewer bytes
pub const Data = struct {
param_types: []Type,
comptime_params: [*]bool,
return_type: Type,
cc: std.builtin.CallingConvention,
is_var_args: bool,
is_generic: bool,
},
pub fn paramIsComptime(self: @This(), i: usize) bool {
if (!self.is_generic) return false;
assert(i < self.param_types.len);
return self.comptime_params[i];
}
};
};
pub const ErrorSet = struct {

View File

@@ -76,6 +76,8 @@ pub const Value = extern union {
fn_ccc_void_no_args_type,
single_const_pointer_to_comptime_int_type,
const_slice_u8_type,
anyerror_void_error_union_type,
generic_poison_type,
undef,
zero,
@@ -85,6 +87,7 @@ pub const Value = extern union {
null_value,
bool_true,
bool_false,
generic_poison,
abi_align_default,
empty_struct_value,
@@ -188,6 +191,8 @@ pub const Value = extern union {
.single_const_pointer_to_comptime_int_type,
.anyframe_type,
.const_slice_u8_type,
.anyerror_void_error_union_type,
.generic_poison_type,
.enum_literal_type,
.undef,
.zero,
@@ -210,6 +215,7 @@ pub const Value = extern union {
.call_options_type,
.export_options_type,
.extern_options_type,
.generic_poison,
=> @compileError("Value Tag " ++ @tagName(t) ++ " has no payload"),
.int_big_positive,
@@ -366,6 +372,8 @@ pub const Value = extern union {
.single_const_pointer_to_comptime_int_type,
.anyframe_type,
.const_slice_u8_type,
.anyerror_void_error_union_type,
.generic_poison_type,
.enum_literal_type,
.undef,
.zero,
@@ -388,6 +396,7 @@ pub const Value = extern union {
.call_options_type,
.export_options_type,
.extern_options_type,
.generic_poison,
=> unreachable,
.ty => {
@@ -556,6 +565,9 @@ pub const Value = extern union {
.single_const_pointer_to_comptime_int_type => return out_stream.writeAll("*const comptime_int"),
.anyframe_type => return out_stream.writeAll("anyframe"),
.const_slice_u8_type => return out_stream.writeAll("[]const u8"),
.anyerror_void_error_union_type => return out_stream.writeAll("anyerror!void"),
.generic_poison_type => return out_stream.writeAll("(generic poison type)"),
.generic_poison => return out_stream.writeAll("(generic poison)"),
.enum_literal_type => return out_stream.writeAll("@Type(.EnumLiteral)"),
.manyptr_u8_type => return out_stream.writeAll("[*]u8"),
.manyptr_const_u8_type => return out_stream.writeAll("[*]const u8"),
@@ -709,6 +721,8 @@ pub const Value = extern union {
.single_const_pointer_to_comptime_int_type => Type.initTag(.single_const_pointer_to_comptime_int),
.anyframe_type => Type.initTag(.@"anyframe"),
.const_slice_u8_type => Type.initTag(.const_slice_u8),
.anyerror_void_error_union_type => Type.initTag(.anyerror_void_error_union),
.generic_poison_type => Type.initTag(.generic_poison),
.enum_literal_type => Type.initTag(.enum_literal),
.manyptr_u8_type => Type.initTag(.manyptr_u8),
.manyptr_const_u8_type => Type.initTag(.manyptr_const_u8),
@@ -732,46 +746,7 @@ pub const Value = extern union {
return Type.initPayload(&buffer.base);
},
.undef,
.zero,
.one,
.void_value,
.unreachable_value,
.empty_array,
.bool_true,
.bool_false,
.null_value,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.function,
.extern_fn,
.variable,
.decl_ref,
.decl_ref_mut,
.elem_ptr,
.field_ptr,
.bytes,
.repeated,
.array,
.slice,
.float_16,
.float_32,
.float_64,
.float_128,
.enum_literal,
.enum_field_index,
.@"error",
.error_union,
.empty_struct_value,
.@"struct",
.@"union",
.inferred_alloc,
.inferred_alloc_comptime,
.abi_align_default,
.eu_payload_ptr,
=> unreachable,
else => unreachable,
};
}
@@ -1142,12 +1117,82 @@ pub const Value = extern union {
return order(a, b).compare(.eq);
}
pub fn hash(val: Value, ty: Type, hasher: *std.hash.Wyhash) void {
switch (ty.zigTypeTag()) {
.BoundFn => unreachable, // TODO remove this from the language
.Void,
.NoReturn,
.Undefined,
.Null,
=> {},
.Type => {
var buf: ToTypeBuffer = undefined;
return val.toType(&buf).hashWithHasher(hasher);
},
.Bool => {
std.hash.autoHash(hasher, val.toBool());
},
.Int, .ComptimeInt => {
var space: BigIntSpace = undefined;
const big = val.toBigInt(&space);
std.hash.autoHash(hasher, big.positive);
for (big.limbs) |limb| {
std.hash.autoHash(hasher, limb);
}
},
.Float, .ComptimeFloat => {
@panic("TODO implement hashing float values");
},
.Pointer => {
@panic("TODO implement hashing pointer values");
},
.Array, .Vector => {
@panic("TODO implement hashing array/vector values");
},
.Struct => {
@panic("TODO implement hashing struct values");
},
.Optional => {
@panic("TODO implement hashing optional values");
},
.ErrorUnion => {
@panic("TODO implement hashing error union values");
},
.ErrorSet => {
@panic("TODO implement hashing error set values");
},
.Enum => {
@panic("TODO implement hashing enum values");
},
.Union => {
@panic("TODO implement hashing union values");
},
.Fn => {
@panic("TODO implement hashing function values");
},
.Opaque => {
@panic("TODO implement hashing opaque values");
},
.Frame => {
@panic("TODO implement hashing frame values");
},
.AnyFrame => {
@panic("TODO implement hashing anyframe values");
},
.EnumLiteral => {
@panic("TODO implement hashing enum literal values");
},
}
}
pub const ArrayHashContext = struct {
ty: Type,
pub fn hash(self: @This(), v: Value) u32 {
pub fn hash(self: @This(), val: Value) u32 {
const other_context: HashContext = .{ .ty = self.ty };
return @truncate(u32, other_context.hash(v));
return @truncate(u32, other_context.hash(val));
}
pub fn eql(self: @This(), a: Value, b: Value) bool {
return a.eql(b, self.ty);
@@ -1157,76 +1202,9 @@ pub const Value = extern union {
pub const HashContext = struct {
ty: Type,
pub fn hash(self: @This(), v: Value) u64 {
pub fn hash(self: @This(), val: Value) u64 {
var hasher = std.hash.Wyhash.init(0);
switch (self.ty.zigTypeTag()) {
.BoundFn => unreachable, // TODO remove this from the language
.Void,
.NoReturn,
.Undefined,
.Null,
=> {},
.Type => {
var buf: ToTypeBuffer = undefined;
return v.toType(&buf).hash();
},
.Bool => {
std.hash.autoHash(&hasher, v.toBool());
},
.Int, .ComptimeInt => {
var space: BigIntSpace = undefined;
const big = v.toBigInt(&space);
std.hash.autoHash(&hasher, big.positive);
for (big.limbs) |limb| {
std.hash.autoHash(&hasher, limb);
}
},
.Float, .ComptimeFloat => {
@panic("TODO implement hashing float values");
},
.Pointer => {
@panic("TODO implement hashing pointer values");
},
.Array, .Vector => {
@panic("TODO implement hashing array/vector values");
},
.Struct => {
@panic("TODO implement hashing struct values");
},
.Optional => {
@panic("TODO implement hashing optional values");
},
.ErrorUnion => {
@panic("TODO implement hashing error union values");
},
.ErrorSet => {
@panic("TODO implement hashing error set values");
},
.Enum => {
@panic("TODO implement hashing enum values");
},
.Union => {
@panic("TODO implement hashing union values");
},
.Fn => {
@panic("TODO implement hashing function values");
},
.Opaque => {
@panic("TODO implement hashing opaque values");
},
.Frame => {
@panic("TODO implement hashing frame values");
},
.AnyFrame => {
@panic("TODO implement hashing anyframe values");
},
.EnumLiteral => {
@panic("TODO implement hashing enum literal values");
},
}
val.hash(self.ty, &hasher);
return hasher.final();
}

View File

@@ -4,6 +4,7 @@ test {
// Tests that pass for both.
_ = @import("behavior/bool.zig");
_ = @import("behavior/basic.zig");
_ = @import("behavior/generics.zig");
if (!builtin.zig_is_stage2) {
// Tests that only pass for stage1.
@@ -94,7 +95,7 @@ test {
_ = @import("behavior/fn_in_struct_in_comptime.zig");
_ = @import("behavior/fn_delegation.zig");
_ = @import("behavior/for.zig");
_ = @import("behavior/generics.zig");
_ = @import("behavior/generics_stage1.zig");
_ = @import("behavior/hasdecl.zig");
_ = @import("behavior/hasfield.zig");
_ = @import("behavior/if.zig");

View File

@@ -1,4 +1,5 @@
const std = @import("std");
const mem = std.mem;
const expect = std.testing.expect;
// normal comment
@@ -83,3 +84,81 @@ test "unicode escape in character literal" {
test "unicode character in character literal" {
try expect('💩' == 128169);
}
/// Helper for the "return string from function" test: returns the first four
/// keys of the Dvorak home row as a string slice.
fn first4KeysOfHomeRow() []const u8 {
    const home_row_keys: []const u8 = "aoeu";
    return home_row_keys;
}
test "return string from function" {
try expect(mem.eql(u8, first4KeysOfHomeRow(), "aoeu"));
}
test "hex escape" {
try expect(mem.eql(u8, "\x68\x65\x6c\x6c\x6f", "hello"));
}
test "multiline string" {
const s1 =
\\one
\\two)
\\three
;
const s2 = "one\ntwo)\nthree";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments at start" {
const s1 =
//\\one
\\two)
\\three
;
const s2 = "two)\nthree";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments at end" {
const s1 =
\\one
\\two)
//\\three
;
const s2 = "one\ntwo)";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments in middle" {
const s1 =
\\one
//\\two)
\\three
;
const s2 = "one\nthree";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments at multiple places" {
const s1 =
\\one
//\\two
\\three
//\\four
\\five
;
const s2 = "one\nthree\nfive";
try expect(mem.eql(u8, s1, s2));
}
test "call result of if else expression" {
try expect(mem.eql(u8, f2(true), "a"));
try expect(mem.eql(u8, f2(false), "b"));
}
fn f2(x: bool) []const u8 {
return (if (x) fA else fB)();
}
fn fA() []const u8 {
return "a";
}
fn fB() []const u8 {
return "b";
}

View File

@@ -1,16 +1,43 @@
const std = @import("std");
const builtin = @import("builtin");
const testing = std.testing;
const expect = testing.expect;
const expectEqual = testing.expectEqual;
test "one param, explicit comptime" {
var x: usize = 0;
x += checkSize(i32);
x += checkSize(bool);
x += checkSize(bool);
try expect(x == 6);
}
fn checkSize(comptime T: type) usize {
return @sizeOf(T);
}
test "simple generic fn" {
try expect(max(i32, 3, -1) == 3);
try expect(max(f32, 0.123, 0.456) == 0.456);
try expect(max(u8, 1, 100) == 100);
if (!builtin.zig_is_stage2) {
// TODO: stage2 is incorrectly emitting the following:
// error: cast of value 1.23e-01 to type 'f32' loses information
try expect(max(f32, 0.123, 0.456) == 0.456);
}
try expect(add(2, 3) == 5);
}
fn max(comptime T: type, a: T, b: T) T {
return if (a > b) a else b;
if (!builtin.zig_is_stage2) {
// TODO: stage2 is incorrectly emitting AIR that allocates a result
// value, stores to it, but then returns void instead of the result.
return if (a > b) a else b;
}
if (a > b) {
return a;
} else {
return b;
}
}
fn add(comptime a: i32, b: i32) i32 {
@@ -37,133 +64,9 @@ fn sameButWithFloats(a: f64, b: f64) f64 {
test "fn with comptime args" {
try expect(gimmeTheBigOne(1234, 5678) == 5678);
try expect(shouldCallSameInstance(34, 12) == 34);
try expect(sameButWithFloats(0.43, 0.49) == 0.49);
}
test "var params" {
try expect(max_i32(12, 34) == 34);
try expect(max_f64(1.2, 3.4) == 3.4);
}
test {
comptime try expect(max_i32(12, 34) == 34);
comptime try expect(max_f64(1.2, 3.4) == 3.4);
}
fn max_var(a: anytype, b: anytype) @TypeOf(a + b) {
return if (a > b) a else b;
}
fn max_i32(a: i32, b: i32) i32 {
return max_var(a, b);
}
fn max_f64(a: f64, b: f64) f64 {
return max_var(a, b);
}
pub fn List(comptime T: type) type {
return SmallList(T, 8);
}
pub fn SmallList(comptime T: type, comptime STATIC_SIZE: usize) type {
return struct {
items: []T,
length: usize,
prealloc_items: [STATIC_SIZE]T,
};
}
test "function with return type type" {
var list: List(i32) = undefined;
var list2: List(i32) = undefined;
list.length = 10;
list2.length = 10;
try expect(list.prealloc_items.len == 8);
try expect(list2.prealloc_items.len == 8);
}
test "generic struct" {
var a1 = GenNode(i32){
.value = 13,
.next = null,
};
var b1 = GenNode(bool){
.value = true,
.next = null,
};
try expect(a1.value == 13);
try expect(a1.value == a1.getVal());
try expect(b1.getVal());
}
fn GenNode(comptime T: type) type {
return struct {
value: T,
next: ?*GenNode(T),
fn getVal(n: *const GenNode(T)) T {
return n.value;
}
};
}
test "const decls in struct" {
try expect(GenericDataThing(3).count_plus_one == 4);
}
fn GenericDataThing(comptime count: isize) type {
return struct {
const count_plus_one = count + 1;
};
}
test "use generic param in generic param" {
try expect(aGenericFn(i32, 3, 4) == 7);
}
fn aGenericFn(comptime T: type, comptime a: T, b: T) T {
return a + b;
}
test "generic fn with implicit cast" {
try expect(getFirstByte(u8, &[_]u8{13}) == 13);
try expect(getFirstByte(u16, &[_]u16{
0,
13,
}) == 0);
}
fn getByte(ptr: ?*const u8) u8 {
return ptr.?.*;
}
fn getFirstByte(comptime T: type, mem: []const T) u8 {
return getByte(@ptrCast(*const u8, &mem[0]));
}
const foos = [_]fn (anytype) bool{
foo1,
foo2,
};
fn foo1(arg: anytype) bool {
return arg;
}
fn foo2(arg: anytype) bool {
return !arg;
}
test "array of generic fns" {
try expect(foos[0](true));
try expect(!foos[1](true));
}
test "generic fn keeps non-generic parameter types" {
const A = 128;
const S = struct {
fn f(comptime T: type, s: []T) !void {
try expect(A != @typeInfo(@TypeOf(s)).Pointer.alignment);
}
};
// The compiler monomorphizes `S.f` for `T=u8` on its first use, check that
// `x` type not affect `s` parameter type.
var x: [16]u8 align(A) = undefined;
try S.f(u8, &x);
if (!builtin.zig_is_stage2) {
// TODO: stage2 llvm backend needs to use fcmp instead of icmp
// probably AIR should just have different instructions for floats.
try expect(sameButWithFloats(0.43, 0.49) == 0.49);
}
}

View File

@@ -0,0 +1,132 @@
const std = @import("std");
const testing = std.testing;
const expect = testing.expect;
const expectEqual = testing.expectEqual;
test "anytype params" {
try expect(max_i32(12, 34) == 34);
try expect(max_f64(1.2, 3.4) == 3.4);
}
test {
comptime try expect(max_i32(12, 34) == 34);
comptime try expect(max_f64(1.2, 3.4) == 3.4);
}
/// Generic max over `anytype` params; the return type is inferred from the
/// peer type of `a + b`. Exercises anytype parameters and inferred return type.
fn max_anytype(a: anytype, b: anytype) @TypeOf(a + b) {
    return if (a > b) a else b;
}
fn max_i32(a: i32, b: i32) i32 {
return max_anytype(a, b);
}
fn max_f64(a: f64, b: f64) f64 {
return max_anytype(a, b);
}
/// Convenience wrapper: a `SmallList` with a preallocated size of 8.
pub fn List(comptime T: type) type {
    return SmallList(T, 8);
}
/// Returns a list type with `STATIC_SIZE` preallocated items of `T`.
/// Exercises a comptime-parameterized type-returning function.
pub fn SmallList(comptime T: type, comptime STATIC_SIZE: usize) type {
    return struct {
        items: []T,
        length: usize,
        prealloc_items: [STATIC_SIZE]T,
    };
}
test "function with return type type" {
var list: List(i32) = undefined;
var list2: List(i32) = undefined;
list.length = 10;
list2.length = 10;
try expect(list.prealloc_items.len == 8);
try expect(list2.prealloc_items.len == 8);
}
test "generic struct" {
var a1 = GenNode(i32){
.value = 13,
.next = null,
};
var b1 = GenNode(bool){
.value = true,
.next = null,
};
try expect(a1.value == 13);
try expect(a1.value == a1.getVal());
try expect(b1.getVal());
}
/// Generic linked-list-style node; exercises a generic type that refers to
/// its own instantiation (`?*GenNode(T)`) and a method on that type.
fn GenNode(comptime T: type) type {
    return struct {
        value: T,
        next: ?*GenNode(T),
        fn getVal(n: *const GenNode(T)) T {
            return n.value;
        }
    };
}
test "const decls in struct" {
try expect(GenericDataThing(3).count_plus_one == 4);
}
fn GenericDataThing(comptime count: isize) type {
return struct {
const count_plus_one = count + 1;
};
}
test "use generic param in generic param" {
try expect(aGenericFn(i32, 3, 4) == 7);
}
/// Exercises using one comptime parameter (`T`) as the type of another
/// comptime parameter (`a`) and of a runtime parameter (`b`).
fn aGenericFn(comptime T: type, comptime a: T, b: T) T {
    return a + b;
}
test "generic fn with implicit cast" {
try expect(getFirstByte(u8, &[_]u8{13}) == 13);
try expect(getFirstByte(u16, &[_]u16{
0,
13,
}) == 0);
}
fn getByte(ptr: ?*const u8) u8 {
return ptr.?.*;
}
fn getFirstByte(comptime T: type, mem: []const T) u8 {
return getByte(@ptrCast(*const u8, &mem[0]));
}
const foos = [_]fn (anytype) bool{
foo1,
foo2,
};
fn foo1(arg: anytype) bool {
return arg;
}
fn foo2(arg: anytype) bool {
return !arg;
}
test "array of generic fns" {
try expect(foos[0](true));
try expect(!foos[1](true));
}
test "generic fn keeps non-generic parameter types" {
    const A = 128;
    const S = struct {
        fn f(comptime T: type, s: []T) !void {
            try expect(A != @typeInfo(@TypeOf(s)).Pointer.alignment);
        }
    };
    // The compiler monomorphizes `S.f` for `T=u8` on its first use; check that
    // the over-aligned type of `x` does not affect the `s` parameter's type
    // (its slice alignment must stay the default, not `A`).
    var x: [16]u8 align(A) = undefined;
    try S.f(u8, &x);
}

View File

@@ -5,14 +5,6 @@ const expectEqualStrings = std.testing.expectEqualStrings;
const mem = std.mem;
const builtin = @import("builtin");
fn first4KeysOfHomeRow() []const u8 {
return "aoeu";
}
test "return string from function" {
try expect(mem.eql(u8, first4KeysOfHomeRow(), "aoeu"));
}
test "memcpy and memset intrinsics" {
var foo: [20]u8 = undefined;
var bar: [20]u8 = undefined;
@@ -48,10 +40,6 @@ test "constant equal function pointers" {
fn emptyFn() void {}
test "hex escape" {
try expect(mem.eql(u8, "\x68\x65\x6c\x6c\x6f", "hello"));
}
test "string concatenation" {
try expect(mem.eql(u8, "OK" ++ " IT " ++ "WORKED", "OK IT WORKED"));
}
@@ -70,59 +58,7 @@ test "string escapes" {
try expectEqualStrings("\u{1234}\u{069}\u{1}", "\xe1\x88\xb4\x69\x01");
}
test "multiline string" {
const s1 =
\\one
\\two)
\\three
;
const s2 = "one\ntwo)\nthree";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments at start" {
const s1 =
//\\one
\\two)
\\three
;
const s2 = "two)\nthree";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments at end" {
const s1 =
\\one
\\two)
//\\three
;
const s2 = "one\ntwo)";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments in middle" {
const s1 =
\\one
//\\two)
\\three
;
const s2 = "one\nthree";
try expect(mem.eql(u8, s1, s2));
}
test "multiline string comments at multiple places" {
const s1 =
\\one
//\\two
\\three
//\\four
\\five
;
const s2 = "one\nthree\nfive";
try expect(mem.eql(u8, s1, s2));
}
test "multiline C string" {
test "multiline string literal is null terminated" {
const s1 =
\\one
\\two)
@@ -177,20 +113,6 @@ fn outer() i64 {
return inner();
}
test "call result of if else expression" {
try expect(mem.eql(u8, f2(true), "a"));
try expect(mem.eql(u8, f2(false), "b"));
}
fn f2(x: bool) []const u8 {
return (if (x) fA else fB)();
}
fn fA() []const u8 {
return "a";
}
fn fB() []const u8 {
return "b";
}
test "constant enum initialization with differing sizes" {
try test3_1(test3_foo);
try test3_2(test3_bar);

View File

@@ -1572,7 +1572,7 @@ pub fn addCases(ctx: *TestContext) !void {
\\ const x = asm volatile ("syscall"
\\ : [o] "{rax}" (-> number)
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (code)
\\ [arg1] "{rdi}" (60)
\\ : "rcx", "r11", "memory"
\\ );
\\ _ = x;