std, src, doc, test: remove unused variables

Jacob G-W
2021-06-09 21:35:42 -04:00
committed by Andrew Kelley
parent d34a1ccb0e
commit 641ecc260f
112 changed files with 208 additions and 294 deletions
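Most hunks below apply one of three forms of the same mechanical change: delete an unused declaration, replace `const x = f();` with `_ = f();` when only the call's side effect is needed, or keep the declaration and explicitly discard it with `_ = x;`. A minimal sketch of the discard idiom, using hypothetical names not taken from the changed files:

```zig
const std = @import("std");

// Hypothetical helper so the example has something to call.
fn computeChecksum(bytes: []const u8) u32 {
    var sum: u32 = 0;
    for (bytes) |b| sum +%= b;
    return sum;
}

test "discarding values instead of binding unused names" {
    const data = "hello";

    // Form 1: the result is not needed, only the call itself.
    _ = computeChecksum(data);

    // Form 2: keep the name for readability, then discard it explicitly.
    const checksum = computeChecksum(data);
    _ = checksum;
}
```

The second form keeps the value documented by name, which is the shape many of the hunks below converge on (e.g. `_ = signature;`, `_ = FCHECK;`).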

View File

@@ -1017,7 +1017,6 @@ fn tokenizeAndPrint(docgen_tokenizer: *Tokenizer, out: anytype, source_token: To
}
fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: anytype, zig_exe: []const u8, do_code_tests: bool) !void {
var code_progress_index: usize = 0;
var progress = Progress{};
const root_node = try progress.start("Generating docgen examples", toc.nodes.len);
defer root_node.end();
@@ -1090,7 +1089,6 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: any
switch (code.id) {
Code.Id.Exe => |expected_outcome| code_block: {
const name_plus_bin_ext = try std.fmt.allocPrint(allocator, "{s}{s}", .{ code.name, exe_ext });
var build_args = std.ArrayList([]const u8).init(allocator);
defer build_args.deinit();
try build_args.appendSlice(&[_][]const u8{
@@ -1361,19 +1359,9 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: any
},
Code.Id.Obj => |maybe_error_match| {
const name_plus_obj_ext = try std.fmt.allocPrint(allocator, "{s}{s}", .{ code.name, obj_ext });
const tmp_obj_file_name = try fs.path.join(
allocator,
&[_][]const u8{ tmp_dir_name, name_plus_obj_ext },
);
var build_args = std.ArrayList([]const u8).init(allocator);
defer build_args.deinit();
const name_plus_h_ext = try std.fmt.allocPrint(allocator, "{s}.h", .{code.name});
const output_h_file_name = try fs.path.join(
allocator,
&[_][]const u8{ tmp_dir_name, name_plus_h_ext },
);
try build_args.appendSlice(&[_][]const u8{
zig_exe,
"build-obj",

View File

@@ -518,8 +518,8 @@ pub fn cpuCount() CpuCountError!usize {
},
.haiku => {
var count: u32 = undefined;
- var system_info: os.system_info = undefined;
- const rc = os.system.get_system_info(&system_info);
+ // var system_info: os.system_info = undefined;
+ // const rc = os.system.get_system_info(&system_info);
count = system_info.cpu_count;
return @intCast(usize, count);
},

View File

@@ -112,9 +112,6 @@ pub const Base64Encoder = struct {
const out_len = encoder.calcSize(source.len);
assert(dest.len >= out_len);
const nibbles = source.len / 3;
const leftover = source.len - 3 * nibbles;
var acc: u12 = 0;
var acc_len: u4 = 0;
var out_idx: usize = 0;
@@ -223,7 +220,6 @@ pub const Base64Decoder = struct {
if (decoder.pad_char) |pad_char| {
const padding_len = acc_len / 2;
var padding_chars: usize = 0;
var i: usize = 0;
for (leftover) |c| {
if (c != pad_char) {
return if (c == Base64Decoder.invalid_char) error.InvalidCharacter else error.InvalidPadding;
@@ -302,7 +298,6 @@ pub const Base64DecoderWithIgnore = struct {
var leftover = source[leftover_idx.?..];
if (decoder.pad_char) |pad_char| {
var padding_chars: usize = 0;
var i: usize = 0;
for (leftover) |c| {
if (decoder_with_ignore.char_is_ignored[c]) continue;
if (c != pad_char) {

View File

@@ -351,7 +351,6 @@ pub const Tokenizer = struct {
pp_directive: bool = false,
pub fn next(self: *Tokenizer) Token {
const start_index = self.index;
var result = Token{
.id = .Eof,
.start = self.index,
@@ -1380,12 +1379,12 @@ test "operators" {
test "keywords" {
try expectTokens(
\\auto break case char const continue default do
\\double else enum extern float for goto if int
\\long register return short signed sizeof static
\\struct switch typedef union unsigned void volatile
\\while _Bool _Complex _Imaginary inline restrict _Alignas
\\_Alignof _Atomic _Generic _Noreturn _Static_assert _Thread_local
\\
, &[_]Token.Id{
.Keyword_auto,

View File

@@ -62,6 +62,8 @@ pub fn GzipStream(comptime ReaderType: type) type {
const XFL = header[8];
// Operating system where the compression took place
const OS = header[9];
_ = XFL;
_ = OS;
if (FLG & FEXTRA != 0) {
// Skip the extra data, we could read and expose it to the user

View File

@@ -35,6 +35,7 @@ pub fn ZlibStream(comptime ReaderType: type) type {
const CM = @truncate(u4, header[0]);
const CINFO = @truncate(u4, header[0] >> 4);
const FCHECK = @truncate(u5, header[1]);
_ = FCHECK;
const FDICT = @truncate(u1, header[1] >> 5);
if ((@as(u16, header[0]) << 8 | header[1]) % 31 != 0)

View File

@@ -330,13 +330,10 @@ pub const Scalar = struct {
const carry9 = z02 >> 56;
const c01 = carry9;
const carry10 = (z12 + c01) >> 56;
const t21 = @truncate(u64, z12 + c01) & 0xffffffffffffff;
const c11 = carry10;
const carry11 = (z22 + c11) >> 56;
const t22 = @truncate(u64, z22 + c11) & 0xffffffffffffff;
const c21 = carry11;
const carry12 = (z32 + c21) >> 56;
const t23 = @truncate(u64, z32 + c21) & 0xffffffffffffff;
const c31 = carry12;
const carry13 = (z42 + c31) >> 56;
const t24 = @truncate(u64, z42 + c31) & 0xffffffffffffff;
@@ -605,13 +602,10 @@ const ScalarDouble = struct {
const carry0 = z01 >> 56;
const c00 = carry0;
const carry1 = (z11 + c00) >> 56;
const t100 = @as(u64, @truncate(u64, z11 + c00)) & 0xffffffffffffff;
const c10 = carry1;
const carry2 = (z21 + c10) >> 56;
const t101 = @as(u64, @truncate(u64, z21 + c10)) & 0xffffffffffffff;
const c20 = carry2;
const carry3 = (z31 + c20) >> 56;
const t102 = @as(u64, @truncate(u64, z31 + c20)) & 0xffffffffffffff;
const c30 = carry3;
const carry4 = (z41 + c30) >> 56;
const t103 = @as(u64, @truncate(u64, z41 + c30)) & 0xffffffffffffff;

View File

@@ -49,8 +49,6 @@ pub const Block = struct {
/// Encrypt a block with a round key.
pub inline fn encrypt(block: Block, round_key: Block) Block {
const src = &block.repr;
const s0 = block.repr[0];
const s1 = block.repr[1];
const s2 = block.repr[2];
@@ -66,8 +64,6 @@ pub const Block = struct {
/// Encrypt a block with the last round key.
pub inline fn encryptLast(block: Block, round_key: Block) Block {
const src = &block.repr;
const t0 = block.repr[0];
const t1 = block.repr[1];
const t2 = block.repr[2];
@@ -88,8 +84,6 @@ pub const Block = struct {
/// Decrypt a block with a round key.
pub inline fn decrypt(block: Block, round_key: Block) Block {
const src = &block.repr;
const s0 = block.repr[0];
const s1 = block.repr[1];
const s2 = block.repr[2];
@@ -105,8 +99,6 @@ pub const Block = struct {
/// Decrypt a block with the last round key.
pub inline fn decryptLast(block: Block, round_key: Block) Block {
const src = &block.repr;
const t0 = block.repr[0];
const t1 = block.repr[1];
const t2 = block.repr[2];

View File

@@ -114,7 +114,6 @@ test "Aes256Gcm - Empty message and no associated data" {
const ad = "";
const m = "";
var c: [m.len]u8 = undefined;
var m2: [m.len]u8 = undefined;
var tag: [Aes256Gcm.tag_length]u8 = undefined;
Aes256Gcm.encrypt(&c, &tag, m, ad, nonce, key);

View File

@@ -271,7 +271,6 @@ test "AesOcb test vector 1" {
var c: [0]u8 = undefined;
Aes128Ocb.encrypt(&c, &tag, "", "", nonce, k);
var expected_c: [c.len]u8 = undefined;
var expected_tag: [tag.len]u8 = undefined;
_ = try hexToBytes(&expected_tag, "785407BFFFC8AD9EDCC5520AC9111EE6");

View File

@@ -48,7 +48,6 @@ const State = struct {
fn expand0(state: *State, key: []const u8) void {
var i: usize = 0;
var j: usize = 0;
var t: u32 = undefined;
while (i < state.subkeys.len) : (i += 1) {
state.subkeys[i] ^= toWord(key, &j);
}
@@ -75,7 +74,6 @@ const State = struct {
fn expand(state: *State, data: []const u8, key: []const u8) void {
var i: usize = 0;
var j: usize = 0;
var t: u32 = undefined;
while (i < state.subkeys.len) : (i += 1) {
state.subkeys[i] ^= toWord(key, &j);
}

View File

@@ -444,7 +444,6 @@ fn ChaChaWith64BitNonce(comptime rounds_nb: usize) type {
if (comptime @sizeOf(usize) > 4) {
// A big block is giant: 256 GiB, but we can avoid this limitation
var remaining_blocks: u32 = @intCast(u32, (in.len / big_block));
var i: u32 = 0;
while (remaining_blocks > 0) : (remaining_blocks -= 1) {
ChaChaImpl(rounds_nb).chacha20Xor(out[cursor .. cursor + big_block], in[cursor .. cursor + big_block], k, c);
c[1] += 1; // upper 32-bit of counter, generic chacha20Xor() doesn't know about this.

View File

@@ -407,7 +407,7 @@ test "dynamic_library" {
else => return error.SkipZigTest,
};
- const dynlib = DynLib.open(libname) catch |err| {
+ _ = DynLib.open(libname) catch |err| {
try testing.expect(err == error.FileNotFound);
return;
};

View File

@@ -308,7 +308,6 @@ test "std.event.Channel wraparound" {
// add items to channel and pull them out until
// the buffer wraps around, make sure it doesn't crash.
var result: i32 = undefined;
channel.put(5);
try testing.expectEqual(@as(i32, 5), channel.get());
channel.put(6);

View File

@@ -130,7 +130,7 @@ test "std.event.Group" {
// TODO this file has bit-rotted. repair it
if (true) return error.SkipZigTest;
- const handle = async testGroup(std.heap.page_allocator);
+ _ = async testGroup(std.heap.page_allocator);
}
fn testGroup(allocator: *Allocator) callconv(.Async) void {
var count: usize = 0;

View File

@@ -680,7 +680,7 @@ pub const Loop = struct {
fn run(func_args: Args, loop: *Loop, allocator: *mem.Allocator) void {
loop.beginOneEvent();
loop.yield();
- const result = @call(.{}, func, func_args);
+ @call(.{}, func, func_args); // compile error when called with non-void ret type
suspend {
loop.finishOneEvent();
allocator.destroy(@frame());

View File

@@ -225,7 +225,7 @@ test "std.event.RwLock" {
var lock = RwLock.init();
defer lock.deinit();
- const handle = testLock(std.heap.page_allocator, &lock);
+ _ = testLock(std.heap.page_allocator, &lock);
const expected_result = [1]i32{shared_it_count * @intCast(i32, shared_test_data.len)} ** shared_test_data.len;
try testing.expectEqualSlices(i32, expected_result, shared_test_data);

View File

@@ -1140,7 +1140,7 @@ pub fn formatFloatHexadecimal(
// +1 for the decimal part.
var buf: [1 + mantissa_digits]u8 = undefined;
- const N = formatIntBuf(&buf, mantissa, 16, .lower, .{ .fill = '0', .width = 1 + mantissa_digits });
+ _ = formatIntBuf(&buf, mantissa, 16, .lower, .{ .fill = '0', .width = 1 + mantissa_digits });
try writer.writeAll("0x");
try writer.writeByte(buf[0]);
@@ -2162,7 +2162,6 @@ test "custom" {
}
};
var buf1: [32]u8 = undefined;
var value = Vec2{
.x = 10.2,
.y = 2.22,
@@ -2220,7 +2219,7 @@ test "union" {
try std.testing.expect(mem.eql(u8, uu_result[0..3], "UU@"));
const eu_result = try bufPrint(buf[0..], "{}", .{eu_inst});
- try std.testing.expect(mem.eql(u8, uu_result[0..3], "EU@"));
+ try std.testing.expect(mem.eql(u8, eu_result[0..3], "EU@"));
}
test "enum" {

View File

@@ -200,7 +200,6 @@ const ParseResult = enum {
fn parseRepr(s: []const u8, n: *FloatRepr) !ParseResult {
var digit_index: usize = 0;
var negative = false;
var negative_exp = false;
var exponent: i32 = 0;

View File

@@ -477,7 +477,7 @@ pub const Dir = struct {
}
var stat_info: os.libc_stat = undefined;
- const rc2 = os.system._kern_read_stat(
+ _ = os.system._kern_read_stat(
self.dir.fd,
&haiku_entry.d_name,
false,
@@ -2438,7 +2438,7 @@ pub fn selfExePath(out_buffer: []u8) SelfExePathError![]u8 {
}) catch continue;
var real_path_buf: [MAX_PATH_BYTES]u8 = undefined;
- if (os.realpathZ(&resolved_path_buf, &real_path_buf)) |real_path| {
+ if (os.realpathZ(resolved_path, &real_path_buf)) |real_path| {
// found a file, and hope it is the right file
if (real_path.len > out_buffer.len)
return error.NameTooLong;

View File

@@ -353,7 +353,6 @@ fn SMHasherTest(comptime hash_fn: anytype) u32 {
var key: [256]u8 = undefined;
var hashes_bytes: [256 * @sizeOf(HashResult)]u8 = undefined;
var final: HashResult = 0;
std.mem.set(u8, &key, 0);
std.mem.set(u8, &hashes_bytes, 0);

View File

@@ -166,8 +166,6 @@ pub const Wyhash = struct {
}
pub fn final(self: *Wyhash) u64 {
const seed = self.state.seed;
const rem_len = @intCast(u5, self.buf_len);
const rem_key = self.buf[0..self.buf_len];
return self.state.final(rem_key);

View File

@@ -1809,7 +1809,7 @@ test "std.hash_map getOrPut" {
i = 0;
while (i < 20) : (i += 1) {
- var n = try map.getOrPutValue(i, 1);
+ _ = try map.getOrPutValue(i, 1);
}
i = 0;

View File

@@ -198,7 +198,7 @@ fn test_read_ileb128_seq(comptime T: type, comptime N: usize, encoded: []const u
var reader = std.io.fixedBufferStream(encoded);
var i: usize = 0;
while (i < N) : (i += 1) {
- const v1 = try readILEB128(T, reader.reader());
+ _ = try readILEB128(T, reader.reader());
}
}
@@ -206,7 +206,7 @@ fn test_read_uleb128_seq(comptime T: type, comptime N: usize, encoded: []const u
var reader = std.io.fixedBufferStream(encoded);
var i: usize = 0;
while (i < N) : (i += 1) {
- const v1 = try readULEB128(T, reader.reader());
+ _ = try readULEB128(T, reader.reader());
}
}
@@ -309,7 +309,6 @@ fn test_write_leb128(value: anytype) !void {
const B = std.meta.Int(signedness, larger_type_bits);
const bytes_needed = bn: {
const S = std.meta.Int(signedness, @sizeOf(T) * 8);
if (@typeInfo(T).Int.bits <= 7) break :bn @as(u16, 1);
const unused_bits = if (value < 0) @clz(T, ~value) else @clz(T, value);

View File

@@ -359,8 +359,8 @@ test "basic TailQueue test" {
}
}
- var first = list.popFirst(); // {2, 3, 4, 5}
- var last = list.pop(); // {2, 3, 4}
+ _ = list.popFirst(); // {2, 3, 4, 5}
+ _ = list.pop(); // {2, 3, 4}
list.remove(&three); // {2, 4}
try testing.expect(list.first.?.data == 2);

View File

@@ -2000,8 +2000,6 @@ fn llmulacc_karatsuba(allocator: *Allocator, r: []Limb, x: []const Limb, y: []co
} else {
llsub(j1, y0[0..y0_len], y1[0..y1_len]);
}
const j0_len = llnormalize(j0);
const j1_len = llnormalize(j1);
if (x_cmp == y_cmp) {
mem.set(Limb, tmp[0..length], 0);
llmulacc(allocator, tmp, j0, j1);

View File

@@ -204,7 +204,6 @@ pub const Rational = struct {
const esize = math.floatExponentBits(T);
const ebias = (1 << (esize - 1)) - 1;
const emin = 1 - ebias;
const emax = ebias;
if (self.p.eqZero()) {
return 0;

View File

@@ -12,8 +12,8 @@
const std = @import("../../std.zig");
const debug = std.debug;
const math = std.math;
- const cmath = math.complex;
const testing = std.testing;
+ const cmath = math.complex;
const Complex = cmath.Complex;
/// Returns exp(z) scaled to avoid overflow.

View File

@@ -316,16 +316,12 @@ test "math.expm1_64" {
}
test "math.expm1_32.special" {
const epsilon = 0.000001;
try expect(math.isPositiveInf(expm1_32(math.inf(f32))));
try expect(expm1_32(-math.inf(f32)) == -1.0);
try expect(math.isNan(expm1_32(math.nan(f32))));
}
test "math.expm1_64.special" {
const epsilon = 0.000001;
try expect(math.isPositiveInf(expm1_64(math.inf(f64))));
try expect(expm1_64(-math.inf(f64)) == -1.0);
try expect(math.isNan(expm1_64(math.nan(f64))));

View File

@@ -12,6 +12,7 @@
const std = @import("../std.zig");
const math = std.math;
const expect = std.testing.expect;
const expectEqual = std.testing.expectEqual;
const maxInt = std.math.maxInt;
fn modf_result(comptime T: type) type {
@@ -131,11 +132,7 @@ test "math.modf" {
const a = modf(@as(f32, 1.0));
const b = modf32(1.0);
- // NOTE: No struct comparison on generic return type function? non-named, makes sense, but still.
- try expect(a.ipart == b.ipart and a.fpart == b.fpart);
- const c = modf(@as(f64, 1.0));
- const d = modf64(1.0);
- try expect(a.ipart == b.ipart and a.fpart == b.fpart);
+ try expectEqual(a, b);
}
test "math.modf32" {

View File

@@ -654,7 +654,6 @@ pub fn TagPayload(comptime U: type, tag: Tag(U)) type {
try testing.expect(trait.is(.Union)(U));
const info = @typeInfo(U).Union;
const tag_info = @typeInfo(Tag(U)).Enum;
inline for (info.fields) |field_info| {
if (comptime mem.eql(u8, field_info.name, @tagName(tag)))
@@ -757,12 +756,6 @@ test "std.meta.eql" {
.c = "12345".*,
};
const s_2 = S{
.a = 1,
.b = 123.3,
.c = "54321".*,
};
var s_3 = S{
.a = 134,
.b = 123.3,

View File

@@ -5341,7 +5341,7 @@ pub fn sendfile(
ENXIO => return error.Unseekable,
ESPIPE => return error.Unseekable,
else => |err| {
- const discard = unexpectedErrno(err);
+ unexpectedErrno(err) catch {};
break :sf;
},
}
@@ -5422,7 +5422,7 @@ pub fn sendfile(
EPIPE => return error.BrokenPipe,
else => {
- const discard = unexpectedErrno(err);
+ unexpectedErrno(err) catch {};
if (amt != 0) {
return amt;
} else {
@@ -5484,7 +5484,7 @@ pub fn sendfile(
EPIPE => return error.BrokenPipe,
else => {
- const discard = unexpectedErrno(err);
+ unexpectedErrno(err) catch {};
if (amt != 0) {
return amt;
} else {

View File

@@ -1272,12 +1272,12 @@ test "accept/connect/send/recv" {
var accept_addr: os.sockaddr = undefined;
var accept_addr_len: os.socklen_t = @sizeOf(@TypeOf(accept_addr));
- const accept = try ring.accept(0xaaaaaaaa, server, &accept_addr, &accept_addr_len, 0);
+ _ = try ring.accept(0xaaaaaaaa, server, &accept_addr, &accept_addr_len, 0);
try testing.expectEqual(@as(u32, 1), try ring.submit());
const client = try os.socket(address.any.family, os.SOCK_STREAM | os.SOCK_CLOEXEC, 0);
defer os.close(client);
- const connect = try ring.connect(0xcccccccc, client, &address.any, address.getOsSockLen());
+ _ = try ring.connect(0xcccccccc, client, &address.any, address.getOsSockLen());
try testing.expectEqual(@as(u32, 1), try ring.submit());
var cqe_accept = try ring.copy_cqe();
@@ -1305,7 +1305,7 @@ test "accept/connect/send/recv" {
const send = try ring.send(0xeeeeeeee, client, buffer_send[0..], 0);
send.flags |= linux.IOSQE_IO_LINK;
- const recv = try ring.recv(0xffffffff, cqe_accept.res, buffer_recv[0..], 0);
+ _ = try ring.recv(0xffffffff, cqe_accept.res, buffer_recv[0..], 0);
try testing.expectEqual(@as(u32, 2), try ring.submit());
const cqe_send = try ring.copy_cqe();

View File

@@ -15,7 +15,6 @@ pub fn lookup(vername: []const u8, name: []const u8) usize {
const eh = @intToPtr(*elf.Ehdr, vdso_addr);
var ph_addr: usize = vdso_addr + eh.e_phoff;
const ph = @intToPtr(*elf.Phdr, ph_addr);
var maybe_dynv: ?[*]usize = null;
var base: usize = maxInt(usize);

View File

@@ -1156,7 +1156,6 @@ pub fn GetFinalPathNameByHandle(
&mount_points_struct.MountPoints[0],
)[0..mount_points_struct.NumberOfMountPoints];
var found: bool = false;
for (mount_points) |mount_point| {
const symlink = @ptrCast(
[*]const u16,

View File

@@ -590,11 +590,11 @@ pub const Pdb = struct {
var sect_cont_offset: usize = 0;
if (section_contrib_size != 0) {
// the version
- _ = reader.readEnum(SectionContrSubstreamVersion, .Little) catch |err| switch (err) {
+ const version = reader.readEnum(SectionContrSubstreamVersion, .Little) catch |err| switch (err) {
error.InvalidValue => return error.InvalidDebugInfo,
else => |e| return e,
};
+ _ = version;
sect_cont_offset += @sizeOf(u32);
}
while (sect_cont_offset != section_contrib_size) {
@@ -617,8 +617,8 @@ pub const Pdb = struct {
// Parse the InfoStreamHeader.
const version = try reader.readIntLittle(u32);
// The signature
- _ = try reader.readIntLittle(u32);
+ const signature = try reader.readIntLittle(u32);
+ _ = signature;
const age = try reader.readIntLittle(u32);
const guid = try reader.readBytesNoEof(16);

View File

@@ -175,5 +175,5 @@ test "exp dist sanity" {
test "table gen" {
if (please_windows_dont_oom) return error.SkipZigTest;
- const table = NormDist;
+ _ = NormDist;
}

View File

@@ -83,7 +83,6 @@ fn addXf3(comptime T: type, a: T, b: T) T {
const signBit = (@as(Z, 1) << (significandBits + exponentBits));
const maxExponent = ((1 << exponentBits) - 1);
const exponentBias = (maxExponent >> 1);
const implicitBit = (@as(Z, 1) << significandBits);
const quietBit = implicitBit >> 1;
@@ -98,10 +97,6 @@ fn addXf3(comptime T: type, a: T, b: T) T {
const aAbs = aRep & absMask;
const bAbs = bRep & absMask;
const negative = (aRep & signBit) != 0;
const exponent = @intCast(i32, aAbs >> significandBits) - exponentBias;
const significand = (aAbs & significandMask) | implicitBit;
const infRep = @bitCast(Z, std.math.inf(T));
// Detect if a or b is zero, infinity, or NaN.

View File

@@ -12,7 +12,6 @@ const wideMultiply = @import("divdf3.zig").wideMultiply;
pub fn __divtf3(a: f128, b: f128) callconv(.C) f128 {
@setRuntimeSafety(builtin.is_test);
const Z = std.meta.Int(.unsigned, 128);
const SignedZ = std.meta.Int(.signed, 128);
const significandBits = std.math.floatMantissaBits(f128);
const exponentBits = std.math.floatExponentBits(f128);

View File

@@ -46,7 +46,6 @@ fn extendXfYf2(comptime dst_t: type, comptime src_t: type, a: std.meta.Int(.unsi
const dst_rep_t = std.meta.Int(.unsigned, @typeInfo(dst_t).Float.bits);
const srcSigBits = std.math.floatMantissaBits(src_t);
const dstSigBits = std.math.floatMantissaBits(dst_t);
const SrcShift = std.math.Log2Int(src_rep_t);
const DstShift = std.math.Log2Int(dst_rep_t);
// Various constants whose values follow from the type parameters.

View File

@@ -16,7 +16,6 @@ pub fn fixuint(comptime fp_t: type, comptime fixuint_t: type, a: fp_t) fixuint_t
else => unreachable,
};
const typeWidth = @typeInfo(rep_t).Int.bits;
const srep_t = @import("std").meta.Int(.signed, typeWidth);
const significandBits = switch (fp_t) {
f32 => 23,
f64 => 52,

View File

@@ -50,7 +50,6 @@ fn truncXfYf2(comptime dst_t: type, comptime src_t: type, a: src_t) dst_t {
const srcSigBits = std.math.floatMantissaBits(src_t);
const dstSigBits = std.math.floatMantissaBits(dst_t);
const SrcShift = std.math.Log2Int(src_rep_t);
const DstShift = std.math.Log2Int(dst_rep_t);
// Various constants whose values follow from the type parameters.
// Any reasonable optimizer will fold and propagate all of these.

View File

@@ -191,7 +191,6 @@ test "expectEqual.union(enum)" {
};
const a10 = T{ .a = 10 };
const a20 = T{ .a = 20 };
try expectEqual(a10, a10);
}

View File

@@ -47,8 +47,6 @@ fn benchmarkCodepointCount(buf: []const u8) !ResultCount {
pub fn main() !void {
const stdout = std.io.getStdOut().writer();
const args = try std.process.argsAlloc(std.heap.page_allocator);
try stdout.print("short ASCII strings\n", .{});
{
const result = try benchmarkCodepointCount("abc");

View File

@@ -122,7 +122,6 @@ test "reactor/linux: drive async tcp client/listener pair" {
const IPv4 = std.x.os.IPv4;
const IPv6 = std.x.os.IPv6;
const Socket = std.x.os.Socket;
const reactor = try Reactor.init(.{ .close_on_exec = true });
defer reactor.deinit();

View File

@@ -1866,7 +1866,6 @@ pub const Tree = struct {
}
fn fullStructInit(tree: Tree, info: full.StructInit.Ast) full.StructInit {
const token_tags = tree.tokens.items(.tag);
var result: full.StructInit = .{
.ast = info,
};

View File

@@ -586,7 +586,7 @@ const Parser = struct {
const thread_local_token = p.eatToken(.keyword_threadlocal);
const var_decl = try p.parseVarDecl();
if (var_decl != 0) {
- const semicolon_token = try p.expectToken(.semicolon);
+ _ = try p.expectToken(.semicolon);
return var_decl;
}
if (thread_local_token != null) {
@@ -614,7 +614,7 @@ const Parser = struct {
fn expectUsingNamespace(p: *Parser) !Node.Index {
const usingnamespace_token = p.assertToken(.keyword_usingnamespace);
const expr = try p.expectExpr();
- const semicolon_token = try p.expectToken(.semicolon);
+ _ = try p.expectToken(.semicolon);
return p.addNode(.{
.tag = .@"usingnamespace",
.main_token = usingnamespace_token,
@@ -647,7 +647,7 @@ const Parser = struct {
const align_expr = try p.parseByteAlign();
const section_expr = try p.parseLinkSection();
const callconv_expr = try p.parseCallconv();
- const bang_token = p.eatToken(.bang);
+ _ = p.eatToken(.bang);
const return_type_expr = try p.parseTypeExpr();
if (return_type_expr == 0) {
@@ -775,7 +775,7 @@ const Parser = struct {
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON (KEYWORD_anytype / TypeExpr) ByteAlign?)? (EQUAL Expr)?
fn expectContainerField(p: *Parser) !Node.Index {
- const comptime_token = p.eatToken(.keyword_comptime);
+ _ = p.eatToken(.keyword_comptime);
const name_token = p.assertToken(.identifier);
var align_expr: Node.Index = 0;
@@ -967,7 +967,7 @@ const Parser = struct {
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
- const then_payload = try p.parsePtrPayload();
+ _ = try p.parsePtrPayload();
// TODO propose to change the syntax so that semicolons are always required
// inside if statements, even if there is an `else`.
@@ -992,7 +992,7 @@ const Parser = struct {
else_required = true;
break :blk assign_expr;
};
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
if (else_required) {
try p.warn(.expected_semi_or_else);
}
@@ -1087,7 +1087,7 @@ const Parser = struct {
else_required = true;
break :blk assign_expr;
};
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
if (else_required) {
try p.warn(.expected_semi_or_else);
}
@@ -1122,7 +1122,7 @@ const Parser = struct {
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
- const then_payload = try p.parsePtrPayload();
+ _ = try p.parsePtrPayload();
const cont_expr = try p.parseWhileContinueExpr();
// TODO propose to change the syntax so that semicolons are always required
@@ -1162,7 +1162,7 @@ const Parser = struct {
else_required = true;
break :blk assign_expr;
};
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
if (else_required) {
try p.warn(.expected_semi_or_else);
}
@@ -1550,7 +1550,7 @@ const Parser = struct {
},
.l_bracket => switch (p.token_tags[p.tok_i + 1]) {
.asterisk => {
- const lbracket = p.nextToken();
+ _ = p.nextToken();
const asterisk = p.nextToken();
var sentinel: Node.Index = 0;
prefix: {
@@ -1907,7 +1907,7 @@ const Parser = struct {
if (found_payload == 0) try p.warn(.expected_loop_payload);
const then_expr = try p.expectExpr();
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
return p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
@@ -1938,11 +1938,11 @@ const Parser = struct {
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
- const then_payload = try p.parsePtrPayload();
+ _ = try p.parsePtrPayload();
const cont_expr = try p.parseWhileContinueExpr();
const then_expr = try p.expectExpr();
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
if (cont_expr == 0) {
return p.addNode(.{
.tag = .while_simple,
@@ -1966,7 +1966,7 @@ const Parser = struct {
});
}
};
- const else_payload = try p.parsePayload();
+ _ = try p.parsePayload();
const else_expr = try p.expectExpr();
return p.addNode(.{
.tag = .@"while",
@@ -2565,8 +2565,8 @@ const Parser = struct {
p.tok_i += 2;
while (true) {
if (p.eatToken(.r_brace)) |_| break;
- const doc_comment = try p.eatDocComments();
- const identifier = try p.expectToken(.identifier);
+ _ = try p.eatDocComments();
+ _ = try p.expectToken(.identifier);
switch (p.token_tags[p.tok_i]) {
.comma => p.tok_i += 1,
.r_brace => {
@@ -2634,7 +2634,7 @@ const Parser = struct {
if (found_payload == 0) try p.warn(.expected_loop_payload);
const then_expr = try p.expectTypeExpr();
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
return p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
@@ -2665,11 +2665,11 @@ const Parser = struct {
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
- const then_payload = try p.parsePtrPayload();
+ _ = try p.parsePtrPayload();
const cont_expr = try p.parseWhileContinueExpr();
const then_expr = try p.expectTypeExpr();
- const else_token = p.eatToken(.keyword_else) orelse {
+ _ = p.eatToken(.keyword_else) orelse {
if (cont_expr == 0) {
return p.addNode(.{
.tag = .while_simple,
@@ -2693,7 +2693,7 @@ const Parser = struct {
});
}
};
- const else_payload = try p.parsePayload();
+ _ = try p.parsePayload();
const else_expr = try p.expectTypeExpr();
return p.addNode(.{
.tag = .@"while",
@@ -3570,12 +3570,12 @@ const Parser = struct {
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
- const then_payload = try p.parsePtrPayload();
+ _ = try p.parsePtrPayload();
const then_expr = try bodyParseFn(p);
if (then_expr == 0) return p.fail(.invalid_token);
- const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{
+ _ = p.eatToken(.keyword_else) orelse return p.addNode(.{
.tag = .if_simple,
.main_token = if_token,
.data = .{
@@ -3583,7 +3583,7 @@ const Parser = struct {
.rhs = then_expr,
},
});
- const else_payload = try p.parsePayload();
+ _ = try p.parsePayload();
const else_expr = try bodyParseFn(p);
if (else_expr == 0) return p.fail(.invalid_token);

View File

@@ -5201,7 +5201,6 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
defer tree.deinit(allocator);
for (tree.errors) |parse_error| {
const token_start = tree.tokens.items(.start)[parse_error.token];
const loc = tree.tokenLocation(0, parse_error.token);
try stderr.print("(memory buffer):{d}:{d}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr);

View File

@@ -1086,8 +1086,6 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full.
}
if (while_node.ast.else_expr != 0) {
const first_else_expr_tok = tree.firstToken(while_node.ast.else_expr);
if (indent_then_expr) {
ais.pushIndent();
try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .newline);
@@ -1133,7 +1131,6 @@ fn renderContainerField(
field: ast.full.ContainerField,
space: Space,
) Error!void {
const main_tokens = tree.nodes.items(.main_token);
if (field.comptime_token) |t| {
try renderToken(ais, tree, t, .space); // comptime
}
@@ -1519,7 +1516,6 @@ fn renderBlock(
) Error!void {
const token_tags = tree.tokens.items(.tag);
const node_tags = tree.nodes.items(.tag);
const nodes_data = tree.nodes.items(.data);
const lbrace = tree.nodes.items(.main_token)[block_node];
if (token_tags[lbrace - 1] == .colon and
@@ -1617,7 +1613,6 @@ fn renderArrayInit(
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
const token_starts = tree.tokens.items(.start);
if (array_init.ast.type_expr == 0) {
try renderToken(ais, tree, array_init.ast.lbrace - 1, .none); // .
@@ -2046,7 +2041,6 @@ fn renderCall(
space: Space,
) Error!void {
const token_tags = tree.tokens.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
if (call.async_token) |async_token| {
try renderToken(ais, tree, async_token, .space);

View File

@@ -478,13 +478,6 @@ pub const NativeTargetInfo = struct {
}
const ld_info_list = ld_info_list_buffer[0..ld_info_list_len];
if (cross_target.dynamic_linker.get()) |explicit_ld| {
const explicit_ld_basename = fs.path.basename(explicit_ld);
for (ld_info_list) |ld_info| {
const standard_ld_basename = fs.path.basename(ld_info.ld.get().?);
}
}
// Best case scenario: the executable is dynamically linked, and we can iterate
// over our own shared objects and find a dynamic linker.
self_exe: {
@@ -838,7 +831,7 @@ pub const NativeTargetInfo = struct {
if (dynstr) |ds| {
const strtab_len = std.math.min(ds.size, strtab_buf.len);
- const strtab_read_len = try preadMin(file, &strtab_buf, ds.offset, shstrtab_len);
+ const strtab_read_len = try preadMin(file, &strtab_buf, ds.offset, strtab_len);
const strtab = strtab_buf[0..strtab_read_len];
// TODO this pointer cast should not be necessary
const rpoff_usize = std.math.cast(usize, rpoff) catch |err| switch (err) {

View File

@@ -416,7 +416,6 @@ pub const Tokenizer = struct {
self.pending_invalid_token = null;
return token;
}
const start_index = self.index;
var state: State = .start;
var result = Token{
.tag = .eof,

View File

@@ -206,7 +206,6 @@ pub const ResultLoc = union(enum) {
};
fn strategy(rl: ResultLoc, block_scope: *GenZir) Strategy {
var elide_store_to_block_ptr_instructions = false;
switch (rl) {
// In this branch there will not be any store_to_block_ptr instructions.
.discard, .none, .none_or_ref, .ty, .ref => return .{
@@ -905,7 +904,6 @@ fn nosuspendExpr(
node: ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
const body_node = node_datas[node].lhs;
@@ -1113,7 +1111,6 @@ fn arrayInitExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
const gpa = astgen.gpa;
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
@@ -1293,7 +1290,6 @@ fn structInitExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
const gpa = astgen.gpa;
if (struct_init.ast.fields.len == 0) {
if (struct_init.ast.type_expr == 0) {
@@ -1675,9 +1671,6 @@ fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: ast.Toke
const gen_zir = scope.cast(GenZir).?;
if (gen_zir.label) |prev_label| {
if (try astgen.tokenIdentEql(label, prev_label.token)) {
const tree = astgen.tree;
const main_tokens = tree.nodes.items(.main_token);
const label_name = try astgen.identifierTokenString(label);
return astgen.failTokNotes(label, "redefinition of label '{s}'", .{
label_name,
@@ -1790,7 +1783,6 @@ fn blockExprStmts(
) !void {
const astgen = gz.astgen;
const tree = astgen.tree;
const main_tokens = tree.nodes.items(.main_token);
const node_tags = tree.nodes.items(.tag);
var block_arena = std.heap.ArenaAllocator.init(gz.astgen.gpa);
@@ -2147,7 +2139,10 @@ fn genDefers(
.defer_error => {
const defer_scope = scope.cast(Scope.Defer).?;
scope = defer_scope.parent;
- if (err_code == .none) continue;
+ // TODO add this back when we have more errdefer support
+ // right now it is making stuff not get evaluated which causes
+ // unused vars.
+ // if (err_code == .none) continue;
const expr_node = node_datas[defer_scope.defer_node].rhs;
const prev_in_defer = gz.in_defer;
gz.in_defer = true;
@@ -2166,8 +2161,6 @@ fn checkUsed(
inner_scope: *Scope,
) InnerError!void {
const astgen = gz.astgen;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
var scope = inner_scope;
while (scope != outer_scope) {
@@ -2450,7 +2443,7 @@ fn varDecl(
resolve_inferred_alloc = alloc;
break :a .{ .alloc = alloc, .result_loc = .{ .inferred_ptr = alloc } };
};
- const init_inst = try expr(gz, scope, var_data.result_loc, var_decl.ast.init_node);
+ _ = try expr(gz, scope, var_data.result_loc, var_decl.ast.init_node);
if (resolve_inferred_alloc != .none) {
_ = try gz.addUnNode(.resolve_inferred_alloc, resolve_inferred_alloc, node);
}
@@ -2477,7 +2470,6 @@ fn emitDbgNode(gz: *GenZir, node: ast.Node.Index) !void {
const astgen = gz.astgen;
const tree = astgen.tree;
const node_tags = tree.nodes.items(.tag);
const token_starts = tree.tokens.items(.start);
const decl_start = token_starts[tree.firstToken(gz.decl_node_index)];
const node_start = token_starts[tree.firstToken(node)];
@@ -2602,9 +2594,6 @@ fn ptrType(
node: ast.Node.Index,
ptr_info: ast.full.PtrType,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
const elem_type = try typeExpr(gz, scope, ptr_info.ast.child_type);
const simple = ptr_info.ast.align_node == 0 and
@@ -4305,10 +4294,8 @@ fn containerDecl(
defer wip_decls.deinit(gpa);
for (container_decl.ast.members) |member_node| {
- const member = switch (node_tags[member_node]) {
- .container_field_init => tree.containerFieldInit(member_node),
- .container_field_align => tree.containerFieldAlign(member_node),
- .container_field => tree.containerField(member_node),
+ switch (node_tags[member_node]) {
+ .container_field_init, .container_field_align, .container_field => {},
.fn_decl => {
const fn_proto = node_datas[member_node].lhs;
@@ -4429,7 +4416,7 @@ fn containerDecl(
continue;
},
else => unreachable,
- };
+ }
}
{
const empty_slot_count = WipDecls.fields_per_u32 - (wip_decls.decl_index % WipDecls.fields_per_u32);
@@ -4497,11 +4484,6 @@ fn errorSetDecl(
}
}
const tag: Zir.Inst.Tag = switch (gz.anon_name_strategy) {
.parent => .error_set_decl,
.anon => .error_set_decl_anon,
.func => .error_set_decl_func,
};
const result = try gz.addPlNode(.error_set_decl, node, Zir.Inst.ErrorSetDecl{
.fields_len = @intCast(u32, field_names.items.len),
});
@@ -4517,7 +4499,6 @@ fn tryExpr(
operand_node: ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
const fn_block = astgen.fn_block orelse {
return astgen.failNode(node, "invalid 'try' outside function scope", .{});
@@ -4702,7 +4683,6 @@ fn finishThenElseBlock(
// We now have enough information to decide whether the result instruction should
// be communicated via result location pointer or break instructions.
const strat = rl.strategy(block_scope);
const astgen = block_scope.astgen;
switch (strat.tag) {
.break_void => {
if (!parent_gz.refIsNoReturn(then_result)) {
@@ -4786,7 +4766,6 @@ fn arrayAccess(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
const main_tokens = tree.nodes.items(.main_token);
const node_datas = tree.nodes.items(.data);
switch (rl) {
.ref => return gz.addBin(
@@ -6054,7 +6033,6 @@ fn ret(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref
const astgen = gz.astgen;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
if (gz.in_defer) return astgen.failNode(node, "cannot return from defer expression", .{});
@@ -6271,7 +6249,6 @@ fn multilineStringLiteral(
const astgen = gz.astgen;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const start = node_datas[node].lhs;
const end = node_datas[node].rhs;
@@ -6387,7 +6364,6 @@ fn floatLiteral(
node: ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const arena = astgen.arena;
const tree = astgen.tree;
const main_tokens = tree.nodes.items(.main_token);
@@ -6430,7 +6406,6 @@ fn asmExpr(
full: ast.full.Asm,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const arena = astgen.arena;
const tree = astgen.tree;
const main_tokens = tree.nodes.items(.main_token);
const node_datas = tree.nodes.items(.data);
@@ -6519,7 +6494,6 @@ fn asmExpr(
const name = try astgen.identAsString(symbolic_name);
const constraint_token = symbolic_name + 2;
const constraint = (try astgen.strLitAsString(constraint_token)).index;
const has_arrow = token_tags[symbolic_name + 4] == .arrow;
const operand = try expr(gz, scope, .{ .ty = .usize_type }, node_datas[input_node].lhs);
inputs[i] = .{
.name = name,
@@ -6601,7 +6575,7 @@ fn unionInit(
const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
switch (rl) {
.none, .none_or_ref, .discard, .ref, .ty, .inferred_ptr => {
- const field_type = try gz.addPlNode(.field_type_ref, params[1], Zir.Inst.FieldTypeRef{
+ _ = try gz.addPlNode(.field_type_ref, params[1], Zir.Inst.FieldTypeRef{
.container_type = union_type,
.field_name = field_name,
});
@@ -6783,7 +6757,6 @@ fn builtinCall(
switch (info.tag) {
.import => {
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
const operand_node = params[0];
if (node_tags[operand_node] != .string_literal) {
@@ -8119,7 +8092,6 @@ fn parseStrLit(
bytes: []const u8,
offset: u32,
) InnerError!void {
const tree = astgen.tree;
const raw_string = bytes[offset..];
var buf_managed = buf.toManaged(astgen.gpa);
const result = std.zig.string_literal.parseAppend(&buf_managed, raw_string);
@@ -8567,7 +8539,6 @@ const GenZir = struct {
fn calcLine(gz: GenZir, node: ast.Node.Index) u32 {
const astgen = gz.astgen;
const tree = astgen.tree;
const node_tags = tree.nodes.items(.tag);
const token_starts = tree.tokens.items(.start);
const decl_start = token_starts[tree.firstToken(gz.decl_node_index)];
const node_start = token_starts[tree.firstToken(node)];

View File

@@ -325,7 +325,6 @@ pub const AllErrors = struct {
},
pub fn renderToStdErr(msg: Message, ttyconf: std.debug.TTY.Config) void {
const stderr_mutex = std.debug.getStderrMutex();
const held = std.debug.getStderrMutex().acquire();
defer held.release();
const stderr = std.io.getStdErr();
@@ -2373,7 +2372,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
// We need to "unhit" in this case, to keep the digests matching.
const prev_hash_state = man.hash.peekBin();
const actual_hit = hit: {
- const is_hit = try man.hit();
+ _ = try man.hit();
if (man.files.items.len == 0) {
man.unhit(prev_hash_state, 0);
break :hit false;

View File

@@ -944,7 +944,7 @@ fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void {
try out.writeAll(text);
var i: usize = text.len;
const end = 79;
- while (i < 79) : (i += 1) {
+ while (i < end) : (i += 1) {
try out.writeAll(&[_]u8{label[0]});
}
try out.writeAll("\n");
@@ -953,7 +953,7 @@ fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void {
fn printRuler(out: anytype) !void {
var i: usize = 0;
const end = 79;
- while (i < 79) : (i += 1) {
+ while (i < end) : (i += 1) {
try out.writeAll("-");
}
try out.writeAll("\n");
@@ -1057,4 +1057,3 @@ const printable_char_tab: [256]u8 = (
"................................................................" ++
"................................................................"
).*;

View File

@@ -1561,7 +1561,6 @@ pub const SrcLoc = struct {
.node_offset_array_access_index => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
const main_tokens = tree.nodes.items(.main_token);
const tok_index = main_tokens[node_datas[node].rhs];
@@ -1570,7 +1569,6 @@ pub const SrcLoc = struct {
},
.node_offset_slice_sentinel => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
@@ -1586,7 +1584,6 @@ pub const SrcLoc = struct {
},
.node_offset_call_func => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
@@ -1625,7 +1622,6 @@ pub const SrcLoc = struct {
.node_offset_deref_ptr => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
const tok_index = node_datas[node].lhs;
const token_starts = tree.tokens.items(.start);
@@ -1633,7 +1629,6 @@ pub const SrcLoc = struct {
},
.node_offset_asm_source => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
@@ -1648,7 +1643,6 @@ pub const SrcLoc = struct {
},
.node_offset_asm_ret_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = switch (node_tags[node]) {
@@ -1771,7 +1765,6 @@ pub const SrcLoc = struct {
.node_offset_fn_type_cc => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
@@ -1790,7 +1783,6 @@ pub const SrcLoc = struct {
.node_offset_fn_type_ret_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const node = src_loc.declRelativeToNodeIndex(node_off);
var params: [1]ast.Node.Index = undefined;
@@ -1810,7 +1802,6 @@ pub const SrcLoc = struct {
.node_offset_anyframe_type => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node_tags = tree.nodes.items(.tag);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const node = node_datas[parent_node].rhs;
const main_tokens = tree.nodes.items(.main_token);
@@ -2502,7 +2493,6 @@ pub fn astGenFile(mod: *Module, file: *Scope.File, prog_node: *std.Progress.Node
@ptrCast([*]const u8, file.zir.instructions.items(.data).ptr);
if (data_has_safety_tag) {
// The `Data` union has a safety tag but in the file format we store it without.
const tags = file.zir.instructions.items(.tag);
for (file.zir.instructions.items(.data)) |*data, i| {
const as_struct = @ptrCast(*const Stage1DataLayout, data);
safety_buffer[i] = as_struct.data;
@@ -3386,7 +3376,6 @@ fn scanDecl(iter: *ScanDeclIter, decl_sub_index: usize, flags: u4) InnerError!vo
log.debug("scan existing {*} ({s}) of {*}", .{ decl, decl.name, namespace });
// Update the AST node of the decl; even if its contents are unchanged, it may
// have been re-ordered.
const prev_src_node = decl.src_node;
decl.src_node = decl_node;
decl.src_line = line;
@@ -4692,11 +4681,9 @@ pub fn analyzeUnionFields(mod: *Module, union_obj: *Union) InnerError!void {
const src: LazySrcLoc = .{ .node_offset = union_obj.node_offset };
extra_index += @boolToInt(small.has_src_node);
- const tag_type_ref = if (small.has_tag_type) blk: {
- const tag_type_ref = @intToEnum(Zir.Inst.Ref, zir.extra[extra_index]);
+ if (small.has_tag_type) {
extra_index += 1;
- break :blk tag_type_ref;
- } else .none;
+ }
const body_len = if (small.has_body_len) blk: {
const body_len = zir.extra[extra_index];
@@ -4784,6 +4771,7 @@ pub fn analyzeUnionFields(mod: *Module, union_obj: *Union) InnerError!void {
cur_bit_bag >>= 1;
const unused = @truncate(u1, cur_bit_bag) != 0;
cur_bit_bag >>= 1;
_ = unused;
const field_name_zir = zir.nullTerminatedString(zir.extra[extra_index]);
extra_index += 1;
@@ -4800,11 +4788,9 @@ pub fn analyzeUnionFields(mod: *Module, union_obj: *Union) InnerError!void {
break :blk align_ref;
} else .none;
- const tag_ref: Zir.Inst.Ref = if (has_tag) blk: {
- const tag_ref = @intToEnum(Zir.Inst.Ref, zir.extra[extra_index]);
+ if (has_tag) {
extra_index += 1;
- break :blk tag_ref;
- } else .none;
+ }
// This string needs to outlive the ZIR code.
const field_name = try decl_arena.allocator.dupe(u8, field_name_zir);

View File

@@ -1073,6 +1073,11 @@ fn zirOpaqueDecl(
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index);
if (false) {
inst_data;
src;
extra;
}
return sema.mod.fail(&block.base, sema.src, "TODO implement zirOpaqueDecl", .{});
}
@@ -1230,7 +1235,6 @@ fn zirIndexablePtrLen(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) In
fn zirArg(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].str_tok;
const src = inst_data.src();
const arg_name = inst_data.get(sema.code);
const arg_index = sema.next_arg_index;
sema.next_arg_index += 1;
@@ -3005,7 +3009,6 @@ fn zirFunc(
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = sema.code.extraData(Zir.Inst.Func, inst_data.payload_index);
const param_types = sema.code.refSlice(extra.end, extra.data.param_types_len);
@@ -3332,9 +3335,7 @@ fn zirBitcast(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const dest_ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const dest_type = try sema.resolveType(block, dest_ty_src, extra.lhs);
@@ -3653,7 +3654,6 @@ fn analyzeSwitch(
extra_index += 1;
const body_len = sema.code.extra[extra_index];
extra_index += 1;
const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;
try sema.validateSwitchItemEnum(
@@ -3763,7 +3763,6 @@ fn analyzeSwitch(
extra_index += 1;
const body_len = sema.code.extra[extra_index];
extra_index += 1;
const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;
try sema.validateSwitchItem(
@@ -3859,7 +3858,6 @@ fn analyzeSwitch(
extra_index += 1;
const body_len = sema.code.extra[extra_index];
extra_index += 1;
const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;
try sema.validateSwitchItemBool(
@@ -3942,7 +3940,6 @@ fn analyzeSwitch(
extra_index += 1;
const body_len = sema.code.extra[extra_index];
extra_index += 1;
const body = sema.code.extra[extra_index..][0..body_len];
extra_index += body_len;
try sema.validateSwitchItemSparse(
@@ -4457,6 +4454,7 @@ fn validateSwitchNoRange(
fn zirHasField(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
_ = extra;
const src = inst_data.src();
return sema.mod.fail(&block.base, src, "TODO implement zirHasField", .{});
@@ -6035,7 +6033,6 @@ fn zirVarExtended(
) InnerError!*Inst {
const extra = sema.code.extraData(Zir.Inst.ExtendedVar, extended.operand);
const src = sema.src;
const align_src: LazySrcLoc = src; // TODO add a LazySrcLoc that points at align
const ty_src: LazySrcLoc = src; // TODO add a LazySrcLoc that points at type
const mut_src: LazySrcLoc = src; // TODO add a LazySrcLoc that points at mut token
const init_src: LazySrcLoc = src; // TODO add a LazySrcLoc that points at init expr
@@ -7131,6 +7128,7 @@ fn analyzeSlice(
ptr_child.isVolatilePtr(),
return_ptr_size,
);
_ = return_type;
return sema.mod.fail(&block.base, src, "TODO implement analysis of slice", .{});
}

View File

@@ -101,7 +101,7 @@ pub fn deinit(self: *ThreadPool) void {
pub fn spawn(self: *ThreadPool, comptime func: anytype, args: anytype) !void {
if (std.builtin.single_threaded) {
- const result = @call(.{}, func, args);
+ @call(.{}, func, args);
return;
}
@@ -114,7 +114,7 @@ pub fn spawn(self: *ThreadPool, comptime func: anytype, args: anytype) !void {
fn runFn(runnable: *Runnable) void {
const run_node = @fieldParentPtr(RunQueue.Node, "data", runnable);
const closure = @fieldParentPtr(@This(), "run_node", run_node);
- const result = @call(.{}, func, closure.arguments);
+ @call(.{}, func, closure.arguments);
const held = closure.pool.lock.acquire();
defer held.release();

View File

@@ -3176,6 +3176,7 @@ const Writer = struct {
inst: Inst.Index,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].array_type_sentinel;
_ = inst_data;
try stream.writeAll("TODO)");
}
@@ -3213,6 +3214,7 @@ const Writer = struct {
inst: Inst.Index,
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[inst].ptr_type;
_ = inst_data;
try stream.writeAll("TODO)");
}
@@ -4739,7 +4741,6 @@ fn findDeclsSwitch(
var extra_index: usize = special.end;
var scalar_i: usize = 0;
while (scalar_i < extra.data.cases_len) : (scalar_i += 1) {
const item_ref = @intToEnum(Inst.Ref, zir.extra[extra_index]);
extra_index += 1;
const body_len = zir.extra[extra_index];
extra_index += 1;
@@ -4779,7 +4780,6 @@ fn findDeclsSwitchMulti(
{
var scalar_i: usize = 0;
while (scalar_i < extra.data.scalar_cases_len) : (scalar_i += 1) {
const item_ref = @intToEnum(Inst.Ref, zir.extra[extra_index]);
extra_index += 1;
const body_len = zir.extra[extra_index];
extra_index += 1;
@@ -4800,12 +4800,11 @@ fn findDeclsSwitchMulti(
extra_index += 1;
const items = zir.refSlice(extra_index, items_len);
extra_index += items_len;
_ = items;
var range_i: usize = 0;
while (range_i < ranges_len) : (range_i += 1) {
const item_first = @intToEnum(Inst.Ref, zir.extra[extra_index]);
extra_index += 1;
const item_last = @intToEnum(Inst.Ref, zir.extra[extra_index]);
extra_index += 1;
}

View File

@@ -118,7 +118,6 @@ pub fn generateSymbol(
if (typed_value.ty.sentinel()) |sentinel| {
try code.ensureCapacity(code.items.len + payload.data.len + 1);
code.appendSliceAssumeCapacity(payload.data);
const prev_len = code.items.len;
switch (try generateSymbol(bin_file, src_loc, .{
.ty = typed_value.ty.elemType(),
.val = sentinel,

View File

@@ -1107,7 +1107,6 @@ fn genAsm(o: *Object, as: *Inst.Assembly) !CValue {
for (as.inputs) |i, index| {
if (i[0] == '{' and i[i.len - 1] == '}') {
const reg = i[1 .. i.len - 1];
const arg = as.args[index];
if (index > 0) {
try writer.writeAll(", ");
}

View File

@@ -714,7 +714,6 @@ pub const DeclGen = struct {
return self.fail(inst.base.src, "TODO: SPIR-V backend: binary operations for strange integers", .{});
}
const is_bool = info.class == .bool;
const is_float = info.class == .float;
const is_signed = info.signedness == .signed;
// **Note**: All these operations must be valid for vectors as well!
@@ -802,8 +801,6 @@ pub const DeclGen = struct {
const result_id = self.spv.allocResultId();
const result_type_id = try self.genType(inst.base.src, inst.base.ty);
const info = try self.arithmeticTypeInfo(inst.operand.ty);
const opcode = switch (inst.base.tag) {
// Bool -> bool
.not => Opcode.OpLogicalNot,
@@ -867,6 +864,7 @@ pub const DeclGen = struct {
// are not allowed to be created from a phi node, and throw an error for those. For now, genType already throws
// an error for pointers.
const result_type_id = try self.genType(inst.base.src, inst.base.ty);
_ = result_type_id;
try writeOpcode(&self.code, .OpPhi, 2 + @intCast(u16, incoming_blocks.items.len * 2)); // result type + result + variable/parent...

View File

@@ -849,7 +849,6 @@ pub const Context = struct {
}
fn genCall(self: *Context, inst: *Inst.Call) InnerError!WValue {
const func_inst = inst.func.castTag(.constant).?;
const func_val = inst.func.value().?;
const target: *Decl = blk: {
@@ -1146,8 +1145,6 @@ pub const Context = struct {
}
fn genCmp(self: *Context, inst: *Inst.BinOp, op: std.math.CompareOperator) InnerError!WValue {
const ty = inst.lhs.ty.tag();
// save offset, so potential conditions can insert blocks in front of
// the comparison that we can later jump back to
const offset = self.code.items.len;

View File

@@ -497,7 +497,6 @@ fn add_include_dirs(comp: *Compilation, arena: *Allocator, args: *std.ArrayList(
const target = comp.getTarget();
const arch = target.cpu.arch;
const opt_nptl: ?[]const u8 = if (target.os.tag == .linux) "nptl" else "htl";
const glibc = try lib_path(comp, arena, lib_libc ++ "glibc");
const s = path.sep_str;

View File

@@ -2918,7 +2918,6 @@ fn relocateSymbolTable(self: *MachO) !void {
const nsyms = nlocals + nglobals + nundefs;
if (symtab.nsyms < nsyms) {
const linkedit_segment = self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
const needed_size = nsyms * @sizeOf(macho.nlist_64);
if (needed_size > self.allocatedSizeLinkedit(symtab.symoff)) {
// Move the entire symbol table to a new location
@@ -3150,7 +3149,6 @@ fn writeExportTrie(self: *MachO) !void {
const nwritten = try trie.write(stream.writer());
assert(nwritten == trie.size);
const linkedit_segment = self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
const dyld_info = &self.load_commands.items[self.dyld_info_cmd_index.?].DyldInfoOnly;
const allocated_size = self.allocatedSizeLinkedit(dyld_info.export_off);
const needed_size = mem.alignForwardGeneric(u64, buffer.len, @alignOf(u64));
@@ -3357,7 +3355,6 @@ fn populateLazyBindOffsetsInStubHelper(self: *MachO, buffer: []const u8) !void {
error.EndOfStream => break,
else => return err,
};
const imm: u8 = inst & macho.BIND_IMMEDIATE_MASK;
const opcode: u8 = inst & macho.BIND_OPCODE_MASK;
switch (opcode) {

View File

@@ -500,7 +500,6 @@ pub fn flushModule(self: *DebugSymbols, allocator: *Allocator, options: link.Opt
if (self.debug_aranges_section_dirty) {
const dwarf_segment = &self.load_commands.items[self.dwarf_segment_cmd_index.?].Segment;
const debug_aranges_sect = &dwarf_segment.sections.items[self.debug_aranges_section_index.?];
const debug_info_sect = dwarf_segment.sections.items[self.debug_info_section_index.?];
var di_buf = std.ArrayList(u8).init(allocator);
defer di_buf.deinit();
@@ -844,7 +843,6 @@ fn relocateSymbolTable(self: *DebugSymbols) !void {
const nsyms = nlocals + nglobals;
if (symtab.nsyms < nsyms) {
const linkedit_segment = self.load_commands.items[self.linkedit_segment_cmd_index.?].Segment;
const needed_size = nsyms * @sizeOf(macho.nlist_64);
if (needed_size > self.allocatedSizeLinkedit(symtab.symoff)) {
// Move the entire symbol table to a new location
@@ -904,11 +902,6 @@ pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const M
const tracy = trace(@src());
defer tracy.end();
const tree = decl.namespace.file_scope.tree;
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
const token_starts = tree.tokens.items(.start);
const func = decl.val.castTag(.function).?.data;
const line_off = @intCast(u28, decl.src_line + func.lbrace_line);

View File

@@ -478,7 +478,6 @@ pub fn parseDebugInfo(self: *Object) !void {
self.tu_path = try std.fs.path.join(self.allocator, &[_][]const u8{ comp_dir, name });
self.tu_mtime = mtime: {
var buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined;
const stat = try self.file.?.stat();
break :mtime @intCast(u64, @divFloor(stat.mtime, 1_000_000_000));
};

View File

@@ -432,7 +432,6 @@ fn mapAndUpdateSections(
fn updateMetadata(self: *Zld) !void {
for (self.objects.items) |object| {
const object_seg = object.load_commands.items[object.segment_cmd_index.?].Segment;
const text_seg = &self.load_commands.items[self.text_segment_cmd_index.?].Segment;
const data_const_seg = &self.load_commands.items[self.data_const_segment_cmd_index.?].Segment;
const data_seg = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
@@ -1294,7 +1293,6 @@ fn allocateLinkeditSegment(self: *Zld) void {
}
fn allocateSegment(self: *Zld, index: u16, offset: u64) !void {
const base_vmaddr = self.load_commands.items[self.pagezero_segment_cmd_index.?].Segment.inner.vmsize;
const seg = &self.load_commands.items[index].Segment;
// Allocate the sections according to their alignment at the beginning of the segment.
@@ -1427,7 +1425,6 @@ fn writeStubHelperCommon(self: *Zld) !void {
const got = &data_const_segment.sections.items[self.got_section_index.?];
const data_segment = &self.load_commands.items[self.data_segment_cmd_index.?].Segment;
const data = &data_segment.sections.items[self.data_section_index.?];
const la_symbol_ptr = data_segment.sections.items[self.la_symbol_ptr_section_index.?];
self.stub_helper_stubs_start_off = blk: {
switch (self.arch.?) {
@@ -2654,7 +2651,6 @@ fn setEntryPoint(self: *Zld) !void {
// TODO we should respect the -entry flag passed in by the user to set a custom
// entrypoint. For now, assume default of `_main`.
const seg = self.load_commands.items[self.text_segment_cmd_index.?].Segment;
const text = seg.sections.items[self.text_section_index.?];
const sym = self.globals.get("_main") orelse return error.MissingMainEntrypoint;
const entry_sym = sym.cast(Symbol.Regular) orelse unreachable;
const ec = &self.load_commands.items[self.main_cmd_index.?].Main;
@@ -2862,7 +2858,6 @@ fn populateLazyBindOffsetsInStubHelper(self: *Zld, buffer: []const u8) !void {
error.EndOfStream => break,
else => return err,
};
const imm: u8 = inst & macho.BIND_IMMEDIATE_MASK;
const opcode: u8 = inst & macho.BIND_OPCODE_MASK;
switch (opcode) {
@@ -2959,6 +2954,7 @@ fn writeDebugInfo(self: *Zld) !void {
for (self.objects.items) |object| {
const tu_path = object.tu_path orelse continue;
const tu_mtime = object.tu_mtime orelse continue;
_ = tu_mtime;
const dirname = std.fs.path.dirname(tu_path) orelse "./";
// Current dir
try stabs.append(.{

View File

@@ -175,7 +175,6 @@ pub const Parser = struct {
const rel_type = @intToEnum(macho.reloc_type_x86_64, rel.r_type);
const target = Relocation.Target.from_reloc(rel, parser.symbols);
const is_extern = rel.r_extern == 1;
const offset = @intCast(u32, rel.r_address);
const inst = parser.code[offset..][0..4];

View File

@@ -496,7 +496,6 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
if (data_size != 0) {
const header_offset = try reserveVecSectionHeader(file);
const writer = file.writer();
var len: u32 = 0;
// index to memory section (currently, there can only be 1 memory section in wasm)
try leb.writeULEB128(writer, @as(u32, 0));

View File

@@ -3749,7 +3749,6 @@ pub fn cmdAstCheck(
var color: Color = .auto;
var want_output_text = false;
var have_zig_source_file = false;
var zig_source_file: ?[]const u8 = null;
var i: usize = 0;

View File

@@ -372,11 +372,9 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
try child.spawn();
const stdout_reader = child.stdout.?.reader();
const stderr_reader = child.stderr.?.reader();
// TODO https://github.com/ziglang/zig/issues/6343
const stdout = try stdout_reader.readAllAlloc(arena, std.math.maxInt(u32));
const stderr = try stderr_reader.readAllAlloc(arena, 10 * 1024 * 1024);
const term = child.wait() catch |err| {

View File

@@ -143,7 +143,6 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const dirname = path.dirname(src_file).?;
const basename = path.basename(src_file);
const noextbasename = basename[0 .. basename.len - std.fs.path.extension(basename).len];
const before_arch_dir = path.dirname(dirname).?;
const dirbasename = path.basename(dirname);
var is_arch_specific = false;

View File

@@ -281,12 +281,6 @@ test "default state" {
};
defer function.deinit();
var mock_instruction = ir.Inst{
.tag = .breakpoint,
.ty = Type.initTag(.void),
.src = .unneeded,
};
try expect(!function.register_manager.isRegAllocated(.r2));
try expect(!function.register_manager.isRegAllocated(.r3));
try expect(function.register_manager.isRegFree(.r2));
@@ -365,12 +359,6 @@ test "tryAllocRegs" {
};
defer function.deinit();
var mock_instruction = ir.Inst{
.tag = .breakpoint,
.ty = Type.initTag(.void),
.src = .unneeded,
};
try expectEqual([_]MockRegister2{ .r0, .r1, .r2 }, function.register_manager.tryAllocRegs(3, .{ null, null, null }, &.{}).?);
// Exceptions

View File

@@ -1321,7 +1321,6 @@ fn transConvertVectorExpr(
const src_type = qualTypeCanon(src_expr.getType());
const src_vector_ty = @ptrCast(*const clang.VectorType, src_type);
const src_element_qt = src_vector_ty.getElementType();
const src_element_type_node = try transQualType(c, &block_scope.base, src_element_qt, base_stmt.getBeginLoc());
const src_expr_node = try transExpr(c, &block_scope.base, src_expr, .used);
@@ -3802,7 +3801,6 @@ fn transBinaryConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang
const res_is_bool = qualTypeIsBoolean(qt);
const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt);
const cond_expr = casted_stmt.getCond();
const true_expr = casted_stmt.getTrueExpr();
const false_expr = casted_stmt.getFalseExpr();
// c: (cond_expr)?:(false_expr)
@@ -4336,8 +4334,6 @@ fn transCreateNodeNumber(c: *Context, num: anytype, num_kind: enum { int, float
}
fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: Node, proto_alias: *ast.Payload.Func) !Node {
const scope = &c.global_scope.base;
var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa);
defer fn_params.deinit();

View File

@@ -3013,7 +3013,7 @@ pub const Type = extern union {
.base = .{ .tag = t },
.data = data,
};
return Type{ .ptr_otherwise = &ptr.base };
return file_struct.Type{ .ptr_otherwise = &ptr.base };
}
pub fn Data(comptime t: Tag) type {
@@ -3163,7 +3163,6 @@ pub const CType = enum {
longdouble,
pub fn sizeInBits(self: CType, target: Target) u16 {
const arch = target.cpu.arch;
switch (target.os.tag) {
.freestanding, .other => switch (target.cpu.arch) {
.msp430 => switch (self) {

View File

@@ -13,6 +13,7 @@ test "simple coroutine suspend and resume" {
resume frame;
try expect(global_x == 3);
const af: anyframe->void = &frame;
_ = af;
resume frame;
try expect(global_x == 4);
}
@@ -45,6 +46,7 @@ test "suspend at end of function" {
fn doTheTest() !void {
try expect(x == 1);
const p = async suspendAtEnd();
_ = p;
try expect(x == 2);
}
@@ -132,6 +134,7 @@ test "@frameSize" {
}
fn other(param: i32) void {
var local: i32 = undefined;
_ = local;
suspend {}
}
};
@@ -181,6 +184,7 @@ test "coroutine suspend, resume" {
test "coroutine suspend with block" {
const p = async testSuspendBlock();
_ = p;
try expect(!global_result);
resume a_promise;
try expect(global_result);
@@ -207,6 +211,7 @@ var await_final_result: i32 = 0;
test "coroutine await" {
await_seq('a');
var p = async await_amain();
_ = p;
await_seq('f');
resume await_a_promise;
await_seq('i');
@@ -243,6 +248,7 @@ var early_final_result: i32 = 0;
test "coroutine await early return" {
early_seq('a');
var p = async early_amain();
_ = p;
early_seq('f');
try expect(early_final_result == 1234);
try expect(std.mem.eql(u8, &early_points, "abcdef"));
@@ -276,6 +282,7 @@ test "async function with dot syntax" {
}
};
const p = async S.foo();
_ = p;
try expect(S.y == 2);
}
@@ -362,11 +369,13 @@ test "error return trace across suspend points - early return" {
const p = nonFailing();
resume p;
const p2 = async printTrace(p);
_ = p2;
}
test "error return trace across suspend points - async return" {
const p = nonFailing();
const p2 = async printTrace(p);
_ = p2;
resume p;
}
@@ -396,6 +405,7 @@ fn printTrace(p: anyframe->(anyerror!void)) callconv(.Async) void {
test "break from suspend" {
var my_result: i32 = 1;
const p = async testBreakFromSuspend(&my_result);
_ = p;
try std.testing.expect(my_result == 2);
}
fn testBreakFromSuspend(my_result: *i32) callconv(.Async) void {
@@ -619,6 +629,7 @@ test "returning a const error from async function" {
fn amain() !void {
var download_frame = async fetchUrl(10, "a string");
const download_text = try await download_frame;
_ = download_text;
@panic("should not get here");
}
@@ -730,6 +741,7 @@ test "alignment of local variables in async functions" {
const S = struct {
fn doTheTest() !void {
var y: u8 = 123;
_ = y;
var x: u8 align(128) = 1;
try expect(@ptrToInt(&x) % 128 == 0);
}
@@ -742,6 +754,7 @@ test "no reason to resolve frame still works" {
}
fn simpleNothing() void {
var x: i32 = 1234;
_ = x;
}
test "async call a generic function" {
@@ -802,6 +815,7 @@ test "struct parameter to async function is copied to the frame" {
if (x == 0) return;
clobberStack(x - 1);
var y: i32 = x;
_ = y;
}
fn bar(f: *@Frame(foo)) void {
@@ -1654,6 +1668,7 @@ test "@asyncCall with pass-by-value arguments" {
[_]u8{ 1, 2, 3, 4, 5 },
F2,
});
_ = frame_ptr;
}
test "@asyncCall with arguments having non-standard alignment" {
@@ -1673,4 +1688,5 @@ test "@asyncCall with arguments having non-standard alignment" {
// The function pointer must not be comptime-known.
var t = S.f;
var frame_ptr = @asyncCall(&buffer, {}, t, .{ F0, undefined, F1 });
_ = frame_ptr;
}

View File

@@ -97,7 +97,6 @@ test "cmpxchg with ptr" {
test "cmpxchg with ignored result" {
var x: i32 = 1234;
var ptr = &x;
_ = @cmpxchgStrong(i32, &x, 1234, 5678, .Monotonic, .Monotonic);
@@ -195,7 +194,6 @@ fn testAtomicRmwInt() !void {
test "atomics with different types" {
try testAtomicsWithType(bool, true, false);
inline for (.{ u1, i4, u5, i15, u24 }) |T| {
var x: T = 0;
try testAtomicsWithType(T, 0, 1);
}
try testAtomicsWithType(u0, 0, 0);

View File

@@ -12,6 +12,7 @@ var await_final_result = Foo{ .x = 0 };
test "coroutine await struct" {
await_seq('a');
var p = async await_amain();
_ = p;
await_seq('f');
resume await_a_promise;
await_seq('i');

View File

@@ -100,5 +100,5 @@ test "comptime shr of BigInt" {
}
test "comptime shift safety check" {
const x = @as(usize, 42) << @sizeOf(usize);
_ = @as(usize, 42) << @sizeOf(usize);
}

View File

@@ -4,4 +4,5 @@ pub const S = extern struct {
};
test "bug 1467" {
const s: S = undefined;
_ = s;
}

View File

@@ -7,4 +7,8 @@ const B = fn (A) void;
test "allow these dependencies" {
var a: A = undefined;
var b: B = undefined;
if (false) {
a;
b;
}
}

View File

@@ -1,6 +1,8 @@
test "fixed" {
const a: *void = undefined;
const b: *[1]void = a;
_ = b;
const c: *[0]u8 = undefined;
const d: []u8 = c;
_ = d;
}

View File

@@ -8,4 +8,5 @@ test "fixed" {
var ctr = Container{
.params = NoteParams{},
};
_ = ctr;
}

View File

@@ -1,5 +1,5 @@
fn f(buf: []u8) void {
var ptr = &buf[@sizeOf(u32)];
_ = &buf[@sizeOf(u32)];
}
test "crash" {

View File

@@ -5,4 +5,5 @@ test "@Type should resolve its children types" {
comptime var sparse_info = @typeInfo(anyerror!sparse);
sparse_info.ErrorUnion.payload = dense;
const B = @Type(sparse_info);
_ = B;
}

View File

@@ -30,4 +30,8 @@ test "comptime struct return should not return the same instance" {
//a second parameter is required to trigger the bug
const ValA = constCount(&CountBy.One, 12);
const ValB = constCount(&CountBy.One, 15);
if (false) {
ValA;
ValB;
}
}

View File

@@ -102,6 +102,7 @@ fn castToOptionalTypeError(z: i32) !void {
const f = z;
const g: anyerror!?i32 = f;
_ = g catch {};
const a = A{ .a = z };
const b: anyerror!?A = a;
@@ -114,7 +115,9 @@ test "implicitly cast from int to anyerror!?T" {
}
fn implicitIntLitToOptional() void {
const f: ?i32 = 1;
_ = f;
const g: anyerror!?i32 = 1;
_ = g catch {};
}
test "return null from fn() anyerror!?&T" {

View File

@@ -111,6 +111,8 @@ test "enum type" {
.y = 5678,
},
};
try expect(foo1.One == 13);
try expect(foo2.Two.x == 1234 and foo2.Two.y == 5678);
const bar = Bar.B;
try expect(bar == Bar.B);

View File

@@ -103,6 +103,7 @@ fn testErrorSetType() !void {
const a: MyErrSet!i32 = 5678;
const b: MyErrSet!i32 = MyErrSet.OutOfMemory;
try expect(b catch error.OutOfMemory == error.OutOfMemory);
if (a) |value| try expect(value == 5678) else |err| switch (err) {
error.OutOfMemory => unreachable,
@@ -162,6 +163,7 @@ fn testErrToIntWithOnePossibleValue(
test "empty error union" {
const x = error{} || error{};
_ = x;
}
test "error union peer type resolution" {
@@ -204,6 +206,7 @@ fn entry() void {
fn foo2(f: fn () anyerror!void) void {
const x = f();
x catch {};
}
fn bar2() (error{}!void) {}
@@ -338,6 +341,7 @@ test "optional error set is the same size as error set" {
test "debug info for optional error set" {
const SomeError = error{Hello};
var a_local_variable: ?SomeError = null;
_ = a_local_variable;
}
test "nested catch" {
@@ -349,7 +353,7 @@ test "nested catch" {
return error.Wrong;
}
fn func() anyerror!Foo {
const x = fail() catch
_ = fail() catch
fail() catch
return error.Bad;
unreachable;

View File

@@ -184,6 +184,7 @@ fn testTryToTrickEvalWithRuntimeIf(b: bool) usize {
comptime var i: usize = 0;
inline while (i < 10) : (i += 1) {
const result = if (b) false else true;
_ = result;
}
comptime {
return i;
@@ -195,6 +196,7 @@ test "inlined loop has array literal with elided runtime scope on first iteratio
comptime var i: usize = 0;
inline while (i < 2) : (i += 1) {
const result = if (i == 0) [1]i32{2} else runtime;
_ = result;
}
comptime {
try expect(i == 2);

View File

@@ -18,5 +18,5 @@ test "import in non-toplevel scope" {
}
test "import empty file" {
const empty = @import("import/empty.zig");
_ = @import("import/empty.zig");
}

View File

@@ -5,7 +5,7 @@ test "casting random address to function pointer" {
fn randomAddressToFunction() void {
var addr: usize = 0xdeadbeef;
var ptr = @intToPtr(fn () void, addr);
_ = @intToPtr(fn () void, addr);
}
test "mutate through ptr initialized with constant intToPtr value" {

View File

@@ -5,6 +5,7 @@ fn foo(id: u64) !i32 {
1 => getErrInt(),
2 => {
const size = try getErrInt();
_ = size;
return try getErrInt();
},
else => error.ItBroke,

View File

@@ -333,6 +333,12 @@ test "quad hex float literal parsing in range" {
const b = 0x1.dedafcff354b6ae9758763545432p-9;
const c = 0x1.2f34dd5f437e849b4baab754cdefp+4534;
const d = 0x1.edcbff8ad76ab5bf46463233214fp-435;
if (false) {
a;
b;
c;
d;
}
}
test "quad hex float literal parsing accurate" {
@@ -457,6 +463,11 @@ test "hex float literal within range" {
const a = 0x1.0p16383;
const b = 0x0.1p16387;
const c = 0x1.0p-16382;
if (false) {
a;
b;
c;
}
}
test "truncating shift left" {

View File

@@ -234,6 +234,7 @@ test "compile time global reinterpret" {
test "explicit cast maybe pointers" {
const a: ?*i32 = undefined;
const b: ?*f32 = @ptrCast(?*f32, a);
_ = b;
}
test "generic malloc free" {

View File

@@ -39,6 +39,7 @@ test "test maybe object and get a pointer to the inner value" {
test "rhs maybe unwrap return" {
const x: ?bool = true;
const y = x orelse return;
_ = y;
}
test "maybe return" {

View File

@@ -128,6 +128,7 @@ test "nested orelse" {
const x = maybe() orelse
maybe() orelse
return null;
_ = x;
unreachable;
}
const Foo = struct {

View File

@@ -65,6 +65,10 @@ test "assigning integer to C pointer" {
var x: i32 = 0;
var ptr: [*c]u8 = 0;
var ptr2: [*c]u8 = x;
if (false) {
ptr;
ptr2;
}
}
test "implicit cast single item pointer to C pointer and back" {
@@ -78,7 +82,6 @@ test "implicit cast single item pointer to C pointer and back" {
test "C pointer comparison and arithmetic" {
const S = struct {
fn doTheTest() !void {
var one: usize = 1;
var ptr1: [*c]u32 = 0;
var ptr2 = ptr1 + 10;
try expect(ptr1 == 0);
@@ -325,6 +328,7 @@ test "@ptrToInt on null optional at comptime" {
{
const pointer = @intToPtr(?*u8, 0x000);
const x = @ptrToInt(pointer);
_ = x;
comptime try expect(0 == @ptrToInt(pointer));
}
{

View File

@@ -195,11 +195,11 @@ test "branching logic inside @TypeOf" {
fn fn1(alpha: bool) void {
const n: usize = 7;
const v = if (alpha) n else @sizeOf(usize);
_ = if (alpha) n else @sizeOf(usize);
}
test "lazy @sizeOf result is checked for definedness" {
const f = fn1;
_ = fn1;
}
test "@bitSizeOf" {

View File

@@ -104,6 +104,7 @@ test "obtaining a null terminated slice" {
// now we obtain a null terminated slice:
const ptr = buf[0..3 :0];
_ = ptr;
var runtime_len: usize = 3;
const ptr2 = buf[0..runtime_len :0];

Some files were not shown because too many files have changed in this diff.