diff --git a/lib/compiler/resinator/cvtres.zig b/lib/compiler/resinator/cvtres.zig index 27e14ae9a3..e181b9390e 100644 --- a/lib/compiler/resinator/cvtres.zig +++ b/lib/compiler/resinator/cvtres.zig @@ -665,19 +665,16 @@ const ResourceTree = struct { pub fn writeCoff( self: *const ResourceTree, allocator: Allocator, - writer: anytype, + w: anytype, resources_in_data_order: []const Resource, lengths: Lengths, coff_string_table: *StringTable, ) ![]const std.coff.Symbol { if (self.type_to_name_map.count() == 0) { - try writer.writeByteNTimes(0, 16); + try w.writeByteNTimes(0, 16); return &.{}; } - var counting_writer = std.io.countingWriter(writer); - const w = counting_writer.writer(); - var level2_list: std.ArrayListUnmanaged(*const NameToLanguageMap) = .empty; defer level2_list.deinit(allocator); @@ -735,7 +732,6 @@ const ResourceTree = struct { try level2_list.append(allocator, name_to_lang_map); } } - std.debug.assert(counting_writer.bytes_written == level2_start); const level3_start = level2_start + lengths.level2; var level3_address = level3_start; @@ -771,7 +767,6 @@ const ResourceTree = struct { try level3_list.append(allocator, lang_to_resources_map); } } - std.debug.assert(counting_writer.bytes_written == level3_start); var reloc_addresses = try allocator.alloc(u32, resources_in_data_order.len); defer allocator.free(reloc_addresses); @@ -813,7 +808,6 @@ const ResourceTree = struct { try resources_list.append(allocator, reloc_resource); } } - std.debug.assert(counting_writer.bytes_written == data_entries_start); for (resources_list.items, 0..) 
|reloc_resource, i| { // TODO: This logic works but is convoluted, would be good to clean this up @@ -827,7 +821,6 @@ const ResourceTree = struct { }; try w.writeStructEndian(data_entry, .little); } - std.debug.assert(counting_writer.bytes_written == strings_start); for (self.rsrc_string_table.keys()) |v| { const str = v.name; diff --git a/lib/std/Io.zig b/lib/std/Io.zig index 746a457c5b..3ad511242b 100644 --- a/lib/std/Io.zig +++ b/lib/std/Io.zig @@ -432,10 +432,6 @@ pub const FixedBufferStream = @import("Io/fixed_buffer_stream.zig").FixedBufferS /// Deprecated in favor of `Reader`. pub const fixedBufferStream = @import("Io/fixed_buffer_stream.zig").fixedBufferStream; /// Deprecated with no replacement; inefficient pattern -pub const CountingWriter = @import("Io/counting_writer.zig").CountingWriter; -/// Deprecated with no replacement; inefficient pattern -pub const countingWriter = @import("Io/counting_writer.zig").countingWriter; -/// Deprecated with no replacement; inefficient pattern pub const CountingReader = @import("Io/counting_reader.zig").CountingReader; /// Deprecated with no replacement; inefficient pattern pub const countingReader = @import("Io/counting_reader.zig").countingReader; @@ -917,7 +913,6 @@ test { _ = Reader; _ = Writer; _ = BufferedWriter; - _ = CountingWriter; _ = CountingReader; _ = FixedBufferStream; _ = tty; diff --git a/lib/std/Io/counting_writer.zig b/lib/std/Io/counting_writer.zig deleted file mode 100644 index 32c3ed930f..0000000000 --- a/lib/std/Io/counting_writer.zig +++ /dev/null @@ -1,39 +0,0 @@ -const std = @import("../std.zig"); -const io = std.io; -const testing = std.testing; - -/// A Writer that counts how many bytes has been written to it. 
-pub fn CountingWriter(comptime WriterType: type) type { - return struct { - bytes_written: u64, - child_stream: WriterType, - - pub const Error = WriterType.Error; - pub const Writer = io.GenericWriter(*Self, Error, write); - - const Self = @This(); - - pub fn write(self: *Self, bytes: []const u8) Error!usize { - const amt = try self.child_stream.write(bytes); - self.bytes_written += amt; - return amt; - } - - pub fn writer(self: *Self) Writer { - return .{ .context = self }; - } - }; -} - -pub fn countingWriter(child_stream: anytype) CountingWriter(@TypeOf(child_stream)) { - return .{ .bytes_written = 0, .child_stream = child_stream }; -} - -test CountingWriter { - var counting_stream = countingWriter(std.io.null_writer); - const stream = counting_stream.writer(); - - const bytes = "yay" ** 100; - stream.writeAll(bytes) catch unreachable; - try testing.expect(counting_stream.bytes_written == bytes.len); -} diff --git a/lib/std/crypto/phc_encoding.zig b/lib/std/crypto/phc_encoding.zig index 507a02b983..e53977b42b 100644 --- a/lib/std/crypto/phc_encoding.zig +++ b/lib/std/crypto/phc_encoding.zig @@ -5,6 +5,7 @@ const fmt = std.fmt; const io = std.io; const mem = std.mem; const meta = std.meta; +const Writer = std.Io.Writer; const fields_delimiter = "$"; const fields_delimiter_scalar = '$'; @@ -188,19 +189,20 @@ pub fn deserialize(comptime HashResult: type, str: []const u8) Error!HashResult /// /// `params` can also include any additional parameters. 
pub fn serialize(params: anytype, str: []u8) Error![]const u8 { - var buf = io.fixedBufferStream(str); - try serializeTo(params, buf.writer()); - return buf.getWritten(); + var w: Writer = .fixed(str); + serializeTo(params, &w) catch return error.NoSpaceLeft; + return w.buffered(); } /// Compute the number of bytes required to serialize `params` pub fn calcSize(params: anytype) usize { - var buf = io.countingWriter(io.null_writer); - serializeTo(params, buf.writer()) catch unreachable; - return @as(usize, @intCast(buf.bytes_written)); + var trash: [128]u8 = undefined; + var d: Writer.Discarding = .init(&trash); + serializeTo(params, &d.writer) catch unreachable; + return @intCast(d.fullCount()); } -fn serializeTo(params: anytype, out: anytype) !void { +fn serializeTo(params: anytype, out: *std.Io.Writer) !void { const HashResult = @TypeOf(params); if (@hasField(HashResult, version_param_name)) { diff --git a/lib/std/crypto/scrypt.zig b/lib/std/crypto/scrypt.zig index 23202e7937..f12306a1dc 100644 --- a/lib/std/crypto/scrypt.zig +++ b/lib/std/crypto/scrypt.zig @@ -311,9 +311,10 @@ const crypt_format = struct { /// Compute the number of bytes required to serialize `params` pub fn calcSize(params: anytype) usize { - var buf = io.countingWriter(io.null_writer); - serializeTo(params, buf.writer()) catch unreachable; - return @as(usize, @intCast(buf.bytes_written)); + var trash: [128]u8 = undefined; + var d: std.Io.Writer.Discarding = .init(&trash); + serializeTo(params, &d.writer) catch unreachable; + return @intCast(d.fullCount()); } fn serializeTo(params: anytype, out: anytype) !void { diff --git a/src/arch/x86_64/encoder.zig b/src/arch/x86_64/encoder.zig index 43d23af5fc..ed31c068dc 100644 --- a/src/arch/x86_64/encoder.zig +++ b/src/arch/x86_64/encoder.zig @@ -1204,11 +1204,10 @@ const TestEncode = struct { mnemonic: Instruction.Mnemonic, ops: []const Instruction.Operand, ) !void { - var stream = std.io.fixedBufferStream(&enc.buffer); - var count_writer = 
std.io.countingWriter(stream.writer()); + var writer: std.Io.Writer = .fixed(&enc.buffer); const inst: Instruction = try .new(.none, mnemonic, ops); - try inst.encode(count_writer.writer(), .{}); - enc.index = count_writer.bytes_written; + try inst.encode(&writer, .{}); + enc.index = writer.buffered().len; } fn code(enc: TestEncode) []const u8 { diff --git a/src/link/Elf.zig b/src/link/Elf.zig index 785f200928..be6c050e5e 100644 --- a/src/link/Elf.zig +++ b/src/link/Elf.zig @@ -3152,10 +3152,11 @@ fn writeSyntheticSections(self: *Elf) !void { if (self.section_indexes.gnu_hash) |shndx| { const shdr = slice.items(.shdr)[shndx]; - var buffer = try std.ArrayList(u8).initCapacity(gpa, self.gnu_hash.size()); - defer buffer.deinit(); - try self.gnu_hash.write(self, buffer.writer()); - try self.pwriteAll(buffer.items, shdr.sh_offset); + var aw: std.Io.Writer.Allocating = .init(gpa); + defer aw.deinit(); + try aw.ensureUnusedCapacity(self.gnu_hash.size()); + try self.gnu_hash.write(self, &aw.writer); + try self.pwriteAll(aw.written(), shdr.sh_offset); } if (self.section_indexes.versym) |shndx| { diff --git a/src/link/Elf/synthetic_sections.zig b/src/link/Elf/synthetic_sections.zig index 2ab71d8c49..efad44f68b 100644 --- a/src/link/Elf/synthetic_sections.zig +++ b/src/link/Elf/synthetic_sections.zig @@ -1237,17 +1237,14 @@ pub const GnuHashSection = struct { return header_size + hash.num_bloom * 8 + hash.num_buckets * 4 + hash.num_exports * 4; } - pub fn write(hash: GnuHashSection, elf_file: *Elf, writer: anytype) !void { + pub fn write(hash: GnuHashSection, elf_file: *Elf, writer: *std.Io.Writer) !void { const exports = getExports(elf_file); const export_off = elf_file.dynsym.count() - hash.num_exports; - var counting = std.io.countingWriter(writer); - const cwriter = counting.writer(); - - try cwriter.writeInt(u32, hash.num_buckets, .little); - try cwriter.writeInt(u32, export_off, .little); - try cwriter.writeInt(u32, hash.num_bloom, .little); - try 
cwriter.writeInt(u32, bloom_shift, .little); + try writer.writeInt(u32, hash.num_buckets, .little); + try writer.writeInt(u32, export_off, .little); + try writer.writeInt(u32, hash.num_bloom, .little); + try writer.writeInt(u32, bloom_shift, .little); const comp = elf_file.base.comp; const gpa = comp.gpa; @@ -1271,7 +1268,7 @@ pub const GnuHashSection = struct { bloom[idx] |= @as(u64, 1) << @as(u6, @intCast((h >> bloom_shift) % 64)); } - try cwriter.writeAll(mem.sliceAsBytes(bloom)); + try writer.writeAll(mem.sliceAsBytes(bloom)); // Fill in the hash bucket indices const buckets = try gpa.alloc(u32, hash.num_buckets); @@ -1284,7 +1281,7 @@ pub const GnuHashSection = struct { } } - try cwriter.writeAll(mem.sliceAsBytes(buckets)); + try writer.writeAll(mem.sliceAsBytes(buckets)); // Finally, write the hash table const table = try gpa.alloc(u32, hash.num_exports); @@ -1300,9 +1297,7 @@ pub const GnuHashSection = struct { } } - try cwriter.writeAll(mem.sliceAsBytes(table)); - - assert(counting.bytes_written == hash.size()); + try writer.writeAll(mem.sliceAsBytes(table)); } pub fn hasher(name: [:0]const u8) u32 { diff --git a/src/link/MachO/dyld_info/Trie.zig b/src/link/MachO/dyld_info/Trie.zig index ce56101e54..d96f2a54c8 100644 --- a/src/link/MachO/dyld_info/Trie.zig +++ b/src/link/MachO/dyld_info/Trie.zig @@ -186,17 +186,18 @@ const FinalizeNodeResult = struct { /// Updates offset of this node in the output byte stream. 
fn finalizeNode(self: *Trie, node_index: Node.Index, offset_in_trie: u32) !FinalizeNodeResult { - var stream = std.io.countingWriter(std.io.null_writer); - const writer = stream.writer(); + var trash_buffer: [64]u8 = undefined; + var stream: std.Io.Writer.Discarding = .init(&trash_buffer); + const writer = &stream.writer; const slice = self.nodes.slice(); var node_size: u32 = 0; if (slice.items(.is_terminal)[node_index]) { const export_flags = slice.items(.export_flags)[node_index]; const vmaddr_offset = slice.items(.vmaddr_offset)[node_index]; - try leb.writeUleb128(writer, export_flags); - try leb.writeUleb128(writer, vmaddr_offset); - try leb.writeUleb128(writer, stream.bytes_written); + try writer.writeUleb128(export_flags); + try writer.writeUleb128(vmaddr_offset); + try writer.writeUleb128(stream.fullCount()); } else { node_size += 1; // 0x0 for non-terminal nodes } @@ -206,13 +207,13 @@ fn finalizeNode(self: *Trie, node_index: Node.Index, offset_in_trie: u32) !Final const edge = &self.edges.items[edge_index]; const next_node_offset = slice.items(.trie_offset)[edge.node]; node_size += @intCast(edge.label.len + 1); - try leb.writeUleb128(writer, next_node_offset); + try writer.writeUleb128(next_node_offset); } const trie_offset = slice.items(.trie_offset)[node_index]; const updated = offset_in_trie != trie_offset; slice.items(.trie_offset)[node_index] = offset_in_trie; - node_size += @intCast(stream.bytes_written); + node_size += @intCast(stream.fullCount()); return .{ .node_size = node_size, .updated = updated }; }