Merge pull request #24472 from ziglang/zig-fmt
zig fmt: update related functionality to new I/O API
@@ -446,7 +446,6 @@ set(ZIG_STAGE2_SOURCES
    lib/std/dwarf/OP.zig
    lib/std/dwarf/TAG.zig
    lib/std/elf.zig
    lib/std/fifo.zig
    lib/std/fmt.zig
    lib/std/fmt/parse_float.zig
    lib/std/fs.zig
@@ -462,7 +461,6 @@ set(ZIG_STAGE2_SOURCES
    lib/std/heap.zig
    lib/std/heap/arena_allocator.zig
    lib/std/json.zig
    lib/std/json/stringify.zig
    lib/std/leb128.zig
    lib/std/log.zig
    lib/std/macho.zig
@@ -505,6 +503,7 @@ set(ZIG_STAGE2_SOURCES
    lib/std/unicode.zig
    lib/std/zig.zig
    lib/std/zig/Ast.zig
    lib/std/zig/Ast/Render.zig
    lib/std/zig/AstGen.zig
    lib/std/zig/AstRlAnnotate.zig
    lib/std/zig/LibCInstallation.zig
@@ -513,7 +512,6 @@ set(ZIG_STAGE2_SOURCES
    lib/std/zig/WindowsSdk.zig
    lib/std/zig/Zir.zig
    lib/std/zig/c_builtins.zig
    lib/std/zig/render.zig
    lib/std/zig/string_literal.zig
    lib/std/zig/system.zig
    lib/std/zig/system/NativePaths.zig

@@ -1824,7 +1824,7 @@ pub fn main() !void {
    };
    defer tree.deinit(gpa);

    const formatted = try tree.render(arena);
    const formatted = try tree.renderAlloc(arena);
    try std.fs.File.stdout().writeAll(formatted);
    return std.process.cleanExit();
}

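The hunk above swaps `Ast.render` for the new `Ast.renderAlloc`, which allocates the formatted output and leaves streaming to the `std.Io.Writer` API. A minimal sketch of the new call sequence; the inline source string and the choice of `std.heap.page_allocator` are illustrative, not part of the commit:

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;

    // `Ast.parse` takes a sentinel-terminated source slice.
    var tree = try std.zig.Ast.parse(gpa, "const answer = 42;\n", .zig);
    defer tree.deinit(gpa);

    // renderAlloc allocates the formatted source with `gpa`; the caller frees it.
    const formatted = try tree.renderAlloc(gpa);
    defer gpa.free(formatted);

    try std.fs.File.stdout().writeAll(formatted);
}
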
@@ -433,14 +433,18 @@ fn parse(file_name: []const u8, source: []u8) Oom!Ast {
    defer ast.deinit(gpa);

    const token_offsets = ast.tokens.items(.start);
    var rendered_err: std.ArrayListUnmanaged(u8) = .{};
    defer rendered_err.deinit(gpa);
    var rendered_err: std.Io.Writer.Allocating = .init(gpa);
    defer rendered_err.deinit();
    for (ast.errors) |err| {
        const err_offset = token_offsets[err.token] + ast.errorOffset(err);
        const err_loc = std.zig.findLineColumn(ast.source, err_offset);
        rendered_err.clearRetainingCapacity();
        try ast.renderError(err, rendered_err.writer(gpa));
        log.err("{s}:{d}:{d}: {s}", .{ file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.items });
        ast.renderError(err, &rendered_err.writer) catch |e| switch (e) {
            error.WriteFailed => return error.OutOfMemory,
        };
        log.err("{s}:{d}:{d}: {s}", .{
            file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.getWritten(),
        });
    }
    return Ast.parse(gpa, "", .zig);
}

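`std.Io.Writer.Allocating` replaces the ad-hoc `ArrayListUnmanaged(u8)` plus `writer(gpa)` pairing: the allocator is bound at init, the `writer` field is a plain `std.Io.Writer`, and allocation failure surfaces as `error.WriteFailed` (which the hunk above folds back into `error.OutOfMemory`). A small sketch of the reuse pattern from that loop:

const std = @import("std");

test "Writer.Allocating collects and reuses a buffer" {
    var aw: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer aw.deinit();

    try aw.writer.print("{s}:{d}", .{ "line", 42 });
    try std.testing.expectEqualStrings("line:42", aw.getWritten());

    // Reuse the allocation for the next message, as the error loop does.
    aw.clearRetainingCapacity();
    try std.testing.expect(aw.getWritten().len == 0);
}
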
@@ -246,33 +246,40 @@ pub fn appendRemaining(
    limit: Limit,
) LimitedAllocError!void {
    assert(r.buffer.len != 0); // Needed to detect limit exceeded without losing data.
    const buffer = r.buffer;
    const buffer_contents = buffer[r.seek..r.end];
    const buffer_contents = r.buffer[r.seek..r.end];
    const copy_len = limit.minInt(buffer_contents.len);
    try list.ensureUnusedCapacity(gpa, copy_len);
    @memcpy(list.unusedCapacitySlice()[0..copy_len], buffer[0..copy_len]);
    list.items.len += copy_len;
    try list.appendSlice(gpa, r.buffer[0..copy_len]);
    r.seek += copy_len;
    if (copy_len == buffer_contents.len) {
        r.seek = 0;
        r.end = 0;
    }
    var remaining = limit.subtract(copy_len).?;
    if (buffer_contents.len - copy_len != 0) return error.StreamTooLong;
    r.seek = 0;
    r.end = 0;
    var remaining = @intFromEnum(limit) - copy_len;
    while (true) {
        try list.ensureUnusedCapacity(gpa, 1);
        const dest = remaining.slice(list.unusedCapacitySlice());
        const additional_buffer: []u8 = if (@intFromEnum(remaining) == dest.len) buffer else &.{};
        const n = readVec(r, &.{ dest, additional_buffer }) catch |err| switch (err) {
            error.EndOfStream => break,
            error.ReadFailed => return error.ReadFailed,
        };
        if (n > dest.len) {
            r.end = n - dest.len;
            list.items.len += dest.len;
            return error.StreamTooLong;
        const cap = list.unusedCapacitySlice();
        const dest = cap[0..@min(cap.len, remaining)];
        if (remaining - dest.len == 0) {
            // Additionally provides `buffer` to detect end.
            const new_remaining = readVecInner(r, &.{}, dest, remaining) catch |err| switch (err) {
                error.EndOfStream => {
                    if (r.bufferedLen() != 0) return error.StreamTooLong;
                    return;
                },
                error.ReadFailed => return error.ReadFailed,
            };
            list.items.len += remaining - new_remaining;
            remaining = new_remaining;
        } else {
            // Leave `buffer` empty, appending directly to `list`.
            var dest_w: Writer = .fixed(dest);
            const n = r.vtable.stream(r, &dest_w, .limited(dest.len)) catch |err| switch (err) {
                error.WriteFailed => unreachable, // Prevented by the limit.
                error.EndOfStream => return,
                error.ReadFailed => return error.ReadFailed,
            };
            list.items.len += n;
            remaining -= n;
        }
        list.items.len += n;
        remaining = remaining.subtract(n).?;
    }
}

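The rewrite replaces the `Limit`-typed loop arithmetic (`subtract`, `@intFromEnum`) with a plain `usize` countdown, and splits the tail read between `readVecInner` (which borrows the `Reader` buffer to detect `error.StreamTooLong`) and a fixed `Writer` over the list's unused capacity. A hedged usage sketch follows: `Reader.fixed` as an in-memory source is assumed by analogy with `Writer.fixed` above, while the argument shape mirrors the `appendRemaining` call in `readSourceFileToEndAlloc` later in this diff:

const std = @import("std");

test "appendRemaining drains a reader into a list" {
    const gpa = std.testing.allocator;

    // Assumption: a fixed reader treats the whole slice as buffered contents.
    var r: std.Io.Reader = .fixed("hello, zig fmt");

    var list: std.ArrayListAlignedUnmanaged(u8, .@"1") = .empty;
    defer list.deinit(gpa);

    // Alignment and limit arguments follow the readSourceFileToEndAlloc call.
    try r.appendRemaining(gpa, .@"1", &list, .limited(1024));
    try std.testing.expectEqualStrings("hello, zig fmt", list.items);
}
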
@@ -313,62 +320,68 @@ pub fn readVecLimit(r: *Reader, data: []const []u8, limit: Limit) Error!usize {
        // buffer capacity requirements met.
        r.seek = 0;
        r.end = 0;
        const first = buf[copy_len..];
        const middle = data[i + 1 ..];
        var wrapper: Writer.VectorWrapper = .{
            .it = .{
                .first = first,
                .middle = middle,
                .last = r.buffer,
            },
            .writer = .{
                .buffer = if (first.len >= r.buffer.len) first else r.buffer,
                .vtable = Writer.VectorWrapper.vtable,
            },
        };
        var n = r.vtable.stream(r, &wrapper.writer, .limited(remaining)) catch |err| switch (err) {
            error.WriteFailed => {
                assert(!wrapper.used);
                if (wrapper.writer.buffer.ptr == first.ptr) {
                    remaining -= wrapper.writer.end;
                } else {
                    assert(wrapper.writer.end <= r.buffer.len);
                    r.end = wrapper.writer.end;
                }
                break;
            },
            else => |e| return e,
        };
        if (!wrapper.used) {
            if (wrapper.writer.buffer.ptr == first.ptr) {
                remaining -= n;
            } else {
                assert(n <= r.buffer.len);
                r.end = n;
            }
            break;
        }
        if (n < first.len) {
            remaining -= n;
            break;
        }
        remaining -= first.len;
        n -= first.len;
        for (middle) |mid| {
            if (n < mid.len) {
                remaining -= n;
                break;
            }
            remaining -= mid.len;
            n -= mid.len;
        }
        assert(n <= r.buffer.len);
        r.end = n;
        remaining = try readVecInner(r, data[i + 1 ..], buf[copy_len..], remaining);
        break;
    }
    return @intFromEnum(limit) - remaining;
}

fn readVecInner(r: *Reader, middle: []const []u8, first: []u8, remaining: usize) Error!usize {
    var wrapper: Writer.VectorWrapper = .{
        .it = .{
            .first = first,
            .middle = middle,
            .last = r.buffer,
        },
        .writer = .{
            .buffer = if (first.len >= r.buffer.len) first else r.buffer,
            .vtable = Writer.VectorWrapper.vtable,
        },
    };
    // If the limit may pass beyond user buffer into Reader buffer, use
    // unlimited, allowing the Reader buffer to fill.
    const limit: Limit = l: {
        var n: usize = first.len;
        for (middle) |m| n += m.len;
        break :l if (remaining >= n) .unlimited else .limited(remaining);
    };
    var n = r.vtable.stream(r, &wrapper.writer, limit) catch |err| switch (err) {
        error.WriteFailed => {
            assert(!wrapper.used);
            if (wrapper.writer.buffer.ptr == first.ptr) {
                return remaining - wrapper.writer.end;
            } else {
                assert(wrapper.writer.end <= r.buffer.len);
                r.end = wrapper.writer.end;
                return remaining;
            }
        },
        else => |e| return e,
    };
    if (!wrapper.used) {
        if (wrapper.writer.buffer.ptr == first.ptr) {
            return remaining - n;
        } else {
            assert(n <= r.buffer.len);
            r.end = n;
            return remaining;
        }
    }
    if (n < first.len) return remaining - n;
    var result = remaining - first.len;
    n -= first.len;
    for (middle) |mid| {
        if (n < mid.len) {
            return result - n;
        }
        result -= mid.len;
        n -= mid.len;
    }
    assert(n <= r.buffer.len);
    r.end = n;
    return result;
}

pub fn buffered(r: *Reader) []u8 {
    return r.buffer[r.seek..r.end];
}

@@ -483,7 +483,7 @@ pub fn writeSplatAll(w: *Writer, data: [][]const u8, splat: usize) Error!void {

    // Deal with any left over splats
    if (data.len != 0 and truncate < data[index].len * splat) {
        std.debug.assert(index == data.len - 1);
        assert(index == data.len - 1);
        var remaining_splat = splat;
        while (true) {
            remaining_splat -= truncate / data[index].len;
@@ -840,11 +840,11 @@ pub inline fn writeStruct(w: *Writer, value: anytype, endian: std.builtin.Endian
        .auto => @compileError("ill-defined memory layout"),
        .@"extern" => {
            if (native_endian == endian) {
                return w.writeStruct(value);
                return w.writeAll(@ptrCast((&value)[0..1]));
            } else {
                var copy = value;
                std.mem.byteSwapAllFields(@TypeOf(value), &copy);
                return w.writeStruct(copy);
                return w.writeAll(@ptrCast((&copy)[0..1]));
            }
        },
        .@"packed" => {
@@ -2475,6 +2475,18 @@ pub const Allocating = struct {
        return result;
    }

    pub fn ensureUnusedCapacity(a: *Allocating, additional_count: usize) Allocator.Error!void {
        var list = a.toArrayList();
        defer a.setArrayList(list);
        return list.ensureUnusedCapacity(a.allocator, additional_count);
    }

    pub fn ensureTotalCapacity(a: *Allocating, new_capacity: usize) Allocator.Error!void {
        var list = a.toArrayList();
        defer a.setArrayList(list);
        return list.ensureTotalCapacity(a.allocator, new_capacity);
    }

    pub fn toOwnedSlice(a: *Allocating) error{OutOfMemory}![]u8 {
        var list = a.toArrayList();
        defer a.setArrayList(list);
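The new capacity helpers round-trip through `toArrayList`/`setArrayList` so the existing `ArrayList` machinery does the actual resizing. A small sketch of pre-sizing an `Allocating` writer before a burst of prints (the sizes are arbitrary):

const std = @import("std");

test "pre-size an Allocating writer" {
    var aw: std.Io.Writer.Allocating = .init(std.testing.allocator);
    defer aw.deinit();

    // Reserve up front so the prints below are unlikely to reallocate.
    try aw.ensureUnusedCapacity(256);
    for (0..10) |i| try aw.writer.print("item {d}\n", .{i});

    const owned = try aw.toOwnedSlice();
    defer std.testing.allocator.free(owned);
    try std.testing.expect(owned.len > 0);
}
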
@@ -2594,8 +2606,32 @@ test "allocating sendFile" {
    var file_reader = file_writer.moveToReader();
    try file_reader.seekTo(0);

    var allocating: std.io.Writer.Allocating = .init(std.testing.allocator);
    var allocating: std.io.Writer.Allocating = .init(testing.allocator);
    defer allocating.deinit();

    _ = try file_reader.interface.streamRemaining(&allocating.writer);
}

test writeStruct {
    var buffer: [16]u8 = undefined;
    const S = extern struct { a: u64, b: u32, c: u32 };
    const s: S = .{ .a = 1, .b = 2, .c = 3 };
    {
        var w: Writer = .fixed(&buffer);
        try w.writeStruct(s, .little);
        try testing.expectEqualSlices(u8, &.{
            1, 0, 0, 0, 0, 0, 0, 0, //
            2, 0, 0, 0, //
            3, 0, 0, 0, //
        }, &buffer);
    }
    {
        var w: Writer = .fixed(&buffer);
        try w.writeStruct(s, .big);
        try testing.expectEqualSlices(u8, &.{
            0, 0, 0, 0, 0, 0, 0, 1, //
            0, 0, 0, 2, //
            0, 0, 0, 3, //
        }, &buffer);
    }
}

@@ -2,6 +2,12 @@
//! source lives here. These APIs are provided as-is and have absolutely no API
//! guarantees whatsoever.

const std = @import("std.zig");
const tokenizer = @import("zig/tokenizer.zig");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const Writer = std.Io.Writer;

pub const ErrorBundle = @import("zig/ErrorBundle.zig");
pub const Server = @import("zig/Server.zig");
pub const Client = @import("zig/Client.zig");
@@ -355,11 +361,6 @@ pub fn serializeCpuAlloc(ally: Allocator, cpu: std.Target.Cpu) Allocator.Error![
    return buffer.toOwnedSlice();
}

const std = @import("std.zig");
const tokenizer = @import("zig/tokenizer.zig");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;

/// Return a Formatter for a Zig identifier, escaping it with `@""` syntax if needed.
///
/// See also `fmtIdFlags`.
@@ -425,7 +426,7 @@ pub const FormatId = struct {
};

/// Print the string as a Zig identifier, escaping it with `@""` syntax if needed.
fn render(ctx: FormatId, writer: *std.io.Writer) std.io.Writer.Error!void {
fn render(ctx: FormatId, writer: *Writer) Writer.Error!void {
    const bytes = ctx.bytes;
    if (isValidId(bytes) and
        (ctx.flags.allow_primitive or !std.zig.isPrimitive(bytes)) and
@@ -463,7 +464,7 @@ test fmtChar {
}

/// Print the string as escaped contents of a double quoted string.
pub fn stringEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
pub fn stringEscape(bytes: []const u8, w: *Writer) Writer.Error!void {
    for (bytes) |byte| switch (byte) {
        '\n' => try w.writeAll("\\n"),
        '\r' => try w.writeAll("\\r"),
@@ -480,7 +481,7 @@ pub fn stringEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!vo
}

/// Print the string as escaped contents of a single-quoted string.
pub fn charEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
pub fn charEscape(bytes: []const u8, w: *Writer) Writer.Error!void {
    for (bytes) |byte| switch (byte) {
        '\n' => try w.writeAll("\\n"),
        '\r' => try w.writeAll("\\r"),
@@ -529,20 +530,16 @@ test isUnderscore {
    try std.testing.expect(!isUnderscore("\\x5f"));
}

pub fn readSourceFileToEndAlloc(gpa: Allocator, input: std.fs.File, size_hint: ?usize) ![:0]u8 {
    const source_code = input.readToEndAllocOptions(
        gpa,
        max_src_size,
        size_hint,
        .of(u8),
        0,
    ) catch |err| switch (err) {
        error.ConnectionResetByPeer => unreachable,
        error.ConnectionTimedOut => unreachable,
        error.NotOpenForReading => unreachable,
        else => |e| return e,
    };
    errdefer gpa.free(source_code);
pub fn readSourceFileToEndAlloc(gpa: Allocator, file_reader: *std.fs.File.Reader) ![:0]u8 {
    var buffer: std.ArrayListAlignedUnmanaged(u8, .@"2") = .empty;
    defer buffer.deinit(gpa);

    if (file_reader.getSize()) |size| {
        const casted_size = std.math.cast(u32, size) orelse return error.StreamTooLong;
        try buffer.ensureTotalCapacityPrecise(gpa, casted_size);
    } else |_| {}

    try file_reader.interface.appendRemaining(gpa, .@"2", &buffer, .limited(max_src_size));

    // Detect unsupported file types with their Byte Order Mark
    const unsupported_boms = [_][]const u8{
@@ -551,30 +548,23 @@ pub fn readSourceFileToEndAlloc(gpa: Allocator, input: std.fs.File, size_hint: ?
        "\xfe\xff", // UTF-16 big endian
    };
    for (unsupported_boms) |bom| {
        if (std.mem.startsWith(u8, source_code, bom)) {
        if (std.mem.startsWith(u8, buffer.items, bom)) {
            return error.UnsupportedEncoding;
        }
    }

    // If the file starts with a UTF-16 little endian BOM, translate it to UTF-8
    if (std.mem.startsWith(u8, source_code, "\xff\xfe")) {
        if (source_code.len % 2 != 0) return error.InvalidEncoding;
        // TODO: after wrangle-writer-buffering branch is merged,
        // avoid this unnecessary allocation
        const aligned_copy = try gpa.alloc(u16, source_code.len / 2);
        defer gpa.free(aligned_copy);
        @memcpy(std.mem.sliceAsBytes(aligned_copy), source_code);
        const source_code_utf8 = std.unicode.utf16LeToUtf8AllocZ(gpa, aligned_copy) catch |err| switch (err) {
    if (std.mem.startsWith(u8, buffer.items, "\xff\xfe")) {
        if (buffer.items.len % 2 != 0) return error.InvalidEncoding;
        return std.unicode.utf16LeToUtf8AllocZ(gpa, @ptrCast(buffer.items)) catch |err| switch (err) {
            error.DanglingSurrogateHalf => error.UnsupportedEncoding,
            error.ExpectedSecondSurrogateHalf => error.UnsupportedEncoding,
            error.UnexpectedSecondSurrogateHalf => error.UnsupportedEncoding,
            else => |e| return e,
        };
        gpa.free(source_code);
        return source_code_utf8;
    }

    return source_code;
    return buffer.toOwnedSliceSentinel(gpa, 0);
}

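`readSourceFileToEndAlloc` now takes a `*std.fs.File.Reader` instead of a bare `File` plus size hint, letting it pre-size from `getSize` and delegate the read loop to `Reader.appendRemaining`. A hypothetical call site under the new signature; the `input_file.reader(&read_buffer)` shape is assumed from the new File API, and the buffer size is arbitrary:

const std = @import("std");

// Hypothetical wrapper; `readSourceFileToEndAlloc` is the function from the
// hunk above, and the caller now owns the File.Reader and its scratch buffer.
fn loadSource(gpa: std.mem.Allocator, input_file: std.fs.File) ![:0]u8 {
    var read_buffer: [4096]u8 = undefined;
    var file_reader = input_file.reader(&read_buffer);
    return readSourceFileToEndAlloc(gpa, &file_reader);
}
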
pub fn printAstErrorsToStderr(gpa: Allocator, tree: Ast, path: []const u8, color: Color) !void {
@@ -621,7 +611,7 @@ pub fn parseTargetQueryOrReportFatalError(
    var help_text = std.ArrayList(u8).init(allocator);
    defer help_text.deinit();
    for (diags.arch.?.allCpuModels()) |cpu| {
        help_text.writer().print(" {s}\n", .{cpu.name}) catch break :help;
        help_text.print(" {s}\n", .{cpu.name}) catch break :help;
    }
    std.log.info("available CPUs for architecture '{s}':\n{s}", .{
        @tagName(diags.arch.?), help_text.items,
@@ -634,7 +624,7 @@ pub fn parseTargetQueryOrReportFatalError(
    var help_text = std.ArrayList(u8).init(allocator);
    defer help_text.deinit();
    for (diags.arch.?.allFeaturesList()) |feature| {
        help_text.writer().print(" {s}: {s}\n", .{ feature.name, feature.description }) catch break :help;
        help_text.print(" {s}: {s}\n", .{ feature.name, feature.description }) catch break :help;
    }
    std.log.info("available CPU features for architecture '{s}':\n{s}", .{
        @tagName(diags.arch.?), help_text.items,
@@ -647,7 +637,7 @@ pub fn parseTargetQueryOrReportFatalError(
    var help_text = std.ArrayList(u8).init(allocator);
    defer help_text.deinit();
    inline for (@typeInfo(std.Target.ObjectFormat).@"enum".fields) |field| {
        help_text.writer().print(" {s}\n", .{field.name}) catch break :help;
        help_text.print(" {s}\n", .{field.name}) catch break :help;
    }
    std.log.info("available object formats:\n{s}", .{help_text.items});
}
@@ -658,7 +648,7 @@ pub fn parseTargetQueryOrReportFatalError(
    var help_text = std.ArrayList(u8).init(allocator);
    defer help_text.deinit();
    inline for (@typeInfo(std.Target.Cpu.Arch).@"enum".fields) |field| {
        help_text.writer().print(" {s}\n", .{field.name}) catch break :help;
        help_text.print(" {s}\n", .{field.name}) catch break :help;
    }
    std.log.info("available architectures:\n{s} native\n", .{help_text.items});
}

|
||||
//! For Zon syntax, the root node is at nodes[0] and contains lhs as the node
|
||||
//! index of the main expression.
|
||||
|
||||
const std = @import("../std.zig");
|
||||
const assert = std.debug.assert;
|
||||
const testing = std.testing;
|
||||
const mem = std.mem;
|
||||
const Token = std.zig.Token;
|
||||
const Ast = @This();
|
||||
const Allocator = std.mem.Allocator;
|
||||
const Parse = @import("Parse.zig");
|
||||
const Writer = std.Io.Writer;
|
||||
|
||||
/// Reference to externally-owned data.
|
||||
source: [:0]const u8,
|
||||
|
||||
@@ -128,12 +138,6 @@ pub fn deinit(tree: *Ast, gpa: Allocator) void {
|
||||
tree.* = undefined;
|
||||
}
|
||||
|
||||
pub const RenderError = error{
|
||||
/// Ran out of memory allocating call stack frames to complete rendering, or
|
||||
/// ran out of memory allocating space in the output buffer.
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
pub const Mode = enum { zig, zon };
|
||||
|
||||
/// Result should be freed with tree.deinit() when there are
|
||||
@@ -199,27 +203,25 @@ pub fn parse(gpa: Allocator, source: [:0]const u8, mode: Mode) Allocator.Error!A
|
||||
|
||||
/// `gpa` is used for allocating the resulting formatted source code.
|
||||
/// Caller owns the returned slice of bytes, allocated with `gpa`.
|
||||
pub fn render(tree: Ast, gpa: Allocator) RenderError![]u8 {
|
||||
var buffer = std.ArrayList(u8).init(gpa);
|
||||
defer buffer.deinit();
|
||||
|
||||
try tree.renderToArrayList(&buffer, .{});
|
||||
return buffer.toOwnedSlice();
|
||||
pub fn renderAlloc(tree: Ast, gpa: Allocator) error{OutOfMemory}![]u8 {
|
||||
var aw: std.io.Writer.Allocating = .init(gpa);
|
||||
defer aw.deinit();
|
||||
render(tree, gpa, &aw.writer, .{}) catch |err| switch (err) {
|
||||
error.WriteFailed, error.OutOfMemory => return error.OutOfMemory,
|
||||
};
|
||||
return aw.toOwnedSlice();
|
||||
}
|
||||
|
||||
pub const Fixups = private_render.Fixups;
|
||||
pub const Render = @import("Ast/Render.zig");
|
||||
|
||||
pub fn renderToArrayList(tree: Ast, buffer: *std.ArrayList(u8), fixups: Fixups) RenderError!void {
|
||||
return @import("./render.zig").renderTree(buffer, tree, fixups);
|
||||
pub fn render(tree: Ast, gpa: Allocator, w: *Writer, fixups: Render.Fixups) Render.Error!void {
|
||||
return Render.renderTree(gpa, w, tree, fixups);
|
||||
}
|
||||
|
||||
/// Returns an extra offset for column and byte offset of errors that
|
||||
/// should point after the token in the error message.
|
||||
pub fn errorOffset(tree: Ast, parse_error: Error) u32 {
|
||||
return if (parse_error.token_is_prev)
|
||||
@as(u32, @intCast(tree.tokenSlice(parse_error.token).len))
|
||||
else
|
||||
0;
|
||||
return if (parse_error.token_is_prev) @intCast(tree.tokenSlice(parse_error.token).len) else 0;
|
||||
}
|
||||
|
||||
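With this split, `Ast.render` streams into any `*std.Io.Writer` while `renderAlloc` keeps the old return-a-slice convenience. A sketch of streaming straight to a buffered stdout writer instead of allocating; the `stdout().writer(&buf)` shape and its `.interface` field are assumed from the new File API:

const std = @import("std");

// Sketch: skip the intermediate allocation that renderAlloc performs.
fn renderToStdout(tree: std.zig.Ast, gpa: std.mem.Allocator) !void {
    var out_buffer: [4096]u8 = undefined;
    var stdout_writer = std.fs.File.stdout().writer(&out_buffer);
    try tree.render(gpa, &stdout_writer.interface, .{});
    try stdout_writer.interface.flush();
}
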
pub fn tokenLocation(self: Ast, start_offset: ByteOffset, token_index: TokenIndex) Location {
@@ -318,254 +320,254 @@ pub fn rootDecls(tree: Ast) []const Node.Index {
    }
}

pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
pub fn renderError(tree: Ast, parse_error: Error, w: *Writer) Writer.Error!void {
    switch (parse_error.tag) {
        .asterisk_after_ptr_deref => {
            // Note that the token will point at the `.*` but ideally the source
            // location would point to the `*` after the `.*`.
            return stream.writeAll("'.*' cannot be followed by '*'; are you missing a space?");
            return w.writeAll("'.*' cannot be followed by '*'; are you missing a space?");
        },
        .chained_comparison_operators => {
            return stream.writeAll("comparison operators cannot be chained");
            return w.writeAll("comparison operators cannot be chained");
        },
        .decl_between_fields => {
            return stream.writeAll("declarations are not allowed between container fields");
            return w.writeAll("declarations are not allowed between container fields");
        },
        .expected_block => {
            return stream.print("expected block, found '{s}'", .{
            return w.print("expected block, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_block_or_assignment => {
            return stream.print("expected block or assignment, found '{s}'", .{
            return w.print("expected block or assignment, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_block_or_expr => {
            return stream.print("expected block or expression, found '{s}'", .{
            return w.print("expected block or expression, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_block_or_field => {
            return stream.print("expected block or field, found '{s}'", .{
            return w.print("expected block or field, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_container_members => {
            return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{
            return w.print("expected test, comptime, var decl, or container field, found '{s}'", .{
                tree.tokenTag(parse_error.token).symbol(),
            });
        },
        .expected_expr => {
            return stream.print("expected expression, found '{s}'", .{
            return w.print("expected expression, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_expr_or_assignment => {
            return stream.print("expected expression or assignment, found '{s}'", .{
            return w.print("expected expression or assignment, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_expr_or_var_decl => {
            return stream.print("expected expression or var decl, found '{s}'", .{
            return w.print("expected expression or var decl, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_fn => {
            return stream.print("expected function, found '{s}'", .{
            return w.print("expected function, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_inlinable => {
            return stream.print("expected 'while' or 'for', found '{s}'", .{
            return w.print("expected 'while' or 'for', found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_labelable => {
            return stream.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{
            return w.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_param_list => {
            return stream.print("expected parameter list, found '{s}'", .{
            return w.print("expected parameter list, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_prefix_expr => {
            return stream.print("expected prefix expression, found '{s}'", .{
            return w.print("expected prefix expression, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_primary_type_expr => {
            return stream.print("expected primary type expression, found '{s}'", .{
            return w.print("expected primary type expression, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_pub_item => {
            return stream.writeAll("expected function or variable declaration after pub");
            return w.writeAll("expected function or variable declaration after pub");
        },
        .expected_return_type => {
            return stream.print("expected return type expression, found '{s}'", .{
            return w.print("expected return type expression, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_semi_or_else => {
            return stream.writeAll("expected ';' or 'else' after statement");
            return w.writeAll("expected ';' or 'else' after statement");
        },
        .expected_semi_or_lbrace => {
            return stream.writeAll("expected ';' or block after function prototype");
            return w.writeAll("expected ';' or block after function prototype");
        },
        .expected_statement => {
            return stream.print("expected statement, found '{s}'", .{
            return w.print("expected statement, found '{s}'", .{
                tree.tokenTag(parse_error.token).symbol(),
            });
        },
        .expected_suffix_op => {
            return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
            return w.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_type_expr => {
            return stream.print("expected type expression, found '{s}'", .{
            return w.print("expected type expression, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_var_decl => {
            return stream.print("expected variable declaration, found '{s}'", .{
            return w.print("expected variable declaration, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_var_decl_or_fn => {
            return stream.print("expected variable declaration or function, found '{s}'", .{
            return w.print("expected variable declaration or function, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_loop_payload => {
            return stream.print("expected loop payload, found '{s}'", .{
            return w.print("expected loop payload, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .expected_container => {
            return stream.print("expected a struct, enum or union, found '{s}'", .{
            return w.print("expected a struct, enum or union, found '{s}'", .{
                tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
            });
        },
        .extern_fn_body => {
            return stream.writeAll("extern functions have no body");
            return w.writeAll("extern functions have no body");
        },
        .extra_addrspace_qualifier => {
            return stream.writeAll("extra addrspace qualifier");
            return w.writeAll("extra addrspace qualifier");
        },
        .extra_align_qualifier => {
            return stream.writeAll("extra align qualifier");
            return w.writeAll("extra align qualifier");
        },
        .extra_allowzero_qualifier => {
            return stream.writeAll("extra allowzero qualifier");
            return w.writeAll("extra allowzero qualifier");
        },
        .extra_const_qualifier => {
            return stream.writeAll("extra const qualifier");
            return w.writeAll("extra const qualifier");
        },
        .extra_volatile_qualifier => {
            return stream.writeAll("extra volatile qualifier");
            return w.writeAll("extra volatile qualifier");
        },
        .ptr_mod_on_array_child_type => {
            return stream.print("pointer modifier '{s}' not allowed on array child type", .{
            return w.print("pointer modifier '{s}' not allowed on array child type", .{
                tree.tokenTag(parse_error.token).symbol(),
            });
        },
        .invalid_bit_range => {
            return stream.writeAll("bit range not allowed on slices and arrays");
            return w.writeAll("bit range not allowed on slices and arrays");
        },
        .same_line_doc_comment => {
            return stream.writeAll("same line documentation comment");
            return w.writeAll("same line documentation comment");
        },
        .unattached_doc_comment => {
            return stream.writeAll("unattached documentation comment");
            return w.writeAll("unattached documentation comment");
        },
        .test_doc_comment => {
            return stream.writeAll("documentation comments cannot be attached to tests");
            return w.writeAll("documentation comments cannot be attached to tests");
        },
        .comptime_doc_comment => {
            return stream.writeAll("documentation comments cannot be attached to comptime blocks");
            return w.writeAll("documentation comments cannot be attached to comptime blocks");
        },
        .varargs_nonfinal => {
            return stream.writeAll("function prototype has parameter after varargs");
            return w.writeAll("function prototype has parameter after varargs");
        },
        .expected_continue_expr => {
            return stream.writeAll("expected ':' before while continue expression");
            return w.writeAll("expected ':' before while continue expression");
        },

        .expected_semi_after_decl => {
            return stream.writeAll("expected ';' after declaration");
            return w.writeAll("expected ';' after declaration");
        },
        .expected_semi_after_stmt => {
            return stream.writeAll("expected ';' after statement");
            return w.writeAll("expected ';' after statement");
        },
        .expected_comma_after_field => {
            return stream.writeAll("expected ',' after field");
            return w.writeAll("expected ',' after field");
        },
        .expected_comma_after_arg => {
            return stream.writeAll("expected ',' after argument");
            return w.writeAll("expected ',' after argument");
        },
        .expected_comma_after_param => {
            return stream.writeAll("expected ',' after parameter");
            return w.writeAll("expected ',' after parameter");
        },
        .expected_comma_after_initializer => {
            return stream.writeAll("expected ',' after initializer");
            return w.writeAll("expected ',' after initializer");
        },
        .expected_comma_after_switch_prong => {
            return stream.writeAll("expected ',' after switch prong");
            return w.writeAll("expected ',' after switch prong");
        },
        .expected_comma_after_for_operand => {
            return stream.writeAll("expected ',' after for operand");
            return w.writeAll("expected ',' after for operand");
        },
        .expected_comma_after_capture => {
            return stream.writeAll("expected ',' after for capture");
            return w.writeAll("expected ',' after for capture");
        },
        .expected_initializer => {
            return stream.writeAll("expected field initializer");
            return w.writeAll("expected field initializer");
        },
        .mismatched_binary_op_whitespace => {
            return stream.print("binary operator '{s}' has whitespace on one side, but not the other", .{tree.tokenTag(parse_error.token).lexeme().?});
            return w.print("binary operator '{s}' has whitespace on one side, but not the other", .{tree.tokenTag(parse_error.token).lexeme().?});
        },
        .invalid_ampersand_ampersand => {
            return stream.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND");
            return w.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND");
        },
        .c_style_container => {
            return stream.print("'{s} {s}' is invalid", .{
            return w.print("'{s} {s}' is invalid", .{
                parse_error.extra.expected_tag.symbol(), tree.tokenSlice(parse_error.token),
            });
        },
        .zig_style_container => {
            return stream.print("to declare a container do 'const {s} = {s}'", .{
            return w.print("to declare a container do 'const {s} = {s}'", .{
                tree.tokenSlice(parse_error.token), parse_error.extra.expected_tag.symbol(),
            });
        },
        .previous_field => {
            return stream.writeAll("field before declarations here");
            return w.writeAll("field before declarations here");
        },
        .next_field => {
            return stream.writeAll("field after declarations here");
            return w.writeAll("field after declarations here");
        },
        .expected_var_const => {
            return stream.writeAll("expected 'var' or 'const' before variable declaration");
            return w.writeAll("expected 'var' or 'const' before variable declaration");
        },
        .wrong_equal_var_decl => {
            return stream.writeAll("variable initialized with '==' instead of '='");
            return w.writeAll("variable initialized with '==' instead of '='");
        },
        .var_const_decl => {
            return stream.writeAll("use 'var' or 'const' to declare variable");
            return w.writeAll("use 'var' or 'const' to declare variable");
        },
        .extra_for_capture => {
            return stream.writeAll("extra capture in for loop");
            return w.writeAll("extra capture in for loop");
        },
        .for_input_not_captured => {
            return stream.writeAll("for input is not captured");
            return w.writeAll("for input is not captured");
        },

        .invalid_byte => {
            const tok_slice = tree.source[tree.tokens.items(.start)[parse_error.token]..];
            return stream.print("{s} contains invalid byte: '{f}'", .{
            return w.print("{s} contains invalid byte: '{f}'", .{
                switch (tok_slice[0]) {
                    '\'' => "character literal",
                    '"', '\\' => "string literal",
@@ -580,10 +582,10 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
            const found_tag = tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev));
            const expected_symbol = parse_error.extra.expected_tag.symbol();
            switch (found_tag) {
                .invalid => return stream.print("expected '{s}', found invalid bytes", .{
                .invalid => return w.print("expected '{s}', found invalid bytes", .{
                    expected_symbol,
                }),
                else => return stream.print("expected '{s}', found '{s}'", .{
                else => return w.print("expected '{s}', found '{s}'", .{
                    expected_symbol, found_tag.symbol(),
                }),
            }
@@ -4136,17 +4138,7 @@ pub fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex
    return Span{ .start = start_off, .end = end_off, .main = tree.tokenStart(main) };
}

const std = @import("../std.zig");
const assert = std.debug.assert;
const testing = std.testing;
const mem = std.mem;
const Token = std.zig.Token;
const Ast = @This();
const Allocator = std.mem.Allocator;
const Parse = @import("Parse.zig");
const private_render = @import("./render.zig");

test {
    _ = Parse;
    _ = private_render;
    _ = Render;
}

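`renderError` trades `anytype` for a concrete `*std.Io.Writer`, fixing the error set to `Writer.Error`. A sketch of printing every parse error to standard error under the new signature; the `stderr().writer(&buf)` shape and its `.interface` field are assumed by analogy with `stdout()` used elsewhere in this commit:

const std = @import("std");

fn reportErrors(tree: std.zig.Ast) !void {
    var buf: [256]u8 = undefined;
    var stderr_writer = std.fs.File.stderr().writer(&buf);
    const w = &stderr_writer.interface;
    for (tree.errors) |parse_error| {
        try tree.renderError(parse_error, w);
        try w.writeByte('\n');
    }
    try w.flush();
}
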
@@ -1,4 +1,4 @@
const std = @import("../std.zig");
const std = @import("../../std.zig");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = std.mem.Allocator;
@@ -6,13 +6,24 @@ const meta = std.meta;
const Ast = std.zig.Ast;
const Token = std.zig.Token;
const primitives = std.zig.primitives;
const Writer = std.io.Writer;

const Render = @This();

gpa: Allocator,
ais: *AutoIndentingStream,
tree: Ast,
fixups: Fixups,

const indent_delta = 4;
const asm_indent_delta = 2;

pub const Error = Ast.RenderError;

const Ais = AutoIndentingStream(std.ArrayList(u8).Writer);
pub const Error = error{
    /// Ran out of memory allocating call stack frames to complete rendering.
    OutOfMemory,
    /// Transitive failure from the underlying `Writer`.
    WriteFailed,
};

pub const Fixups = struct {
    /// The key is the mut token (`var`/`const`) of the variable declaration
@@ -72,19 +83,12 @@ pub const Fixups = struct {
    }
};

const Render = struct {
    gpa: Allocator,
    ais: *Ais,
    tree: Ast,
    fixups: Fixups,
};

pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!void {
|
||||
pub fn renderTree(gpa: Allocator, w: *Writer, tree: Ast, fixups: Fixups) Error!void {
|
||||
assert(tree.errors.len == 0); // Cannot render an invalid tree.
|
||||
var auto_indenting_stream = Ais.init(buffer, indent_delta);
|
||||
var auto_indenting_stream: AutoIndentingStream = .init(gpa, w, indent_delta);
|
||||
defer auto_indenting_stream.deinit();
|
||||
var r: Render = .{
|
||||
.gpa = buffer.allocator,
|
||||
.gpa = gpa,
|
||||
.ais = &auto_indenting_stream,
|
||||
.tree = tree,
|
||||
.fixups = fixups,
|
||||
@@ -186,7 +190,7 @@ fn renderMember(
|
||||
if (opt_callconv_expr.unwrap()) |callconv_expr| {
|
||||
if (tree.nodeTag(callconv_expr) == .enum_literal) {
|
||||
if (mem.eql(u8, "@\"inline\"", tree.tokenSlice(tree.nodeMainToken(callconv_expr)))) {
|
||||
try ais.writer().writeAll("inline ");
|
||||
try ais.underlying_writer.writeAll("inline ");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -200,7 +204,7 @@ fn renderMember(
|
||||
const lbrace = tree.nodeMainToken(body_node);
|
||||
try renderToken(r, lbrace, .newline);
|
||||
try discardAllParams(r, fn_proto);
|
||||
try ais.writer().writeAll("@trap();");
|
||||
try ais.writeAll("@trap();");
|
||||
ais.popIndent();
|
||||
try ais.insertNewline();
|
||||
try renderToken(r, tree.lastToken(body_node), space); // rbrace
|
||||
@@ -216,10 +220,9 @@ fn renderMember(
|
||||
const name_ident = param.name_token.?;
|
||||
assert(tree.tokenTag(name_ident) == .identifier);
|
||||
if (r.fixups.unused_var_decls.contains(name_ident)) {
|
||||
const w = ais.writer();
|
||||
try w.writeAll("_ = ");
|
||||
try w.writeAll(tokenSliceForRender(r.tree, name_ident));
|
||||
try w.writeAll(";\n");
|
||||
try ais.writeAll("_ = ");
|
||||
try ais.writeAll(tokenSliceForRender(r.tree, name_ident));
|
||||
try ais.writeAll(";\n");
|
||||
}
|
||||
}
|
||||
var statements_buf: [2]Ast.Node.Index = undefined;
|
||||
@@ -312,7 +315,7 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
|
||||
const tree = r.tree;
|
||||
const ais = r.ais;
|
||||
if (r.fixups.replace_nodes_with_string.get(node)) |replacement| {
|
||||
try ais.writer().writeAll(replacement);
|
||||
try ais.writeAll(replacement);
|
||||
try renderOnlySpace(r, space);
|
||||
return;
|
||||
} else if (r.fixups.replace_nodes_with_node.get(node)) |replacement| {
|
||||
@@ -881,7 +884,7 @@ fn renderExpressionFixup(r: *Render, node: Ast.Node.Index, space: Space) Error!v
|
||||
const ais = r.ais;
|
||||
try renderExpression(r, node, space);
|
||||
if (r.fixups.append_string_after_node.get(node)) |bytes| {
|
||||
try ais.writer().writeAll(bytes);
|
||||
try ais.writeAll(bytes);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1086,10 +1089,10 @@ fn renderVarDecl(
|
||||
try renderVarDeclWithoutFixups(r, var_decl, ignore_comptime_token, space);
|
||||
if (r.fixups.unused_var_decls.contains(var_decl.ast.mut_token + 1)) {
|
||||
// Discard the variable like this: `_ = foo;`
|
||||
const w = r.ais.writer();
|
||||
try w.writeAll("_ = ");
|
||||
try w.writeAll(tokenSliceForRender(r.tree, var_decl.ast.mut_token + 1));
|
||||
try w.writeAll(";\n");
|
||||
const ais = r.ais;
|
||||
try ais.writeAll("_ = ");
|
||||
try ais.writeAll(tokenSliceForRender(r.tree, var_decl.ast.mut_token + 1));
|
||||
try ais.writeAll(";\n");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1567,7 +1570,7 @@ fn renderBuiltinCall(
|
||||
defer r.gpa.free(new_string);
|
||||
|
||||
try renderToken(r, builtin_token + 1, .none); // (
|
||||
try ais.writer().print("\"{f}\"", .{std.zig.fmtString(new_string)});
|
||||
try ais.print("\"{f}\"", .{std.zig.fmtString(new_string)});
|
||||
return renderToken(r, str_lit_token + 1, space); // )
|
||||
}
|
||||
}
|
||||
@@ -2125,13 +2128,13 @@ fn renderArrayInit(
|
||||
|
||||
const section_exprs = row_exprs[0..section_end];
|
||||
|
||||
var sub_expr_buffer = std.ArrayList(u8).init(gpa);
|
||||
var sub_expr_buffer: std.io.Writer.Allocating = .init(gpa);
|
||||
defer sub_expr_buffer.deinit();
|
||||
|
||||
const sub_expr_buffer_starts = try gpa.alloc(usize, section_exprs.len + 1);
|
||||
defer gpa.free(sub_expr_buffer_starts);
|
||||
|
||||
var auto_indenting_stream = Ais.init(&sub_expr_buffer, indent_delta);
|
||||
var auto_indenting_stream: AutoIndentingStream = .init(gpa, &sub_expr_buffer.writer, indent_delta);
|
||||
defer auto_indenting_stream.deinit();
|
||||
var sub_render: Render = .{
|
||||
.gpa = r.gpa,
|
||||
@@ -2145,13 +2148,14 @@ fn renderArrayInit(
|
||||
var single_line = true;
|
||||
var contains_newline = false;
|
||||
for (section_exprs, 0..) |expr, i| {
|
||||
const start = sub_expr_buffer.items.len;
|
||||
const start = sub_expr_buffer.getWritten().len;
|
||||
sub_expr_buffer_starts[i] = start;
|
||||
|
||||
if (i + 1 < section_exprs.len) {
|
||||
try renderExpression(&sub_render, expr, .none);
|
||||
const width = sub_expr_buffer.items.len - start;
|
||||
const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items[start..], '\n') != null;
|
||||
const written = sub_expr_buffer.getWritten();
|
||||
const width = written.len - start;
|
||||
const this_contains_newline = mem.indexOfScalar(u8, written[start..], '\n') != null;
|
||||
contains_newline = contains_newline or this_contains_newline;
|
||||
expr_widths[i] = width;
|
||||
expr_newlines[i] = this_contains_newline;
|
||||
@@ -2173,8 +2177,9 @@ fn renderArrayInit(
|
||||
try renderExpression(&sub_render, expr, .comma);
|
||||
ais.popSpace();
|
||||
|
||||
const width = sub_expr_buffer.items.len - start - 2;
|
||||
const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items[start .. sub_expr_buffer.items.len - 1], '\n') != null;
|
||||
const written = sub_expr_buffer.getWritten();
|
||||
const width = written.len - start - 2;
|
||||
const this_contains_newline = mem.indexOfScalar(u8, written[start .. written.len - 1], '\n') != null;
|
||||
contains_newline = contains_newline or this_contains_newline;
|
||||
expr_widths[i] = width;
|
||||
expr_newlines[i] = contains_newline;
|
||||
@@ -2185,20 +2190,20 @@ fn renderArrayInit(
|
||||
}
|
||||
}
|
||||
}
|
||||
sub_expr_buffer_starts[section_exprs.len] = sub_expr_buffer.items.len;
|
||||
sub_expr_buffer_starts[section_exprs.len] = sub_expr_buffer.getWritten().len;
|
||||
|
||||
// Render exprs in current section.
|
||||
column_counter = 0;
|
||||
for (section_exprs, 0..) |expr, i| {
|
||||
const start = sub_expr_buffer_starts[i];
|
||||
const end = sub_expr_buffer_starts[i + 1];
|
||||
const expr_text = sub_expr_buffer.items[start..end];
|
||||
const expr_text = sub_expr_buffer.getWritten()[start..end];
|
||||
if (!expr_newlines[i]) {
|
||||
try ais.writer().writeAll(expr_text);
|
||||
try ais.writeAll(expr_text);
|
||||
} else {
|
||||
var by_line = std.mem.splitScalar(u8, expr_text, '\n');
|
||||
var last_line_was_empty = false;
|
||||
try ais.writer().writeAll(by_line.first());
|
||||
try ais.writeAll(by_line.first());
|
||||
while (by_line.next()) |line| {
|
||||
if (std.mem.startsWith(u8, line, "//") and last_line_was_empty) {
|
||||
try ais.insertNewline();
|
||||
@@ -2206,7 +2211,7 @@ fn renderArrayInit(
|
||||
try ais.maybeInsertNewline();
|
||||
}
|
||||
last_line_was_empty = (line.len == 0);
|
||||
try ais.writer().writeAll(line);
|
||||
try ais.writeAll(line);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2220,7 +2225,7 @@ fn renderArrayInit(
|
||||
try renderToken(r, comma, .space); // ,
|
||||
assert(column_widths[column_counter % row_size] >= expr_widths[i]);
|
||||
const padding = column_widths[column_counter % row_size] - expr_widths[i];
|
||||
try ais.writer().writeByteNTimes(' ', padding);
|
||||
try ais.splatByteAll(' ', padding);
|
||||
|
||||
column_counter += 1;
|
||||
continue;
|
||||
@@ -2394,24 +2399,24 @@ fn renderAsmLegacy(
|
||||
try renderToken(r, first_clobber - 2, .none);
|
||||
try renderToken(r, first_clobber - 1, .space);
|
||||
|
||||
try ais.writer().writeAll(".{ ");
|
||||
try ais.writeAll(".{ ");
|
||||
|
||||
var tok_i = first_clobber;
|
||||
while (true) : (tok_i += 1) {
|
||||
try ais.writer().writeByte('.');
|
||||
try ais.writeByte('.');
|
||||
_ = try writeStringLiteralAsIdentifier(r, tok_i);
|
||||
try ais.writer().writeAll(" = true");
|
||||
try ais.writeAll(" = true");
|
||||
|
||||
tok_i += 1;
|
||||
switch (tree.tokenTag(tok_i)) {
|
||||
.r_paren => {
|
||||
try ais.writer().writeAll(" }");
|
||||
try ais.writeAll(" }");
|
||||
ais.popIndent();
|
||||
return renderToken(r, tok_i, space);
|
||||
},
|
||||
.comma => {
|
||||
if (tree.tokenTag(tok_i + 1) == .r_paren) {
|
||||
try ais.writer().writeAll(" }");
|
||||
try ais.writeAll(" }");
|
||||
ais.popIndent();
|
||||
return renderToken(r, tok_i + 1, space);
|
||||
} else {
|
||||
@@ -2506,16 +2511,16 @@ fn renderAsmLegacy(
|
||||
};
|
||||
|
||||
try renderToken(r, colon3, .space); // :
|
||||
try ais.writer().writeAll(".{ ");
|
||||
try ais.writeAll(".{ ");
|
||||
const first_clobber = asm_node.first_clobber.?;
|
||||
var tok_i = first_clobber;
|
||||
while (true) {
|
||||
switch (tree.tokenTag(tok_i + 1)) {
|
||||
.r_paren => {
|
||||
ais.setIndentDelta(indent_delta);
|
||||
try ais.writer().writeByte('.');
|
||||
try ais.writeByte('.');
|
||||
const lexeme_len = try writeStringLiteralAsIdentifier(r, tok_i);
|
||||
try ais.writer().writeAll(" = true }");
|
||||
try ais.writeAll(" = true }");
|
||||
try renderSpace(r, tok_i, lexeme_len, .newline);
|
||||
ais.popIndent();
|
||||
return renderToken(r, tok_i + 1, space);
|
||||
@@ -2524,17 +2529,17 @@ fn renderAsmLegacy(
|
||||
switch (tree.tokenTag(tok_i + 2)) {
|
||||
.r_paren => {
|
||||
ais.setIndentDelta(indent_delta);
|
||||
try ais.writer().writeByte('.');
|
||||
try ais.writeByte('.');
|
||||
const lexeme_len = try writeStringLiteralAsIdentifier(r, tok_i);
|
||||
try ais.writer().writeAll(" = true }");
|
||||
try ais.writeAll(" = true }");
|
||||
try renderSpace(r, tok_i, lexeme_len, .newline);
|
||||
ais.popIndent();
|
||||
return renderToken(r, tok_i + 2, space);
|
||||
},
|
||||
else => {
|
||||
try ais.writer().writeByte('.');
|
||||
try ais.writeByte('.');
|
||||
_ = try writeStringLiteralAsIdentifier(r, tok_i);
|
||||
try ais.writer().writeAll(" = true");
|
||||
try ais.writeAll(" = true");
|
||||
try renderToken(r, tok_i + 1, .space);
|
||||
tok_i += 2;
|
||||
},
|
||||
@@ -2799,7 +2804,7 @@ fn renderToken(r: *Render, token_index: Ast.TokenIndex, space: Space) Error!void
|
||||
const tree = r.tree;
|
||||
const ais = r.ais;
|
||||
const lexeme = tokenSliceForRender(tree, token_index);
|
||||
try ais.writer().writeAll(lexeme);
|
||||
try ais.writeAll(lexeme);
|
||||
try renderSpace(r, token_index, lexeme.len, space);
|
||||
}
|
||||
|
||||
@@ -2807,7 +2812,7 @@ fn renderTokenOverrideSpaceMode(r: *Render, token_index: Ast.TokenIndex, space:
|
||||
const tree = r.tree;
|
||||
const ais = r.ais;
|
||||
const lexeme = tokenSliceForRender(tree, token_index);
|
||||
try ais.writer().writeAll(lexeme);
|
||||
try ais.writeAll(lexeme);
|
||||
ais.enableSpaceMode(override_space);
|
||||
defer ais.disableSpaceMode();
|
||||
try renderSpace(r, token_index, lexeme.len, space);
|
||||
@@ -2822,7 +2827,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
|
||||
if (space == .skip) return;
|
||||
|
||||
if (space == .comma and next_token_tag != .comma) {
|
||||
try ais.writer().writeByte(',');
|
||||
try ais.writeByte(',');
|
||||
}
|
||||
if (space == .semicolon or space == .comma) ais.enableSpaceMode(space);
|
||||
defer ais.disableSpaceMode();
|
||||
@@ -2833,7 +2838,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
|
||||
);
|
||||
switch (space) {
|
||||
.none => {},
|
||||
.space => if (!comment) try ais.writer().writeByte(' '),
|
||||
.space => if (!comment) try ais.writeByte(' '),
|
||||
.newline => if (!comment) try ais.insertNewline(),
|
||||
|
||||
.comma => if (next_token_tag == .comma) {
|
||||
@@ -2845,7 +2850,7 @@ fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space
|
||||
.comma_space => if (next_token_tag == .comma) {
|
||||
try renderToken(r, token_index + 1, .space);
|
||||
} else if (!comment) {
|
||||
try ais.writer().writeByte(' ');
|
||||
try ais.writeByte(' ');
|
||||
},
|
||||
|
||||
.semicolon => if (next_token_tag == .semicolon) {
|
||||
@@ -2862,11 +2867,11 @@ fn renderOnlySpace(r: *Render, space: Space) Error!void {
|
||||
const ais = r.ais;
|
||||
switch (space) {
|
||||
.none => {},
|
||||
.space => try ais.writer().writeByte(' '),
|
||||
.space => try ais.writeByte(' '),
|
||||
.newline => try ais.insertNewline(),
|
||||
.comma => try ais.writer().writeAll(",\n"),
|
||||
.comma_space => try ais.writer().writeAll(", "),
|
||||
.semicolon => try ais.writer().writeAll(";\n"),
|
||||
.comma => try ais.writeAll(",\n"),
|
||||
.comma_space => try ais.writeAll(", "),
|
||||
.semicolon => try ais.writeAll(";\n"),
|
||||
.skip => unreachable,
|
||||
}
|
||||
}
|
||||
@@ -2883,7 +2888,7 @@ fn renderIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, quote
|
||||
const lexeme = tokenSliceForRender(tree, token_index);
|
||||
|
||||
if (r.fixups.rename_identifiers.get(lexeme)) |mangled| {
|
||||
try r.ais.writer().writeAll(mangled);
|
||||
try r.ais.writeAll(mangled);
|
||||
try renderSpace(r, token_index, lexeme.len, space);
|
||||
return;
|
||||
}
|
||||
@@ -2992,15 +2997,15 @@ fn renderQuotedIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space,
|
||||
const lexeme = tokenSliceForRender(tree, token_index);
|
||||
assert(lexeme.len >= 3 and lexeme[0] == '@');
|
||||
|
||||
if (!unquote) try ais.writer().writeAll("@\"");
|
||||
if (!unquote) try ais.writeAll("@\"");
const contents = lexeme[2 .. lexeme.len - 1];
try renderIdentifierContents(ais.writer(), contents);
if (!unquote) try ais.writer().writeByte('\"');
try renderIdentifierContents(ais, contents);
if (!unquote) try ais.writeByte('\"');

try renderSpace(r, token_index, lexeme.len, space);
}

fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
fn renderIdentifierContents(ais: *AutoIndentingStream, bytes: []const u8) !void {
var pos: usize = 0;
while (pos < bytes.len) {
const byte = bytes[pos];
@@ -3013,23 +3018,23 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
.success => |codepoint| {
if (codepoint <= 0x7f) {
const buf = [1]u8{@as(u8, @intCast(codepoint))};
try std.fmt.format(writer, "{f}", .{std.zig.fmtString(&buf)});
try ais.print("{f}", .{std.zig.fmtString(&buf)});
} else {
try writer.writeAll(escape_sequence);
try ais.writeAll(escape_sequence);
}
},
.failure => {
try writer.writeAll(escape_sequence);
try ais.writeAll(escape_sequence);
},
}
},
0x00...('\\' - 1), ('\\' + 1)...0x7f => {
const buf = [1]u8{byte};
try std.fmt.format(writer, "{f}", .{std.zig.fmtString(&buf)});
try ais.print("{f}", .{std.zig.fmtString(&buf)});
pos += 1;
},
0x80...0xff => {
try writer.writeByte(byte);
try ais.writeByte(byte);
pos += 1;
},
}
@@ -3091,7 +3096,7 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool {
} else if (index == start) {
// Otherwise if the first comment is on the same line as
// the token before it, prefix it with a single space.
try ais.writer().writeByte(' ');
try ais.writeByte(' ');
}
}

@@ -3108,11 +3113,11 @@ fn renderComments(r: *Render, start: usize, end: usize) Error!bool {
ais.disabled_offset = null;
} else if (ais.disabled_offset == null and mem.eql(u8, comment_content, "zig fmt: off")) {
// Write with the canonical single space.
try ais.writer().writeAll("// zig fmt: off\n");
try ais.writeAll("// zig fmt: off\n");
ais.disabled_offset = index;
} else {
// Write the comment minus trailing whitespace.
try ais.writer().print("{s}\n", .{trimmed_comment});
try ais.print("{s}\n", .{trimmed_comment});
}
}

@@ -3213,10 +3218,9 @@ fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) Error!void {
while (it.next()) |param| {
const name_ident = param.name_token.?;
assert(tree.tokenTag(name_ident) == .identifier);
const w = ais.writer();
try w.writeAll("_ = ");
try w.writeAll(tokenSliceForRender(r.tree, name_ident));
try w.writeAll(";\n");
try ais.writeAll("_ = ");
try ais.writeAll(tokenSliceForRender(r.tree, name_ident));
try ais.writeAll(";\n");
}
}

@@ -3238,11 +3242,11 @@ fn writeStringLiteralAsIdentifier(r: *Render, token_index: Ast.TokenIndex) !usiz
const lexeme = tokenSliceForRender(tree, token_index);
const unquoted = lexeme[1..][0 .. lexeme.len - 2];
if (std.zig.isValidId(unquoted)) {
try ais.writer().writeAll(unquoted);
try ais.writeAll(unquoted);
return unquoted.len;
} else {
try ais.writer().writeByte('@');
try ais.writer().writeAll(lexeme);
try ais.writeByte('@');
try ais.writeAll(lexeme);
return lexeme.len + 1;
}
}
@@ -3269,11 +3273,11 @@ fn anythingBetween(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenI
return false;
}

fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void {
fn writeFixingWhitespace(w: *Writer, slice: []const u8) Error!void {
for (slice) |byte| switch (byte) {
'\t' => try writer.writeAll(" " ** indent_delta),
'\t' => try w.splatByteAll(' ', indent_delta),
'\r' => {},
else => try writer.writeByte(byte),
else => try w.writeByte(byte),
};
}

@@ -3398,224 +3402,244 @@ fn rowSize(tree: Ast, exprs: []const Ast.Node.Index, rtoken: Ast.TokenIndex) usi
/// of the appropriate indentation level for them with pushSpace/popSpace.
/// This should be done whenever a scope that ends in a .semicolon or a
/// .comma is introduced.
fn AutoIndentingStream(comptime UnderlyingWriter: type) type {
return struct {
const Self = @This();
pub const WriteError = UnderlyingWriter.Error;
pub const Writer = std.io.GenericWriter(*Self, WriteError, write);
const AutoIndentingStream = struct {
underlying_writer: *Writer,

pub const IndentType = enum {
normal,
after_equals,
binop,
field_access,
};
const StackElem = struct {
indent_type: IndentType,
realized: bool,
};
const SpaceElem = struct {
space: Space,
indent_count: usize,
};
/// Offset into the source at which formatting has been disabled with
/// a `zig fmt: off` comment.
///
/// If non-null, the AutoIndentingStream will not write any bytes
/// to the underlying writer. It will however continue to track the
/// indentation level.
disabled_offset: ?usize = null,

underlying_writer: UnderlyingWriter,
indent_count: usize = 0,
indent_delta: usize,
indent_stack: std.ArrayList(StackElem),
space_stack: std.ArrayList(SpaceElem),
space_mode: ?usize = null,
disable_indent_committing: usize = 0,
current_line_empty: bool = true,
/// the most recently applied indent
applied_indent: usize = 0,

/// Offset into the source at which formatting has been disabled with
/// a `zig fmt: off` comment.
///
/// If non-null, the AutoIndentingStream will not write any bytes
/// to the underlying writer. It will however continue to track the
/// indentation level.
disabled_offset: ?usize = null,

indent_count: usize = 0,
indent_delta: usize,
indent_stack: std.ArrayList(StackElem),
space_stack: std.ArrayList(SpaceElem),
space_mode: ?usize = null,
disable_indent_committing: usize = 0,
current_line_empty: bool = true,
/// the most recently applied indent
applied_indent: usize = 0,

pub fn init(buffer: *std.ArrayList(u8), indent_delta_: usize) Self {
return .{
.underlying_writer = buffer.writer(),
.indent_delta = indent_delta_,
.indent_stack = std.ArrayList(StackElem).init(buffer.allocator),
.space_stack = std.ArrayList(SpaceElem).init(buffer.allocator),
};
}

pub fn deinit(self: *Self) void {
self.indent_stack.deinit();
self.space_stack.deinit();
}

pub fn writer(self: *Self) Writer {
return .{ .context = self };
}

pub fn write(self: *Self, bytes: []const u8) WriteError!usize {
if (bytes.len == 0)
return @as(usize, 0);

try self.applyIndent();
return self.writeNoIndent(bytes);
}

// Change the indent delta without changing the final indentation level
pub fn setIndentDelta(self: *Self, new_indent_delta: usize) void {
if (self.indent_delta == new_indent_delta) {
return;
} else if (self.indent_delta > new_indent_delta) {
assert(self.indent_delta % new_indent_delta == 0);
self.indent_count = self.indent_count * (self.indent_delta / new_indent_delta);
} else {
// assert that the current indentation (in spaces) is a multiple of the new delta
assert((self.indent_count * self.indent_delta) % new_indent_delta == 0);
self.indent_count = self.indent_count / (new_indent_delta / self.indent_delta);
}
self.indent_delta = new_indent_delta;
}

fn writeNoIndent(self: *Self, bytes: []const u8) WriteError!usize {
if (bytes.len == 0)
return @as(usize, 0);

if (self.disabled_offset == null) try self.underlying_writer.writeAll(bytes);
if (bytes[bytes.len - 1] == '\n')
self.resetLine();
return bytes.len;
}

pub fn insertNewline(self: *Self) WriteError!void {
_ = try self.writeNoIndent("\n");
}

fn resetLine(self: *Self) void {
self.current_line_empty = true;

if (self.disable_indent_committing > 0) return;

if (self.indent_stack.items.len > 0) {
// By default, we realize the most recent indentation scope.
var to_realize = self.indent_stack.items.len - 1;

if (self.indent_stack.items.len >= 2 and
self.indent_stack.items[to_realize - 1].indent_type == .after_equals and
self.indent_stack.items[to_realize - 1].realized and
self.indent_stack.items[to_realize].indent_type == .binop)
{
// If we are in a .binop scope and our direct parent is .after_equals, don't indent.
// This ensures correct indentation in the below example:
//
// const foo =
// (x >= 'a' and x <= 'z') or //<-- we are here
// (x >= 'A' and x <= 'Z');
//
return;
}

if (self.indent_stack.items[to_realize].indent_type == .field_access) {
// Only realize the top-most field_access in a chain.
while (to_realize > 0 and self.indent_stack.items[to_realize - 1].indent_type == .field_access)
to_realize -= 1;
}

if (self.indent_stack.items[to_realize].realized) return;
self.indent_stack.items[to_realize].realized = true;
self.indent_count += 1;
}
}

/// Disables indentation level changes during the next newlines until re-enabled.
pub fn disableIndentCommitting(self: *Self) void {
self.disable_indent_committing += 1;
}

pub fn enableIndentCommitting(self: *Self) void {
assert(self.disable_indent_committing > 0);
self.disable_indent_committing -= 1;
}

pub fn pushSpace(self: *Self, space: Space) !void {
try self.space_stack.append(.{ .space = space, .indent_count = self.indent_count });
}

pub fn popSpace(self: *Self) void {
_ = self.space_stack.pop();
}

/// Sets current indentation level to be the same as that of the last pushSpace.
pub fn enableSpaceMode(self: *Self, space: Space) void {
if (self.space_stack.items.len == 0) return;
const curr = self.space_stack.getLast();
if (curr.space != space) return;
self.space_mode = curr.indent_count;
}

pub fn disableSpaceMode(self: *Self) void {
self.space_mode = null;
}

pub fn lastSpaceModeIndent(self: *Self) usize {
if (self.space_stack.items.len == 0) return 0;
return self.space_stack.getLast().indent_count * self.indent_delta;
}

/// Insert a newline unless the current line is blank
pub fn maybeInsertNewline(self: *Self) WriteError!void {
if (!self.current_line_empty)
try self.insertNewline();
}

/// Push default indentation
/// Doesn't actually write any indentation.
/// Just primes the stream to be able to write the correct indentation if it needs to.
pub fn pushIndent(self: *Self, indent_type: IndentType) !void {
try self.indent_stack.append(.{ .indent_type = indent_type, .realized = false });
}

/// Forces an indentation level to be realized.
pub fn forcePushIndent(self: *Self, indent_type: IndentType) !void {
try self.indent_stack.append(.{ .indent_type = indent_type, .realized = true });
self.indent_count += 1;
}

pub fn popIndent(self: *Self) void {
if (self.indent_stack.pop().?.realized) {
assert(self.indent_count > 0);
self.indent_count -= 1;
}
}

pub fn indentStackEmpty(self: *Self) bool {
return self.indent_stack.items.len == 0;
}

/// Writes ' ' bytes if the current line is empty
fn applyIndent(self: *Self) WriteError!void {
const current_indent = self.currentIndent();
if (self.current_line_empty and current_indent > 0) {
if (self.disabled_offset == null) {
try self.underlying_writer.writeByteNTimes(' ', current_indent);
}
self.applied_indent = current_indent;
}
self.current_line_empty = false;
}

/// Checks to see if the most recent indentation exceeds the currently pushed indents
pub fn isLineOverIndented(self: *Self) bool {
if (self.current_line_empty) return false;
return self.applied_indent > self.currentIndent();
}

fn currentIndent(self: *Self) usize {
const indent_count = self.space_mode orelse self.indent_count;
return indent_count * self.indent_delta;
}
pub const IndentType = enum {
normal,
after_equals,
binop,
field_access,
};
}
const StackElem = struct {
indent_type: IndentType,
realized: bool,
};
const SpaceElem = struct {
space: Space,
indent_count: usize,
};

pub fn init(gpa: Allocator, w: *Writer, starting_indent_delta: usize) AutoIndentingStream {
return .{
.underlying_writer = w,
.indent_delta = starting_indent_delta,
.indent_stack = .init(gpa),
.space_stack = .init(gpa),
};
}

pub fn deinit(self: *AutoIndentingStream) void {
self.indent_stack.deinit();
self.space_stack.deinit();
}

pub fn writeAll(ais: *AutoIndentingStream, bytes: []const u8) Error!void {
if (bytes.len == 0) return;
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.writeAll(bytes);
if (bytes[bytes.len - 1] == '\n') ais.resetLine();
}

/// Assumes that if the printed data ends with a newline, it is directly
/// contained in the format string.
pub fn print(ais: *AutoIndentingStream, comptime format: []const u8, args: anytype) Error!void {
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.print(format, args);
if (format[format.len - 1] == '\n') ais.resetLine();
}

pub fn writeByte(ais: *AutoIndentingStream, byte: u8) Error!void {
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.writeByte(byte);
assert(byte != '\n');
}

pub fn splatByteAll(ais: *AutoIndentingStream, byte: u8, n: usize) Error!void {
assert(byte != '\n');
try ais.applyIndent();
if (ais.disabled_offset == null) try ais.underlying_writer.splatByteAll(byte, n);
}

// Change the indent delta without changing the final indentation level
pub fn setIndentDelta(ais: *AutoIndentingStream, new_indent_delta: usize) void {
if (ais.indent_delta == new_indent_delta) {
return;
} else if (ais.indent_delta > new_indent_delta) {
assert(ais.indent_delta % new_indent_delta == 0);
ais.indent_count = ais.indent_count * (ais.indent_delta / new_indent_delta);
} else {
// assert that the current indentation (in spaces) is a multiple of the new delta
assert((ais.indent_count * ais.indent_delta) % new_indent_delta == 0);
ais.indent_count = ais.indent_count / (new_indent_delta / ais.indent_delta);
}
ais.indent_delta = new_indent_delta;
}

pub fn insertNewline(ais: *AutoIndentingStream) Error!void {
if (ais.disabled_offset == null) try ais.underlying_writer.writeByte('\n');
ais.resetLine();
}

/// Insert a newline unless the current line is blank
pub fn maybeInsertNewline(ais: *AutoIndentingStream) Error!void {
if (!ais.current_line_empty)
try ais.insertNewline();
}

/// Push an indent that is automatically popped after being applied
pub fn pushIndentOneShot(ais: *AutoIndentingStream) void {
ais.indent_one_shot_count += 1;
ais.pushIndent();
}

/// Turns all one-shot indents into regular indents
/// Returns number of indents that must now be manually popped
pub fn lockOneShotIndent(ais: *AutoIndentingStream) usize {
const locked_count = ais.indent_one_shot_count;
ais.indent_one_shot_count = 0;
return locked_count;
}

/// Push an indent that should not take effect until the next line
pub fn pushIndentNextLine(ais: *AutoIndentingStream) void {
ais.indent_next_line += 1;
ais.pushIndent();
}

/// Checks to see if the most recent indentation exceeds the currently pushed indents
pub fn isLineOverIndented(ais: *AutoIndentingStream) bool {
if (ais.current_line_empty) return false;
return ais.applied_indent > ais.currentIndent();
}

fn resetLine(ais: *AutoIndentingStream) void {
ais.current_line_empty = true;

if (ais.disable_indent_committing > 0) return;

if (ais.indent_stack.items.len > 0) {
// By default, we realize the most recent indentation scope.
var to_realize = ais.indent_stack.items.len - 1;

if (ais.indent_stack.items.len >= 2 and
ais.indent_stack.items[to_realize - 1].indent_type == .after_equals and
ais.indent_stack.items[to_realize - 1].realized and
ais.indent_stack.items[to_realize].indent_type == .binop)
{
// If we are in a .binop scope and our direct parent is .after_equals, don't indent.
// This ensures correct indentation in the below example:
//
// const foo =
// (x >= 'a' and x <= 'z') or //<-- we are here
// (x >= 'A' and x <= 'Z');
//
return;
}

if (ais.indent_stack.items[to_realize].indent_type == .field_access) {
// Only realize the top-most field_access in a chain.
while (to_realize > 0 and ais.indent_stack.items[to_realize - 1].indent_type == .field_access)
to_realize -= 1;
}

if (ais.indent_stack.items[to_realize].realized) return;
ais.indent_stack.items[to_realize].realized = true;
ais.indent_count += 1;
}
}

/// Disables indentation level changes during the next newlines until re-enabled.
pub fn disableIndentCommitting(ais: *AutoIndentingStream) void {
ais.disable_indent_committing += 1;
}

pub fn enableIndentCommitting(ais: *AutoIndentingStream) void {
assert(ais.disable_indent_committing > 0);
ais.disable_indent_committing -= 1;
}

pub fn pushSpace(ais: *AutoIndentingStream, space: Space) !void {
try ais.space_stack.append(.{ .space = space, .indent_count = ais.indent_count });
}

pub fn popSpace(ais: *AutoIndentingStream) void {
_ = ais.space_stack.pop();
}

/// Sets current indentation level to be the same as that of the last pushSpace.
pub fn enableSpaceMode(ais: *AutoIndentingStream, space: Space) void {
if (ais.space_stack.items.len == 0) return;
const curr = ais.space_stack.getLast();
if (curr.space != space) return;
ais.space_mode = curr.indent_count;
}

pub fn disableSpaceMode(ais: *AutoIndentingStream) void {
ais.space_mode = null;
}

pub fn lastSpaceModeIndent(ais: *AutoIndentingStream) usize {
if (ais.space_stack.items.len == 0) return 0;
return ais.space_stack.getLast().indent_count * ais.indent_delta;
}

/// Push default indentation
/// Doesn't actually write any indentation.
/// Just primes the stream to be able to write the correct indentation if it needs to.
pub fn pushIndent(ais: *AutoIndentingStream, indent_type: IndentType) !void {
try ais.indent_stack.append(.{ .indent_type = indent_type, .realized = false });
}

/// Forces an indentation level to be realized.
pub fn forcePushIndent(ais: *AutoIndentingStream, indent_type: IndentType) !void {
try ais.indent_stack.append(.{ .indent_type = indent_type, .realized = true });
ais.indent_count += 1;
}

pub fn popIndent(ais: *AutoIndentingStream) void {
if (ais.indent_stack.pop().?.realized) {
assert(ais.indent_count > 0);
ais.indent_count -= 1;
}
}

pub fn indentStackEmpty(ais: *AutoIndentingStream) bool {
return ais.indent_stack.items.len == 0;
}

/// Writes ' ' bytes if the current line is empty
fn applyIndent(ais: *AutoIndentingStream) Error!void {
const current_indent = ais.currentIndent();
if (ais.current_line_empty and current_indent > 0) {
if (ais.disabled_offset == null) {
try ais.underlying_writer.splatByteAll(' ', current_indent);
}
ais.applied_indent = current_indent;
}
ais.current_line_empty = false;
}

fn currentIndent(ais: *AutoIndentingStream) usize {
const indent_count = ais.space_mode orelse ais.indent_count;
return indent_count * ais.indent_delta;
}
};
@@ -11278,10 +11278,14 @@ fn parseStrLit(
offset: u32,
) InnerError!void {
const raw_string = bytes[offset..];
var buf_managed = buf.toManaged(astgen.gpa);
const result = std.zig.string_literal.parseWrite(buf_managed.writer(), raw_string);
buf.* = buf_managed.moveToUnmanaged();
switch (try result) {
const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(astgen.gpa, buf);
defer buf.* = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
};
switch (result) {
.success => return,
.failure => |err| return astgen.failWithStrLitError(err, token, bytes, offset),
}
@@ -11324,17 +11328,18 @@ fn appendErrorNodeNotes(
notes: []const u32,
) Allocator.Error!void {
@branchHint(.cold);
const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
try string_bytes.print(gpa, format ++ "\x00", args);
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len);
astgen.extra.appendAssumeCapacity(@intCast(notes.len));
astgen.extra.appendSliceAssumeCapacity(notes);
break :blk @intCast(notes_start);
} else 0;
try astgen.compile_errors.append(astgen.gpa, .{
try astgen.compile_errors.append(gpa, .{
.msg = msg,
.node = node.toOptional(),
.token = .none,
@@ -11418,7 +11423,7 @@ fn appendErrorTokNotesOff(
const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(gpa).print(format ++ "\x00", args);
try string_bytes.print(gpa, format ++ "\x00", args);
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len);
@@ -11454,7 +11459,7 @@ fn errNoteTokOff(
@branchHint(.cold);
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
try string_bytes.print(astgen.gpa, format ++ "\x00", args);
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
.node = .none,
@@ -11473,7 +11478,7 @@ fn errNoteNode(
@branchHint(.cold);
const string_bytes = &astgen.string_bytes;
const msg: Zir.NullTerminatedString = @enumFromInt(string_bytes.items.len);
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
try string_bytes.print(astgen.gpa, format ++ "\x00", args);
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
.node = node.toOptional(),
@@ -13715,13 +13720,14 @@ fn emitDbgStmtForceCurrentIndex(gz: *GenZir, lc: LineColumn) !void {
} });
}

fn lowerAstErrors(astgen: *AstGen) !void {
fn lowerAstErrors(astgen: *AstGen) error{OutOfMemory}!void {
const gpa = astgen.gpa;
const tree = astgen.tree;
assert(tree.errors.len > 0);

var msg: std.ArrayListUnmanaged(u8) = .empty;
defer msg.deinit(gpa);
var msg: std.io.Writer.Allocating = .init(gpa);
defer msg.deinit();
const msg_w = &msg.writer;

var notes: std.ArrayListUnmanaged(u32) = .empty;
defer notes.deinit(gpa);
@@ -13749,26 +13755,26 @@ fn lowerAstErrors(astgen: *AstGen) !void {
break :blk idx - tok_start;
};

const err: Ast.Error = .{
const ast_err: Ast.Error = .{
.tag = Ast.Error.Tag.invalid_byte,
.token = tok,
.extra = .{ .offset = bad_off },
};
msg.clearRetainingCapacity();
try tree.renderError(err, msg.writer(gpa));
return try astgen.appendErrorTokNotesOff(tok, bad_off, "{s}", .{msg.items}, notes.items);
tree.renderError(ast_err, msg_w) catch return error.OutOfMemory;
return try astgen.appendErrorTokNotesOff(tok, bad_off, "{s}", .{msg.getWritten()}, notes.items);
}

var cur_err = tree.errors[0];
for (tree.errors[1..]) |err| {
if (err.is_note) {
try tree.renderError(err, msg.writer(gpa));
try notes.append(gpa, try astgen.errNoteTok(err.token, "{s}", .{msg.items}));
tree.renderError(err, msg_w) catch return error.OutOfMemory;
try notes.append(gpa, try astgen.errNoteTok(err.token, "{s}", .{msg.getWritten()}));
} else {
// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_w) catch return error.OutOfMemory;
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
notes.clearRetainingCapacity();
cur_err = err;

@@ -13781,8 +13787,8 @@ fn lowerAstErrors(astgen: *AstGen) !void {

// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_w) catch return error.OutOfMemory;
try astgen.appendErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
}

const DeclarationName = union(enum) {

@@ -1,5 +1,16 @@
//! Ingests an `Ast` and produces a `Zoir`.

const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = mem.Allocator;
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
const StringIndexContext = std.hash_map.StringIndexContext;
const ZonGen = @This();
const Zoir = @import("Zoir.zig");
const Ast = @import("Ast.zig");
const Writer = std.io.Writer;

gpa: Allocator,
tree: Ast,

@@ -446,37 +457,44 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
}
}

fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) error{ OutOfMemory, BadString }!u32 {
const gpa = zg.gpa;
const tree = zg.tree;
assert(tree.tokenTag(ident_token) == .identifier);
const ident_name = tree.tokenSlice(ident_token);
if (!mem.startsWith(u8, ident_name, "@")) {
const start = zg.string_bytes.items.len;
try zg.string_bytes.appendSlice(zg.gpa, ident_name);
try zg.string_bytes.appendSlice(gpa, ident_name);
return @intCast(start);
} else {
const offset = 1;
const start: u32 = @intCast(zg.string_bytes.items.len);
const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(zg.gpa, raw_string.len);
switch (try std.zig.string_literal.parseWrite(zg.string_bytes.writer(zg.gpa), raw_string)) {
.success => {},
.failure => |err| {
try zg.lowerStrLitError(err, ident_token, raw_string, offset);
return error.BadString;
},
}

const slice = zg.string_bytes.items[start..];
if (mem.indexOfScalar(u8, slice, 0) != null) {
try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{});
return error.BadString;
} else if (slice.len == 0) {
try zg.addErrorTok(ident_token, "identifier cannot be empty", .{});
return error.BadString;
}
return start;
}
const offset = 1;
const start: u32 = @intCast(zg.string_bytes.items.len);
const raw_string = zg.tree.tokenSlice(ident_token)[offset..];
try zg.string_bytes.ensureUnusedCapacity(gpa, raw_string.len);
const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
};
switch (result) {
.success => {},
.failure => |err| {
try zg.lowerStrLitError(err, ident_token, raw_string, offset);
return error.BadString;
},
}

const slice = zg.string_bytes.items[start..];
if (mem.indexOfScalar(u8, slice, 0) != null) {
try zg.addErrorTok(ident_token, "identifier cannot contain null bytes", .{});
return error.BadString;
} else if (slice.len == 0) {
try zg.addErrorTok(ident_token, "identifier cannot be empty", .{});
return error.BadString;
}
return start;
}

/// Estimates the size of a string node without parsing it.
@@ -507,8 +525,8 @@ pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
pub fn parseStrLit(
tree: Ast,
node: Ast.Node.Index,
writer: anytype,
) error{OutOfMemory}!std.zig.string_literal.Result {
writer: *Writer,
) Writer.Error!std.zig.string_literal.Result {
switch (tree.nodeTag(node)) {
.string_literal => {
const token = tree.nodeMainToken(node);
@@ -543,15 +561,22 @@ const StringLiteralResult = union(enum) {
slice: struct { start: u32, len: u32 },
};

fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult {
fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) error{ OutOfMemory, BadString }!StringLiteralResult {
if (!zg.options.parse_str_lits) return .{ .slice = .{ .start = 0, .len = 0 } };

const gpa = zg.gpa;
const string_bytes = &zg.string_bytes;
const str_index: u32 = @intCast(zg.string_bytes.items.len);
const size_hint = strLitSizeHint(zg.tree, str_node);
try string_bytes.ensureUnusedCapacity(zg.gpa, size_hint);
switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) {
try string_bytes.ensureUnusedCapacity(gpa, size_hint);
const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(gpa, &zg.string_bytes);
defer zg.string_bytes = aw.toArrayList();
break :r parseStrLit(zg.tree, str_node, &aw.writer) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
};
switch (result) {
.success => {},
.failure => |err| {
const token = zg.tree.nodeMainToken(str_node);
@@ -793,10 +818,7 @@ fn lowerNumberError(zg: *ZonGen, err: std.zig.number_literal.Error, token: Ast.T

fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note {
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);

try zg.string_bytes.print(zg.gpa, format ++ "\x00", args);
return .{
.msg = @enumFromInt(message_idx),
.token = .none,
@@ -806,10 +828,7 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a

fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!Zoir.CompileError.Note {
const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);

try zg.string_bytes.print(zg.gpa, format ++ "\x00", args);
return .{
.msg = @enumFromInt(message_idx),
.token = .fromToken(tok),
@@ -850,9 +869,7 @@ fn addErrorInner(
try zg.error_notes.appendSlice(gpa, notes);

const message_idx: u32 = @intCast(zg.string_bytes.items.len);
const writer = zg.string_bytes.writer(zg.gpa);
try writer.print(format, args);
try writer.writeByte(0);
try zg.string_bytes.print(gpa, format ++ "\x00", args);

try zg.compile_errors.append(gpa, .{
.msg = @enumFromInt(message_idx),
@@ -868,8 +885,9 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
const tree = zg.tree;
assert(tree.errors.len > 0);

var msg: std.ArrayListUnmanaged(u8) = .empty;
defer msg.deinit(gpa);
var msg: std.io.Writer.Allocating = .init(gpa);
defer msg.deinit();
const msg_bw = &msg.writer;

var notes: std.ArrayListUnmanaged(Zoir.CompileError.Note) = .empty;
defer notes.deinit(gpa);
@@ -877,18 +895,20 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {
var cur_err = tree.errors[0];
for (tree.errors[1..]) |err| {
if (err.is_note) {
try tree.renderError(err, msg.writer(gpa));
try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.items}));
tree.renderError(err, msg_bw) catch return error.OutOfMemory;
try notes.append(gpa, try zg.errNoteTok(err.token, "{s}", .{msg.getWritten()}));
} else {
// Flush error
try tree.renderError(cur_err, msg.writer(gpa));
tree.renderError(cur_err, msg_bw) catch return error.OutOfMemory;
const extra_offset = tree.errorOffset(cur_err);
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
notes.clearRetainingCapacity();
cur_err = err;

// TODO: `Parse` currently does not have good error recovery mechanisms, so the remaining errors could be bogus.
// As such, we'll ignore all remaining errors for now. We should improve `Parse` so that we can report all the errors.
// TODO: `Parse` currently does not have good error recovery
// mechanisms, so the remaining errors could be bogus. As such,
// we'll ignore all remaining errors for now. We should improve
// `Parse` so that we can report all the errors.
return;
}
msg.clearRetainingCapacity();
@@ -896,16 +916,6 @@ fn lowerAstErrors(zg: *ZonGen) Allocator.Error!void {

// Flush error
const extra_offset = tree.errorOffset(cur_err);
try tree.renderError(cur_err, msg.writer(gpa));
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
tree.renderError(cur_err, msg_bw) catch return error.OutOfMemory;
try zg.addErrorTokNotesOff(cur_err.token, extra_offset, "{s}", .{msg.getWritten()}, notes.items);
}

const std = @import("std");
const assert = std.debug.assert;
const mem = std.mem;
const Allocator = mem.Allocator;
const StringIndexAdapter = std.hash_map.StringIndexAdapter;
const StringIndexContext = std.hash_map.StringIndexContext;
const ZonGen = @This();
const Zoir = @import("Zoir.zig");
const Ast = @import("Ast.zig");

@@ -6367,7 +6367,9 @@ test "ampersand" {
var fixed_buffer_mem: [100 * 1024]u8 = undefined;

fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *bool) ![]u8 {
const stderr = std.fs.File.stderr().deprecatedWriter();
var buffer: [64]u8 = undefined;
const stderr = std.debug.lockStderrWriter(&buffer);
defer std.debug.unlockStderrWriter();

var tree = try std.zig.Ast.parse(allocator, source, .zig);
defer tree.deinit(allocator);
@@ -6390,7 +6392,7 @@ fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *
return error.ParseError;
}

const formatted = try tree.render(allocator);
const formatted = try tree.renderAlloc(allocator);
anything_changed.* = !mem.eql(u8, formatted, source);
return formatted;
}

@@ -1,6 +1,7 @@
const std = @import("../std.zig");
const assert = std.debug.assert;
const utf8Encode = std.unicode.utf8Encode;
const Writer = std.io.Writer;

pub const ParseError = error{
OutOfMemory,
@@ -315,9 +316,10 @@ test parseCharLiteral {
);
}

/// Parses `bytes` as a Zig string literal and writes the result to the `std.io.GenericWriter` type.
/// Parses `bytes` as a Zig string literal and writes the result to the `Writer` type.
///
/// Asserts `bytes` has '"' at beginning and end.
pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result {
pub fn parseWrite(writer: *Writer, bytes: []const u8) Writer.Error!Result {
assert(bytes.len >= 2 and bytes[0] == '"' and bytes[bytes.len - 1] == '"');

var index: usize = 1;
@@ -333,18 +335,18 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result
if (bytes[escape_char_index] == 'u') {
var buf: [4]u8 = undefined;
const len = utf8Encode(codepoint, &buf) catch {
return Result{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
return .{ .failure = .{ .invalid_unicode_codepoint = escape_char_index + 1 } };
};
try writer.writeAll(buf[0..len]);
} else {
try writer.writeByte(@as(u8, @intCast(codepoint)));
}
},
.failure => |err| return Result{ .failure = err },
.failure => |err| return .{ .failure = err },
}
},
'\n' => return Result{ .failure = .{ .invalid_character = index } },
'"' => return Result.success,
'\n' => return .{ .failure = .{ .invalid_character = index } },
'"' => return .success,
else => {
try writer.writeByte(b);
index += 1;
@@ -356,11 +358,13 @@ pub fn parseWrite(writer: anytype, bytes: []const u8) error{OutOfMemory}!Result
/// Higher level API. Does not return extra info about parse errors.
/// Caller owns returned memory.
pub fn parseAlloc(allocator: std.mem.Allocator, bytes: []const u8) ParseError![]u8 {
var buf = std.ArrayList(u8).init(allocator);
defer buf.deinit();

switch (try parseWrite(buf.writer(), bytes)) {
.success => return buf.toOwnedSlice(),
var aw: std.io.Writer.Allocating = .init(allocator);
defer aw.deinit();
const result = parseWrite(&aw.writer, bytes) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
switch (result) {
.success => return aw.toOwnedSlice(),
.failure => return error.InvalidLiteral,
}
}

@@ -411,18 +411,22 @@ const Parser = struct {
diag: ?*Diagnostics,
options: Options,

fn parseExpr(self: *@This(), T: type, node: Zoir.Node.Index) error{ ParseZon, OutOfMemory }!T {
const ParseExprError = error{ ParseZon, OutOfMemory };

fn parseExpr(self: *@This(), T: type, node: Zoir.Node.Index) ParseExprError!T {
return self.parseExprInner(T, node) catch |err| switch (err) {
error.WrongType => return self.failExpectedType(T, node),
else => |e| return e,
};
}

const ParseExprInnerError = error{ ParseZon, OutOfMemory, WrongType };

fn parseExprInner(
self: *@This(),
T: type,
node: Zoir.Node.Index,
) error{ ParseZon, OutOfMemory, WrongType }!T {
) ParseExprInnerError!T {
if (T == Zoir.Node.Index) {
return node;
}
@@ -611,15 +615,17 @@ const Parser = struct {
}
}

fn parseString(self: *@This(), T: type, node: Zoir.Node.Index) !T {
fn parseString(self: *@This(), T: type, node: Zoir.Node.Index) ParseExprInnerError!T {
const ast_node = node.getAstNode(self.zoir);
const pointer = @typeInfo(T).pointer;
var size_hint = ZonGen.strLitSizeHint(self.ast, ast_node);
if (pointer.sentinel() != null) size_hint += 1;

var buf: std.ArrayListUnmanaged(u8) = try .initCapacity(self.gpa, size_hint);
defer buf.deinit(self.gpa);
switch (try ZonGen.parseStrLit(self.ast, ast_node, buf.writer(self.gpa))) {
var aw: std.Io.Writer.Allocating = .init(self.gpa);
try aw.ensureUnusedCapacity(size_hint);
defer aw.deinit();
const result = ZonGen.parseStrLit(self.ast, ast_node, &aw.writer) catch return error.OutOfMemory;
switch (result) {
.success => {},
.failure => |err| {
const token = self.ast.nodeMainToken(ast_node);
@@ -638,9 +644,9 @@ const Parser = struct {
}

if (pointer.sentinel() != null) {
return buf.toOwnedSliceSentinel(self.gpa, 0);
return aw.toOwnedSliceSentinel(0);
} else {
return buf.toOwnedSlice(self.gpa);
return aw.toOwnedSlice();
}
}


@@ -5328,7 +5328,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8, owner_mod: *Package.Module
var out_zig_file = try o_dir.createFile(cimport_zig_basename, .{});
defer out_zig_file.close();

const formatted = try tree.render(comp.gpa);
const formatted = try tree.renderAlloc(comp.gpa);
defer comp.gpa.free(formatted);

try out_zig_file.writeAll(formatted);

@@ -471,10 +471,14 @@ const Parse = struct {
offset: u32,
) InnerError!void {
const raw_string = bytes[offset..];
var buf_managed = buf.toManaged(p.gpa);
const result = std.zig.string_literal.parseWrite(buf_managed.writer(), raw_string);
buf.* = buf_managed.moveToUnmanaged();
switch (try result) {
const result = r: {
var aw: std.io.Writer.Allocating = .fromArrayList(p.gpa, buf);
defer buf.* = aw.toArrayList();
break :r std.zig.string_literal.parseWrite(&aw.writer, raw_string) catch |err| switch (err) {
error.WriteFailed => return error.OutOfMemory,
};
};
switch (result) {
.success => {},
.failure => |err| try p.appendStrLitError(err, token, bytes, offset),
}

src/fmt.zig
@@ -34,16 +34,13 @@ const Fmt = struct {
color: Color,
gpa: Allocator,
arena: Allocator,
out_buffer: std.ArrayList(u8),
out_buffer: std.Io.Writer.Allocating,
stdout_writer: *fs.File.Writer,

const SeenMap = std.AutoHashMap(fs.File.INode, void);
};

pub fn run(
gpa: Allocator,
arena: Allocator,
args: []const []const u8,
) !void {
pub fn run(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
var color: Color = .auto;
var stdin_flag = false;
var check_flag = false;
@@ -60,8 +57,7 @@ pub fn run(
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage_fmt);
try fs.File.stdout().writeAll(usage_fmt);
return process.cleanExit();
} else if (mem.eql(u8, arg, "--color")) {
if (i + 1 >= args.len) {
@@ -102,7 +98,9 @@ pub fn run(
}

const stdin: fs.File = .stdin();
const source_code = std.zig.readSourceFileToEndAlloc(gpa, stdin, null) catch |err| {
var stdio_buffer: [1024]u8 = undefined;
var file_reader: fs.File.Reader = stdin.reader(&stdio_buffer);
const source_code = std.zig.readSourceFileToEndAlloc(gpa, &file_reader) catch |err| {
fatal("unable to read stdin: {}", .{err});
};
defer gpa.free(source_code);
@@ -146,7 +144,7 @@ pub fn run(
try std.zig.printAstErrorsToStderr(gpa, tree, "<stdin>", color);
process.exit(2);
}
const formatted = try tree.render(gpa);
const formatted = try tree.renderAlloc(gpa);
defer gpa.free(formatted);

if (check_flag) {
@@ -154,13 +152,16 @@ pub fn run(
process.exit(code);
}

return std.fs.File.stdout().writeAll(formatted);
return fs.File.stdout().writeAll(formatted);
}

if (input_files.items.len == 0) {
fatal("expected at least one source file argument", .{});
}

var stdout_buffer: [4096]u8 = undefined;
var stdout_writer = fs.File.stdout().writer(&stdout_buffer);

var fmt: Fmt = .{
.gpa = gpa,
.arena = arena,
@@ -169,7 +170,8 @@ pub fn run(
.check_ast = check_ast_flag,
.force_zon = force_zon,
.color = color,
.out_buffer = std.ArrayList(u8).init(gpa),
.out_buffer = .init(gpa),
.stdout_writer = &stdout_writer,
};
defer fmt.seen.deinit();
defer fmt.out_buffer.deinit();
@@ -196,43 +198,10 @@ pub fn run(
if (fmt.any_error) {
process.exit(1);
}
try fmt.stdout_writer.interface.flush();
}

const FmtError = error{
SystemResources,
OperationAborted,
IoPending,
BrokenPipe,
Unexpected,
WouldBlock,
Canceled,
FileClosed,
DestinationAddressRequired,
DiskQuota,
FileTooBig,
MessageTooBig,
InputOutput,
NoSpaceLeft,
AccessDenied,
OutOfMemory,
RenameAcrossMountPoints,
ReadOnlyFileSystem,
LinkQuotaExceeded,
FileBusy,
EndOfStream,
Unseekable,
NotOpenForWriting,
UnsupportedEncoding,
InvalidEncoding,
ConnectionResetByPeer,
SocketNotConnected,
LockViolation,
NetNameDeleted,
InvalidArgument,
ProcessNotFound,
} || fs.File.OpenError;

fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) FmtError!void {
fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) !void {
fmtPathFile(fmt, file_path, check_mode, dir, sub_path) catch |err| switch (err) {
error.IsDir, error.AccessDenied => return fmtPathDir(fmt, file_path, check_mode, dir, sub_path),
else => {
@@ -249,7 +218,7 @@ fn fmtPathDir(
check_mode: bool,
parent_dir: fs.Dir,
parent_sub_path: []const u8,
) FmtError!void {
) !void {
var dir = try parent_dir.openDir(parent_sub_path, .{ .iterate = true });
defer dir.close();

@@ -285,7 +254,7 @@ fn fmtPathFile(
check_mode: bool,
dir: fs.Dir,
sub_path: []const u8,
) FmtError!void {
) !void {
const source_file = try dir.openFile(sub_path, .{});
var file_closed = false;
errdefer if (!file_closed) source_file.close();
@@ -295,12 +264,15 @@ fn fmtPathFile(
if (stat.kind == .directory)
return error.IsDir;

var read_buffer: [1024]u8 = undefined;
var file_reader: fs.File.Reader = source_file.reader(&read_buffer);
file_reader.size = stat.size;

const gpa = fmt.gpa;
const source_code = try std.zig.readSourceFileToEndAlloc(
gpa,
source_file,
std.math.cast(usize, stat.size) orelse return error.FileTooBig,
);
const source_code = std.zig.readSourceFileToEndAlloc(gpa, &file_reader) catch |err| switch (err) {
error.ReadFailed => return file_reader.err.?,
else => |e| return e,
};
defer gpa.free(source_code);

source_file.close();
@@ -363,24 +335,33 @@ fn fmtPathFile(
}

// As a heuristic, we make enough capacity for the same length as the input source.
fmt.out_buffer.shrinkRetainingCapacity(0);
fmt.out_buffer.clearRetainingCapacity();
try fmt.out_buffer.ensureTotalCapacity(source_code.len);

try tree.renderToArrayList(&fmt.out_buffer, .{});
if (mem.eql(u8, fmt.out_buffer.items, source_code))
tree.render(gpa, &fmt.out_buffer.writer, .{}) catch |err| switch (err) {
error.WriteFailed, error.OutOfMemory => return error.OutOfMemory,
};
if (mem.eql(u8, fmt.out_buffer.getWritten(), source_code))
return;

if (check_mode) {
const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.print("{s}\n", .{file_path});
try fmt.stdout_writer.interface.print("{s}\n", .{file_path});
fmt.any_error = true;
} else {
var af = try dir.atomicFile(sub_path, .{ .mode = stat.mode });
defer af.deinit();

try af.file.writeAll(fmt.out_buffer.items);
try af.file.writeAll(fmt.out_buffer.getWritten());
try af.finish();
const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.print("{s}\n", .{file_path});
try fmt.stdout_writer.interface.print("{s}\n", .{file_path});
}
}

/// Provided for debugging/testing purposes; unused by the compiler.
pub fn main() !void {
const gpa = std.heap.smp_allocator;
var arena_instance = std.heap.ArenaAllocator.init(gpa);
const arena = arena_instance.allocator();
const args = try process.argsAlloc(arena);
return run(gpa, arena, args[1..]);
}

src/main.zig
@@ -4550,7 +4550,7 @@ fn cmdTranslateC(
error.SemanticAnalyzeFail => break :f .{ .error_bundle = errors },
};
defer tree.deinit(comp.gpa);
break :f .{ .success = try tree.render(arena) };
break :f .{ .success = try tree.renderAlloc(arena) };
},
};

@@ -6058,7 +6058,8 @@ fn cmdAstCheck(
};
} else fs.File.stdin();
defer if (zig_source_path != null) f.close();
break :s std.zig.readSourceFileToEndAlloc(arena, f, null) catch |err| {
var file_reader: fs.File.Reader = f.reader(&stdio_buffer);
break :s std.zig.readSourceFileToEndAlloc(arena, &file_reader) catch |err| {
fatal("unable to load file '{s}' for ast-check: {s}", .{ display_path, @errorName(err) });
};
};
@@ -6416,14 +6417,16 @@ fn cmdChangelist(
var f = fs.cwd().openFile(old_source_path, .{}) catch |err|
fatal("unable to open old source file '{s}': {s}", .{ old_source_path, @errorName(err) });
defer f.close();
break :source std.zig.readSourceFileToEndAlloc(arena, f, std.zig.max_src_size) catch |err|
var file_reader: fs.File.Reader = f.reader(&stdio_buffer);
break :source std.zig.readSourceFileToEndAlloc(arena, &file_reader) catch |err|
fatal("unable to read old source file '{s}': {s}", .{ old_source_path, @errorName(err) });
};
const new_source = source: {
var f = fs.cwd().openFile(new_source_path, .{}) catch |err|
fatal("unable to open new source file '{s}': {s}", .{ new_source_path, @errorName(err) });
defer f.close();
break :source std.zig.readSourceFileToEndAlloc(arena, f, std.zig.max_src_size) catch |err|
var file_reader: fs.File.Reader = f.reader(&stdio_buffer);
break :source std.zig.readSourceFileToEndAlloc(arena, &file_reader) catch |err|
fatal("unable to read new source file '{s}': {s}", .{ new_source_path, @errorName(err) });
};

@@ -6946,7 +6949,7 @@ fn cmdFetch(
ast.deinit(gpa);
}

var fixups: Ast.Fixups = .{};
var fixups: Ast.Render.Fixups = .{};
defer fixups.deinit(gpa);

var saved_path_or_url = path_or_url;
@@ -7047,12 +7050,13 @@ fn cmdFetch(
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);
}

var rendered = std.ArrayList(u8).init(gpa);
defer rendered.deinit();
try ast.renderToArrayList(&rendered, fixups);
var aw: std.Io.Writer.Allocating = .init(gpa);
defer aw.deinit();
try ast.render(gpa, &aw.writer, fixups);
const rendered = aw.getWritten();

build_root.directory.handle.writeFile(.{ .sub_path = Package.Manifest.basename, .data = rendered.items }) catch |err| {
fatal("unable to write {s} file: {s}", .{ Package.Manifest.basename, @errorName(err) });
build_root.directory.handle.writeFile(.{ .sub_path = Package.Manifest.basename, .data = rendered }) catch |err| {
fatal("unable to write {s} file: {t}", .{ Package.Manifest.basename, err });
};

return cleanExit();

@@ -120,7 +120,7 @@ pub fn main() !void {
error_bundle.renderToStdErr(color.renderOptions());
}

const formatted_output = try tree.render(allocator);
const formatted_output = try tree.renderAlloc(allocator);
_ = try std.fs.File.stdout().write(formatted_output);
}