compiler: JIT zig fmt

Move the implementation of `zig fmt` from the compiler's main.zig into
lib/std/zig/fmt.zig, and have the `zig fmt` command build that file on demand
as a standalone executable in the global cache, then execute it with the
original arguments, rather than linking the implementation into the compiler
binary itself.

See #19063
Andrew Kelley
2024-02-26 22:26:19 -07:00
parent b116063e02
commit d661f0f35b
9 changed files with 786 additions and 678 deletions

View File

@@ -13,7 +13,7 @@ const Step = std.Build.Step;
pub const dependencies = @import("@dependencies");
pub fn main() !void {
// Here we use an ArenaAllocator backed by a DirectAllocator because a build is a short-lived,
// Here we use an ArenaAllocator backed by a page allocator because a build is a short-lived,
// one shot program. We don't need to waste time freeing memory and finding places to squish
// bytes into. So we free everything all at once at the very end.
var single_threaded_arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);

View File

@@ -1,6 +1,3 @@
/// Implementation of `zig fmt`.
pub const fmt = @import("zig/fmt.zig");
pub const ErrorBundle = @import("zig/ErrorBundle.zig");
pub const Server = @import("zig/Server.zig");
pub const Client = @import("zig/Client.zig");
@@ -30,6 +27,36 @@ pub const c_translation = @import("zig/c_translation.zig");
pub const SrcHasher = std.crypto.hash.Blake3;
pub const SrcHash = [16]u8;
pub const Color = enum {
/// Determine whether stderr is a terminal or not automatically.
auto,
/// Assume stderr is not a terminal.
off,
/// Assume stderr is a terminal.
on,
pub fn get_tty_conf(color: Color) std.io.tty.Config {
return switch (color) {
.auto => std.io.tty.detectConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
}
pub fn renderOptions(color: Color) std.zig.ErrorBundle.RenderOptions {
const ttyconf = get_tty_conf(color);
return .{
.ttyconf = ttyconf,
.include_source_line = ttyconf != .no_color,
.include_reference_trace = ttyconf != .no_color,
};
}
};
/// There are many assumptions in the entire codebase that Zig source files can
/// be byte-indexed with a u32 integer.
pub const max_src_size = std.math.maxInt(u32);
pub fn hashSrc(src: []const u8) SrcHash {
var out: SrcHash = undefined;
SrcHasher.hash(src, &out, .{});
@@ -801,6 +828,78 @@ test isValidId {
try std.testing.expect(isValidId("i386"));
}
pub fn readSourceFileToEndAlloc(
allocator: Allocator,
input: std.fs.File,
size_hint: ?usize,
) ![:0]u8 {
const source_code = input.readToEndAllocOptions(
allocator,
max_src_size,
size_hint,
@alignOf(u16),
0,
) catch |err| switch (err) {
error.ConnectionResetByPeer => unreachable,
error.ConnectionTimedOut => unreachable,
error.NotOpenForReading => unreachable,
else => |e| return e,
};
errdefer allocator.free(source_code);
// Detect unsupported file types with their Byte Order Mark
const unsupported_boms = [_][]const u8{
"\xff\xfe\x00\x00", // UTF-32 little endian
"\xfe\xff\x00\x00", // UTF-32 big endian
"\xfe\xff", // UTF-16 big endian
};
for (unsupported_boms) |bom| {
if (std.mem.startsWith(u8, source_code, bom)) {
return error.UnsupportedEncoding;
}
}
// If the file starts with a UTF-16 little endian BOM, translate it to UTF-8
if (std.mem.startsWith(u8, source_code, "\xff\xfe")) {
const source_code_utf16_le = std.mem.bytesAsSlice(u16, source_code);
const source_code_utf8 = std.unicode.utf16LeToUtf8AllocZ(allocator, source_code_utf16_le) catch |err| switch (err) {
error.DanglingSurrogateHalf => error.UnsupportedEncoding,
error.ExpectedSecondSurrogateHalf => error.UnsupportedEncoding,
error.UnexpectedSecondSurrogateHalf => error.UnsupportedEncoding,
else => |e| return e,
};
allocator.free(source_code);
return source_code_utf8;
}
return source_code;
}
pub fn printAstErrorsToStderr(gpa: Allocator, tree: Ast, path: []const u8, color: Color) !void {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
try putAstErrorsIntoBundle(gpa, tree, path, &wip_errors);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(color.renderOptions());
}
pub fn putAstErrorsIntoBundle(
gpa: Allocator,
tree: Ast,
path: []const u8,
wip_errors: *std.zig.ErrorBundle.Wip,
) Allocator.Error!void {
var zir = try AstGen.generate(gpa, tree);
defer zir.deinit(gpa);
try wip_errors.addZirErrorMessages(zir, tree, tree.source, path);
}
test {
_ = Ast;
_ = AstRlAnnotate;
@@ -808,9 +907,12 @@ test {
_ = Client;
_ = ErrorBundle;
_ = Server;
_ = fmt;
_ = number_literal;
_ = primitives;
_ = string_literal;
_ = system;
// This is not standard library API; it is the standalone executable
// implementation of `zig fmt`.
_ = @import("zig/fmt.zig");
}

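With `Color`, `readSourceFileToEndAlloc`, `printAstErrorsToStderr`, and `putAstErrorsIntoBundle` promoted into `std.zig`, the standalone fmt implementation (and other callers such as Fetch.zig below) can report parse errors without reaching into the compiler's main.zig. A minimal sketch of that flow; the `example.zig` path and the allocator choice are illustrative, not part of the commit:

```zig
const std = @import("std");

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    const file = try std.fs.cwd().openFile("example.zig", .{});
    defer file.close();

    // Enforces the u32 source-size limit and translates UTF-16 LE input to UTF-8.
    const source = try std.zig.readSourceFileToEndAlloc(gpa, file, null);
    defer gpa.free(source);

    var tree = try std.zig.Ast.parse(gpa, source, .zig);
    defer tree.deinit(gpa);

    if (tree.errors.len != 0) {
        // `.auto` lets Color.get_tty_conf decide whether stderr supports escape codes.
        try std.zig.printAstErrorsToStderr(gpa, tree, "example.zig", .auto);
        std.process.exit(1);
    }
}
```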
View File

@@ -32,6 +32,12 @@ pub const Location = struct {
line_end: usize,
};
pub const Span = struct {
start: u32,
end: u32,
main: u32,
};
pub fn deinit(tree: *Ast, gpa: Allocator) void {
tree.tokens.deinit(gpa);
tree.nodes.deinit(gpa);
@@ -3533,6 +3539,39 @@ pub const Node = struct {
};
};
pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
return tokensToSpan(
tree,
tree.firstToken(node),
tree.lastToken(node),
tree.nodes.items(.main_token)[node],
);
}
pub fn tokenToSpan(tree: *const Ast, token: Ast.TokenIndex) Span {
return tokensToSpan(tree, token, token, token);
}
pub fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
const token_starts = tree.tokens.items(.start);
var start_tok = start;
var end_tok = end;
if (tree.tokensOnSameLine(start, end)) {
// do nothing
} else if (tree.tokensOnSameLine(start, main)) {
end_tok = main;
} else if (tree.tokensOnSameLine(main, end)) {
start_tok = main;
} else {
start_tok = main;
end_tok = main;
}
const start_off = token_starts[start_tok];
const end_off = token_starts[end_tok] + @as(u32, @intCast(tree.tokenSlice(end_tok).len));
return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
}
const std = @import("../std.zig");
const assert = std.debug.assert;
const testing = std.testing;
@@ -3544,5 +3583,6 @@ const Parse = @import("Parse.zig");
const private_render = @import("./render.zig");
test {
testing.refAllDecls(@This());
_ = Parse;
_ = private_render;
}

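The span helpers that previously lived on `Module.SrcLoc` (see the src/Module.zig hunks further down) are now public methods on `Ast`, so a byte range for diagnostics can be computed from a parsed tree alone. A small sketch, using an illustrative source string:

```zig
const std = @import("std");

test "byte span of the first root declaration" {
    const gpa = std.testing.allocator;
    var tree = try std.zig.Ast.parse(gpa, "const x: u32 = 1;", .zig);
    defer tree.deinit(gpa);

    const decl = tree.rootDecls()[0];
    const span = tree.nodeToSpan(decl);
    // start, main, and end are byte offsets into the original source.
    try std.testing.expect(span.start <= span.main and span.main < span.end);
}
```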
View File

@@ -459,6 +459,90 @@ pub const Wip = struct {
return @intCast(wip.extra.items.len - notes_len);
}
pub fn addZirErrorMessages(
eb: *ErrorBundle.Wip,
zir: std.zig.Zir,
tree: std.zig.Ast,
source: [:0]const u8,
src_path: []const u8,
) !void {
const Zir = std.zig.Zir;
const payload_index = zir.extra[@intFromEnum(Zir.ExtraIndex.compile_errors)];
assert(payload_index != 0);
const header = zir.extraData(Zir.Inst.CompileErrors, payload_index);
const items_len = header.data.items_len;
var extra_index = header.end;
for (0..items_len) |_| {
const item = zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;
const err_span = blk: {
if (item.data.node != 0) {
break :blk tree.nodeToSpan(item.data.node);
}
const token_starts = tree.tokens.items(.start);
const start = token_starts[item.data.token] + item.data.byte_offset;
const end = start + @as(u32, @intCast(tree.tokenSlice(item.data.token).len)) - item.data.byte_offset;
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
};
const err_loc = std.zig.findLineColumn(source, err_span.main);
{
const msg = zir.nullTerminatedString(item.data.msg);
try eb.addRootErrorMessage(.{
.msg = try eb.addString(msg),
.src_loc = try eb.addSourceLocation(.{
.src_path = try eb.addString(src_path),
.span_start = err_span.start,
.span_main = err_span.main,
.span_end = err_span.end,
.line = @intCast(err_loc.line),
.column = @intCast(err_loc.column),
.source_line = try eb.addString(err_loc.source_line),
}),
.notes_len = item.data.notesLen(zir),
});
}
if (item.data.notes != 0) {
const notes_start = try eb.reserveNotes(item.data.notes);
const block = zir.extraData(Zir.Inst.Block, item.data.notes);
const body = zir.extra[block.end..][0..block.data.body_len];
for (notes_start.., body) |note_i, body_elem| {
const note_item = zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
const msg = zir.nullTerminatedString(note_item.data.msg);
const span = blk: {
if (note_item.data.node != 0) {
break :blk tree.nodeToSpan(note_item.data.node);
}
const token_starts = tree.tokens.items(.start);
const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
const end = start + @as(u32, @intCast(tree.tokenSlice(note_item.data.token).len)) - item.data.byte_offset;
break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
};
const loc = std.zig.findLineColumn(source, span.main);
eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
.msg = try eb.addString(msg),
.src_loc = try eb.addSourceLocation(.{
.src_path = try eb.addString(src_path),
.span_start = span.start,
.span_main = span.main,
.span_end = span.end,
.line = @intCast(loc.line),
.column = @intCast(loc.column),
.source_line = if (loc.eql(err_loc))
0
else
try eb.addString(loc.source_line),
}),
.notes_len = 0, // TODO rework this function to be recursive
}));
}
}
}
}
fn addOtherMessage(wip: *Wip, other: ErrorBundle, msg_index: MessageIndex) !MessageIndex {
const other_msg = other.getErrorMessage(msg_index);
const src_loc = try wip.addOtherSourceLocation(other, other_msg.src_loc);

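`addZirErrorMessages` moves onto `ErrorBundle.Wip` and takes the ZIR, tree, source, and display path directly instead of a compiler-internal `Module.File`. A sketch of the full sequence as the new fmt.zig uses it; the `"<input>"` path and the no-color render options are illustrative:

```zig
const std = @import("std");

fn reportAstGenErrors(gpa: std.mem.Allocator, tree: std.zig.Ast, source: [:0]const u8) !bool {
    var zir = try std.zig.AstGen.generate(gpa, tree);
    defer zir.deinit(gpa);
    if (!zir.hasCompileErrors()) return false;

    var wip_errors: std.zig.ErrorBundle.Wip = undefined;
    try wip_errors.init(gpa);
    defer wip_errors.deinit();
    // New signature: (zir, tree, source, src_path) instead of a Module.File.
    try wip_errors.addZirErrorMessages(zir, tree, source, "<input>");

    var error_bundle = try wip_errors.toOwnedBundle("");
    defer error_bundle.deinit(gpa);
    error_bundle.renderToStdErr(.{ .ttyconf = .no_color });
    return true;
}
```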
View File

@@ -1 +1,342 @@
const std = @import("../std.zig");
const std = @import("std");
const mem = std.mem;
const fs = std.fs;
const process = std.process;
const Allocator = std.mem.Allocator;
const warn = std.log.warn;
const Color = std.zig.Color;
const usage_fmt =
\\Usage: zig fmt [file]...
\\
\\ Formats the input files and modifies them in-place.
\\ Arguments can be files or directories, which are searched
\\ recursively.
\\
\\Options:
\\ -h, --help Print this help and exit
\\ --color [auto|off|on] Enable or disable colored error messages
\\ --stdin Format code from stdin; output to stdout
\\ --check List non-conforming files and exit with an error
\\ if the list is non-empty
\\ --ast-check Run zig ast-check on every file
\\ --exclude [file] Exclude file or directory from formatting
\\
\\
;
const Fmt = struct {
seen: SeenMap,
any_error: bool,
check_ast: bool,
color: Color,
gpa: Allocator,
arena: Allocator,
out_buffer: std.ArrayList(u8),
const SeenMap = std.AutoHashMap(fs.File.INode, void);
};
pub fn main() !void {
var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_instance.deinit();
const arena = arena_instance.allocator();
const gpa = arena;
const args = try process.argsAlloc(arena);
var color: Color = .auto;
var stdin_flag: bool = false;
var check_flag: bool = false;
var check_ast_flag: bool = false;
var input_files = std.ArrayList([]const u8).init(gpa);
defer input_files.deinit();
var excluded_files = std.ArrayList([]const u8).init(gpa);
defer excluded_files.deinit();
{
var i: usize = 1;
while (i < args.len) : (i += 1) {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
const stdout = std.io.getStdOut().writer();
try stdout.writeAll(usage_fmt);
return process.cleanExit();
} else if (mem.eql(u8, arg, "--color")) {
if (i + 1 >= args.len) {
fatal("expected [auto|on|off] after --color", .{});
}
i += 1;
const next_arg = args[i];
color = std.meta.stringToEnum(Color, next_arg) orelse {
fatal("expected [auto|on|off] after --color, found '{s}'", .{next_arg});
};
} else if (mem.eql(u8, arg, "--stdin")) {
stdin_flag = true;
} else if (mem.eql(u8, arg, "--check")) {
check_flag = true;
} else if (mem.eql(u8, arg, "--ast-check")) {
check_ast_flag = true;
} else if (mem.eql(u8, arg, "--exclude")) {
if (i + 1 >= args.len) {
fatal("expected parameter after --exclude", .{});
}
i += 1;
const next_arg = args[i];
try excluded_files.append(next_arg);
} else {
fatal("unrecognized parameter: '{s}'", .{arg});
}
} else {
try input_files.append(arg);
}
}
}
if (stdin_flag) {
if (input_files.items.len != 0) {
fatal("cannot use --stdin with positional arguments", .{});
}
const stdin = std.io.getStdIn();
const source_code = std.zig.readSourceFileToEndAlloc(gpa, stdin, null) catch |err| {
fatal("unable to read stdin: {}", .{err});
};
defer gpa.free(source_code);
var tree = std.zig.Ast.parse(gpa, source_code, .zig) catch |err| {
fatal("error parsing stdin: {}", .{err});
};
defer tree.deinit(gpa);
if (check_ast_flag) {
var zir = try std.zig.AstGen.generate(gpa, tree);
if (zir.hasCompileErrors()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
try wip_errors.addZirErrorMessages(zir, tree, source_code, "<stdin>");
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(color.renderOptions());
process.exit(2);
}
} else if (tree.errors.len != 0) {
try std.zig.printAstErrorsToStderr(gpa, tree, "<stdin>", color);
process.exit(2);
}
const formatted = try tree.render(gpa);
defer gpa.free(formatted);
if (check_flag) {
const code: u8 = @intFromBool(mem.eql(u8, formatted, source_code));
process.exit(code);
}
return std.io.getStdOut().writeAll(formatted);
}
if (input_files.items.len == 0) {
fatal("expected at least one source file argument", .{});
}
var fmt = Fmt{
.gpa = gpa,
.arena = arena,
.seen = Fmt.SeenMap.init(gpa),
.any_error = false,
.check_ast = check_ast_flag,
.color = color,
.out_buffer = std.ArrayList(u8).init(gpa),
};
defer fmt.seen.deinit();
defer fmt.out_buffer.deinit();
// Mark any excluded files/directories as already seen,
// so that they are skipped later during actual processing
for (excluded_files.items) |file_path| {
const stat = fs.cwd().statFile(file_path) catch |err| switch (err) {
error.FileNotFound => continue,
// On Windows, statFile does not work for directories
error.IsDir => dir: {
var dir = try fs.cwd().openDir(file_path, .{});
defer dir.close();
break :dir try dir.stat();
},
else => |e| return e,
};
try fmt.seen.put(stat.inode, {});
}
for (input_files.items) |file_path| {
try fmtPath(&fmt, file_path, check_flag, fs.cwd(), file_path);
}
if (fmt.any_error) {
process.exit(1);
}
}
const FmtError = error{
SystemResources,
OperationAborted,
IoPending,
BrokenPipe,
Unexpected,
WouldBlock,
FileClosed,
DestinationAddressRequired,
DiskQuota,
FileTooBig,
InputOutput,
NoSpaceLeft,
AccessDenied,
OutOfMemory,
RenameAcrossMountPoints,
ReadOnlyFileSystem,
LinkQuotaExceeded,
FileBusy,
EndOfStream,
Unseekable,
NotOpenForWriting,
UnsupportedEncoding,
ConnectionResetByPeer,
SocketNotConnected,
LockViolation,
NetNameDeleted,
InvalidArgument,
} || fs.File.OpenError;
fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) FmtError!void {
fmtPathFile(fmt, file_path, check_mode, dir, sub_path) catch |err| switch (err) {
error.IsDir, error.AccessDenied => return fmtPathDir(fmt, file_path, check_mode, dir, sub_path),
else => {
warn("unable to format '{s}': {s}", .{ file_path, @errorName(err) });
fmt.any_error = true;
return;
},
};
}
fn fmtPathDir(
fmt: *Fmt,
file_path: []const u8,
check_mode: bool,
parent_dir: fs.Dir,
parent_sub_path: []const u8,
) FmtError!void {
var dir = try parent_dir.openDir(parent_sub_path, .{ .iterate = true });
defer dir.close();
const stat = try dir.stat();
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
var dir_it = dir.iterate();
while (try dir_it.next()) |entry| {
const is_dir = entry.kind == .directory;
if (is_dir and (mem.eql(u8, entry.name, "zig-cache") or mem.eql(u8, entry.name, "zig-out"))) continue;
if (is_dir or entry.kind == .file and (mem.endsWith(u8, entry.name, ".zig") or mem.endsWith(u8, entry.name, ".zon"))) {
const full_path = try fs.path.join(fmt.gpa, &[_][]const u8{ file_path, entry.name });
defer fmt.gpa.free(full_path);
if (is_dir) {
try fmtPathDir(fmt, full_path, check_mode, dir, entry.name);
} else {
fmtPathFile(fmt, full_path, check_mode, dir, entry.name) catch |err| {
warn("unable to format '{s}': {s}", .{ full_path, @errorName(err) });
fmt.any_error = true;
return;
};
}
}
}
}
fn fmtPathFile(
fmt: *Fmt,
file_path: []const u8,
check_mode: bool,
dir: fs.Dir,
sub_path: []const u8,
) FmtError!void {
const source_file = try dir.openFile(sub_path, .{});
var file_closed = false;
errdefer if (!file_closed) source_file.close();
const stat = try source_file.stat();
if (stat.kind == .directory)
return error.IsDir;
const gpa = fmt.gpa;
const source_code = try std.zig.readSourceFileToEndAlloc(
gpa,
source_file,
std.math.cast(usize, stat.size) orelse return error.FileTooBig,
);
defer gpa.free(source_code);
source_file.close();
file_closed = true;
// Add to set after no longer possible to get error.IsDir.
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
var tree = try std.zig.Ast.parse(gpa, source_code, .zig);
defer tree.deinit(gpa);
if (tree.errors.len != 0) {
try std.zig.printAstErrorsToStderr(gpa, tree, file_path, fmt.color);
fmt.any_error = true;
return;
}
if (fmt.check_ast) {
if (stat.size > std.zig.max_src_size)
return error.FileTooBig;
var zir = try std.zig.AstGen.generate(gpa, tree);
defer zir.deinit(gpa);
if (zir.hasCompileErrors()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
try wip_errors.addZirErrorMessages(zir, tree, source_code, file_path);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(fmt.color.renderOptions());
fmt.any_error = true;
}
}
// As a heuristic, we make enough capacity for the same as the input source.
fmt.out_buffer.shrinkRetainingCapacity(0);
try fmt.out_buffer.ensureTotalCapacity(source_code.len);
try tree.renderToArrayList(&fmt.out_buffer, .{});
if (mem.eql(u8, fmt.out_buffer.items, source_code))
return;
if (check_mode) {
const stdout = std.io.getStdOut().writer();
try stdout.print("{s}\n", .{file_path});
fmt.any_error = true;
} else {
var af = try dir.atomicFile(sub_path, .{ .mode = stat.mode });
defer af.deinit();
try af.file.writeAll(fmt.out_buffer.items);
try af.finish();
const stdout = std.io.getStdOut().writer();
try stdout.print("{s}\n", .{file_path});
}
}
fn fatal(comptime format: []const u8, args: anytype) noreturn {
std.log.err(format, args);
process.exit(1);
}

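The `--check` path in the new fmt.zig reduces to rendering the parsed tree and comparing the result byte-for-byte with the input. A minimal test-style sketch with an illustrative source string:

```zig
const std = @import("std");

test "an unformatted declaration differs from its rendered form" {
    const gpa = std.testing.allocator;
    const source = "const x=1;"; // zig fmt would insert spaces and a trailing newline
    var tree = try std.zig.Ast.parse(gpa, source, .zig);
    defer tree.deinit(gpa);

    const formatted = try tree.render(gpa);
    defer gpa.free(formatted);

    // `zig fmt --check` lists the file (and fails) when these differ.
    try std.testing.expect(!std.mem.eql(u8, formatted, source));
}
```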
View File

@@ -3322,85 +3322,10 @@ pub fn addZirErrorMessages(eb: *ErrorBundle.Wip, file: *Module.File) !void {
assert(file.zir_loaded);
assert(file.tree_loaded);
assert(file.source_loaded);
const payload_index = file.zir.extra[@intFromEnum(Zir.ExtraIndex.compile_errors)];
assert(payload_index != 0);
const gpa = eb.gpa;
const header = file.zir.extraData(Zir.Inst.CompileErrors, payload_index);
const items_len = header.data.items_len;
var extra_index = header.end;
for (0..items_len) |_| {
const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;
const err_span = blk: {
if (item.data.node != 0) {
break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node);
}
const token_starts = file.tree.tokens.items(.start);
const start = token_starts[item.data.token] + item.data.byte_offset;
const end = start + @as(u32, @intCast(file.tree.tokenSlice(item.data.token).len)) - item.data.byte_offset;
break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
};
const err_loc = std.zig.findLineColumn(file.source, err_span.main);
{
const msg = file.zir.nullTerminatedString(item.data.msg);
const src_path = try file.fullPath(gpa);
defer gpa.free(src_path);
try eb.addRootErrorMessage(.{
.msg = try eb.addString(msg),
.src_loc = try eb.addSourceLocation(.{
.src_path = try eb.addString(src_path),
.span_start = err_span.start,
.span_main = err_span.main,
.span_end = err_span.end,
.line = @as(u32, @intCast(err_loc.line)),
.column = @as(u32, @intCast(err_loc.column)),
.source_line = try eb.addString(err_loc.source_line),
}),
.notes_len = item.data.notesLen(file.zir),
});
}
if (item.data.notes != 0) {
const notes_start = try eb.reserveNotes(item.data.notes);
const block = file.zir.extraData(Zir.Inst.Block, item.data.notes);
const body = file.zir.extra[block.end..][0..block.data.body_len];
for (notes_start.., body) |note_i, body_elem| {
const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
const msg = file.zir.nullTerminatedString(note_item.data.msg);
const span = blk: {
if (note_item.data.node != 0) {
break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node);
}
const token_starts = file.tree.tokens.items(.start);
const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
const end = start + @as(u32, @intCast(file.tree.tokenSlice(note_item.data.token).len)) - item.data.byte_offset;
break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
};
const loc = std.zig.findLineColumn(file.source, span.main);
const src_path = try file.fullPath(gpa);
defer gpa.free(src_path);
eb.extra.items[note_i] = @intFromEnum(try eb.addErrorMessage(.{
.msg = try eb.addString(msg),
.src_loc = try eb.addSourceLocation(.{
.src_path = try eb.addString(src_path),
.span_start = span.start,
.span_main = span.main,
.span_end = span.end,
.line = @as(u32, @intCast(loc.line)),
.column = @as(u32, @intCast(loc.column)),
.source_line = if (loc.eql(err_loc))
0
else
try eb.addString(loc.source_line),
}),
.notes_len = 0, // TODO rework this function to be recursive
}));
}
}
}
const src_path = try file.fullPath(gpa);
defer gpa.free(src_path);
return eb.addZirErrorMessages(file.zir, file.tree, file.source, src_path);
}
pub fn performAllTheWork(

View File

@@ -1255,11 +1255,7 @@ pub const SrcLoc = struct {
return @bitCast(offset + @as(i32, @bitCast(src_loc.parent_decl_node)));
}
pub const Span = struct {
start: u32,
end: u32,
main: u32,
};
pub const Span = Ast.Span;
pub fn span(src_loc: SrcLoc, gpa: Allocator) !Span {
switch (src_loc.lazy) {
@@ -1276,7 +1272,7 @@ pub const SrcLoc = struct {
},
.node_abs => |node| {
const tree = try src_loc.file_scope.getTree(gpa);
return nodeToSpan(tree, node);
return tree.nodeToSpan(node);
},
.byte_offset => |byte_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1297,25 +1293,24 @@ pub const SrcLoc = struct {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
assert(src_loc.file_scope.tree_loaded);
return nodeToSpan(tree, node);
return tree.nodeToSpan(node);
},
.node_offset_main_token => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const main_token = tree.nodes.items(.main_token)[node];
return tokensToSpan(tree, main_token, main_token, main_token);
return tree.tokensToSpan(main_token, main_token, main_token);
},
.node_offset_bin_op => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
assert(src_loc.file_scope.tree_loaded);
return nodeToSpan(tree, node);
return tree.nodeToSpan(node);
},
.node_offset_initializer => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
return tokensToSpan(
tree,
return tree.tokensToSpan(
tree.firstToken(node) - 3,
tree.lastToken(node),
tree.nodes.items(.main_token)[node] - 2,
@@ -1333,12 +1328,12 @@ pub const SrcLoc = struct {
=> tree.fullVarDecl(node).?,
.@"usingnamespace" => {
const node_data = tree.nodes.items(.data);
return nodeToSpan(tree, node_data[node].lhs);
return tree.nodeToSpan(node_data[node].lhs);
},
else => unreachable,
};
if (full.ast.type_node != 0) {
return nodeToSpan(tree, full.ast.type_node);
return tree.nodeToSpan(full.ast.type_node);
}
const tok_index = full.ast.mut_token + 1; // the name token
const start = tree.tokens.items(.start)[tok_index];
@@ -1349,25 +1344,25 @@ pub const SrcLoc = struct {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullVarDecl(node).?;
return nodeToSpan(tree, full.ast.align_node);
return tree.nodeToSpan(full.ast.align_node);
},
.node_offset_var_decl_section => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullVarDecl(node).?;
return nodeToSpan(tree, full.ast.section_node);
return tree.nodeToSpan(full.ast.section_node);
},
.node_offset_var_decl_addrspace => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullVarDecl(node).?;
return nodeToSpan(tree, full.ast.addrspace_node);
return tree.nodeToSpan(full.ast.addrspace_node);
},
.node_offset_var_decl_init => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullVarDecl(node).?;
return nodeToSpan(tree, full.ast.init_node);
return tree.nodeToSpan(full.ast.init_node);
},
.node_offset_builtin_call_arg0 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 0),
.node_offset_builtin_call_arg1 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 1),
@@ -1408,13 +1403,13 @@ pub const SrcLoc = struct {
node = node_datas[node].lhs;
}
return nodeToSpan(tree, node);
return tree.nodeToSpan(node);
},
.node_offset_array_access_index => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node = src_loc.declRelativeToNodeIndex(node_off);
return nodeToSpan(tree, node_datas[node].rhs);
return tree.nodeToSpan(node_datas[node].rhs);
},
.node_offset_slice_ptr,
.node_offset_slice_start,
@@ -1431,14 +1426,14 @@ pub const SrcLoc = struct {
.node_offset_slice_sentinel => full.ast.sentinel,
else => unreachable,
};
return nodeToSpan(tree, part_node);
return tree.nodeToSpan(part_node);
},
.node_offset_call_func => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullCall(&buf, node).?;
return nodeToSpan(tree, full.ast.fn_expr);
return tree.nodeToSpan(full.ast.fn_expr);
},
.node_offset_field_name => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1477,13 +1472,13 @@ pub const SrcLoc = struct {
.node_offset_deref_ptr => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
return nodeToSpan(tree, node);
return tree.nodeToSpan(node);
},
.node_offset_asm_source => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullAsm(node).?;
return nodeToSpan(tree, full.ast.template);
return tree.nodeToSpan(full.ast.template);
},
.node_offset_asm_ret_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1491,7 +1486,7 @@ pub const SrcLoc = struct {
const full = tree.fullAsm(node).?;
const asm_output = full.outputs[0];
const node_datas = tree.nodes.items(.data);
return nodeToSpan(tree, node_datas[asm_output].lhs);
return tree.nodeToSpan(node_datas[asm_output].lhs);
},
.node_offset_if_cond => |node_off| {
@@ -1514,21 +1509,21 @@ pub const SrcLoc = struct {
const inputs = tree.fullFor(node).?.ast.inputs;
const start = tree.firstToken(inputs[0]);
const end = tree.lastToken(inputs[inputs.len - 1]);
return tokensToSpan(tree, start, end, start);
return tree.tokensToSpan(start, end, start);
},
.@"orelse" => node,
.@"catch" => node,
else => unreachable,
};
return nodeToSpan(tree, src_node);
return tree.nodeToSpan(src_node);
},
.for_input => |for_input| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(for_input.for_node_offset);
const for_full = tree.fullFor(node).?;
const src_node = for_full.ast.inputs[for_input.input_index];
return nodeToSpan(tree, src_node);
return tree.nodeToSpan(src_node);
},
.for_capture_from_input => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1554,12 +1549,12 @@ pub const SrcLoc = struct {
},
.identifier => {
if (count == 0)
return tokensToSpan(tree, tok, tok + 1, tok);
return tree.tokensToSpan(tok, tok + 1, tok);
tok += 1;
},
.asterisk => {
if (count == 0)
return tokensToSpan(tree, tok, tok + 2, tok);
return tree.tokensToSpan(tok, tok + 2, tok);
tok += 1;
},
else => unreachable,
@@ -1591,7 +1586,7 @@ pub const SrcLoc = struct {
.array_init_comma,
=> {
const full = tree.fullArrayInit(&buf, call_args_node).?.ast.elements;
return nodeToSpan(tree, full[call_arg.arg_index]);
return tree.nodeToSpan(full[call_arg.arg_index]);
},
.struct_init_one,
.struct_init_one_comma,
@@ -1603,12 +1598,12 @@ pub const SrcLoc = struct {
.struct_init_comma,
=> {
const full = tree.fullStructInit(&buf, call_args_node).?.ast.fields;
return nodeToSpan(tree, full[call_arg.arg_index]);
return tree.nodeToSpan(full[call_arg.arg_index]);
},
else => return nodeToSpan(tree, call_args_node),
else => return tree.nodeToSpan(call_args_node),
}
};
return nodeToSpan(tree, call_full.ast.params[call_arg.arg_index]);
return tree.nodeToSpan(call_full.ast.params[call_arg.arg_index]);
},
.fn_proto_param => |fn_proto_param| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1619,12 +1614,11 @@ pub const SrcLoc = struct {
var i: usize = 0;
while (it.next()) |param| : (i += 1) {
if (i == fn_proto_param.param_index) {
if (param.anytype_ellipsis3) |token| return tokenToSpan(tree, token);
if (param.anytype_ellipsis3) |token| return tree.tokenToSpan(token);
const first_token = param.comptime_noalias orelse
param.name_token orelse
tree.firstToken(param.type_expr);
return tokensToSpan(
tree,
return tree.tokensToSpan(
first_token,
tree.lastToken(param.type_expr),
first_token,
@@ -1637,13 +1631,13 @@ pub const SrcLoc = struct {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const node_datas = tree.nodes.items(.data);
return nodeToSpan(tree, node_datas[node].lhs);
return tree.nodeToSpan(node_datas[node].lhs);
},
.node_offset_bin_rhs => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const node_datas = tree.nodes.items(.data);
return nodeToSpan(tree, node_datas[node].rhs);
return tree.nodeToSpan(node_datas[node].rhs);
},
.array_cat_lhs, .array_cat_rhs => |cat| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1667,9 +1661,9 @@ pub const SrcLoc = struct {
.array_init_comma,
=> {
const full = tree.fullArrayInit(&buf, arr_node).?.ast.elements;
return nodeToSpan(tree, full[cat.elem_index]);
return tree.nodeToSpan(full[cat.elem_index]);
},
else => return nodeToSpan(tree, arr_node),
else => return tree.nodeToSpan(arr_node),
}
},
@@ -1677,7 +1671,7 @@ pub const SrcLoc = struct {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
const node_datas = tree.nodes.items(.data);
return nodeToSpan(tree, node_datas[node].lhs);
return tree.nodeToSpan(node_datas[node].lhs);
},
.node_offset_switch_special_prong => |node_off| {
@@ -1696,7 +1690,7 @@ pub const SrcLoc = struct {
mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
if (!is_special) continue;
return nodeToSpan(tree, case_node);
return tree.nodeToSpan(case_node);
} else unreachable;
},
@@ -1718,7 +1712,7 @@ pub const SrcLoc = struct {
for (case.ast.values) |item_node| {
if (node_tags[item_node] == .switch_range) {
return nodeToSpan(tree, item_node);
return tree.nodeToSpan(item_node);
}
}
} else unreachable;
@@ -1754,28 +1748,28 @@ pub const SrcLoc = struct {
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return nodeToSpan(tree, full.ast.align_expr);
return tree.nodeToSpan(full.ast.align_expr);
},
.node_offset_fn_type_addrspace => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return nodeToSpan(tree, full.ast.addrspace_expr);
return tree.nodeToSpan(full.ast.addrspace_expr);
},
.node_offset_fn_type_section => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return nodeToSpan(tree, full.ast.section_expr);
return tree.nodeToSpan(full.ast.section_expr);
},
.node_offset_fn_type_cc => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return nodeToSpan(tree, full.ast.callconv_expr);
return tree.nodeToSpan(full.ast.callconv_expr);
},
.node_offset_fn_type_ret_ty => |node_off| {
@@ -1783,7 +1777,7 @@ pub const SrcLoc = struct {
const node = src_loc.declRelativeToNodeIndex(node_off);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
return nodeToSpan(tree, full.ast.return_type);
return tree.nodeToSpan(full.ast.return_type);
},
.node_offset_param => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1795,8 +1789,7 @@ pub const SrcLoc = struct {
.colon, .identifier, .keyword_comptime, .keyword_noalias => first_tok -= 1,
else => break,
};
return tokensToSpan(
tree,
return tree.tokensToSpan(
first_tok,
tree.lastToken(node),
first_tok,
@@ -1813,8 +1806,7 @@ pub const SrcLoc = struct {
.colon, .identifier, .keyword_comptime, .keyword_noalias => first_tok -= 1,
else => break,
};
return tokensToSpan(
tree,
return tree.tokensToSpan(
first_tok,
tok_index,
first_tok,
@@ -1825,7 +1817,7 @@ pub const SrcLoc = struct {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
return nodeToSpan(tree, node_datas[parent_node].rhs);
return tree.nodeToSpan(node_datas[parent_node].rhs);
},
.node_offset_lib_name => |node_off| {
@@ -1844,70 +1836,70 @@ pub const SrcLoc = struct {
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullArrayType(parent_node).?;
return nodeToSpan(tree, full.ast.elem_count);
return tree.nodeToSpan(full.ast.elem_count);
},
.node_offset_array_type_sentinel => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullArrayType(parent_node).?;
return nodeToSpan(tree, full.ast.sentinel);
return tree.nodeToSpan(full.ast.sentinel);
},
.node_offset_array_type_elem => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullArrayType(parent_node).?;
return nodeToSpan(tree, full.ast.elem_type);
return tree.nodeToSpan(full.ast.elem_type);
},
.node_offset_un_op => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const node_datas = tree.nodes.items(.data);
const node = src_loc.declRelativeToNodeIndex(node_off);
return nodeToSpan(tree, node_datas[node].lhs);
return tree.nodeToSpan(node_datas[node].lhs);
},
.node_offset_ptr_elem => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullPtrType(parent_node).?;
return nodeToSpan(tree, full.ast.child_type);
return tree.nodeToSpan(full.ast.child_type);
},
.node_offset_ptr_sentinel => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullPtrType(parent_node).?;
return nodeToSpan(tree, full.ast.sentinel);
return tree.nodeToSpan(full.ast.sentinel);
},
.node_offset_ptr_align => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullPtrType(parent_node).?;
return nodeToSpan(tree, full.ast.align_node);
return tree.nodeToSpan(full.ast.align_node);
},
.node_offset_ptr_addrspace => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullPtrType(parent_node).?;
return nodeToSpan(tree, full.ast.addrspace_node);
return tree.nodeToSpan(full.ast.addrspace_node);
},
.node_offset_ptr_bitoffset => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullPtrType(parent_node).?;
return nodeToSpan(tree, full.ast.bit_range_start);
return tree.nodeToSpan(full.ast.bit_range_start);
},
.node_offset_ptr_hostsize => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const parent_node = src_loc.declRelativeToNodeIndex(node_off);
const full = tree.fullPtrType(parent_node).?;
return nodeToSpan(tree, full.ast.bit_range_end);
return tree.nodeToSpan(full.ast.bit_range_end);
},
.node_offset_container_tag => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1917,13 +1909,12 @@ pub const SrcLoc = struct {
switch (node_tags[parent_node]) {
.container_decl_arg, .container_decl_arg_trailing => {
const full = tree.containerDeclArg(parent_node);
return nodeToSpan(tree, full.ast.arg);
return tree.nodeToSpan(full.ast.arg);
},
.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => {
const full = tree.taggedUnionEnumTag(parent_node);
return tokensToSpan(
tree,
return tree.tokensToSpan(
tree.firstToken(full.ast.arg) - 2,
tree.lastToken(full.ast.arg) + 1,
tree.nodes.items(.main_token)[full.ast.arg],
@@ -1942,7 +1933,7 @@ pub const SrcLoc = struct {
.container_field_init => tree.containerFieldInit(parent_node),
else => unreachable,
};
return nodeToSpan(tree, full.ast.value_expr);
return tree.nodeToSpan(full.ast.value_expr);
},
.node_offset_init_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1950,7 +1941,7 @@ pub const SrcLoc = struct {
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullArrayInit(&buf, parent_node).?;
return nodeToSpan(tree, full.ast.type_expr);
return tree.nodeToSpan(full.ast.type_expr);
},
.node_offset_store_ptr => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -1960,9 +1951,9 @@ pub const SrcLoc = struct {
switch (node_tags[node]) {
.assign => {
return nodeToSpan(tree, node_datas[node].lhs);
return tree.nodeToSpan(node_datas[node].lhs);
},
else => return nodeToSpan(tree, node),
else => return tree.nodeToSpan(node),
}
},
.node_offset_store_operand => |node_off| {
@@ -1973,9 +1964,9 @@ pub const SrcLoc = struct {
switch (node_tags[node]) {
.assign => {
return nodeToSpan(tree, node_datas[node].rhs);
return tree.nodeToSpan(node_datas[node].rhs);
},
else => return nodeToSpan(tree, node),
else => return tree.nodeToSpan(node),
}
},
.node_offset_return_operand => |node_off| {
@@ -1984,9 +1975,9 @@ pub const SrcLoc = struct {
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
if (node_tags[node] == .@"return" and node_datas[node].lhs != 0) {
return nodeToSpan(tree, node_datas[node].lhs);
return tree.nodeToSpan(node_datas[node].lhs);
}
return nodeToSpan(tree, node);
return tree.nodeToSpan(node);
},
}
}
@@ -2010,40 +2001,7 @@ pub const SrcLoc = struct {
.builtin_call, .builtin_call_comma => tree.extra_data[node_datas[node].lhs + arg_index],
else => unreachable,
};
return nodeToSpan(tree, param);
}
pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
return tokensToSpan(
tree,
tree.firstToken(node),
tree.lastToken(node),
tree.nodes.items(.main_token)[node],
);
}
fn tokenToSpan(tree: *const Ast, token: Ast.TokenIndex) Span {
return tokensToSpan(tree, token, token, token);
}
fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
const token_starts = tree.tokens.items(.start);
var start_tok = start;
var end_tok = end;
if (tree.tokensOnSameLine(start, end)) {
// do nothing
} else if (tree.tokensOnSameLine(start, main)) {
end_tok = main;
} else if (tree.tokensOnSameLine(main, end)) {
start_tok = main;
} else {
start_tok = main;
end_tok = main;
}
const start_off = token_starts[start_tok];
const end_off = token_starts[end_tok] + @as(u32, @intCast(tree.tokenSlice(end_tok).len));
return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
return tree.nodeToSpan(param);
}
};

View File

@@ -592,7 +592,7 @@ fn loadManifest(f: *Fetch, pkg_root: Package.Path) RunError!void {
if (ast.errors.len > 0) {
const file_path = try std.fmt.allocPrint(arena, "{}" ++ Manifest.basename, .{pkg_root});
try main.putAstErrorsIntoBundle(arena, ast.*, file_path, eb);
try std.zig.putAstErrorsIntoBundle(arena, ast.*, file_path, eb);
return error.FetchFailed;
}
@@ -1690,7 +1690,6 @@ const Cache = std.Build.Cache;
const ThreadPool = std.Thread.Pool;
const WaitGroup = std.Thread.WaitGroup;
const Fetch = @This();
const main = @import("../main.zig");
const git = @import("Fetch/git.zig");
const Package = @import("../Package.zig");
const Manifest = Package.Manifest;

View File

@@ -8,6 +8,7 @@ const process = std.process;
const Allocator = mem.Allocator;
const ArrayList = std.ArrayList;
const Ast = std.zig.Ast;
const Color = std.zig.Color;
const warn = std.log.warn;
const ThreadPool = std.Thread.Pool;
const cleanExit = std.process.cleanExit;
@@ -66,18 +67,8 @@ pub fn fatal(comptime format: []const u8, args: anytype) noreturn {
process.exit(1);
}
/// There are many assumptions in the entire codebase that Zig source files can
/// be byte-indexed with a u32 integer.
const max_src_size = std.math.maxInt(u32);
const debug_extensions_enabled = builtin.mode == .Debug;
const Color = enum {
auto,
off,
on,
};
const normal_usage =
\\Usage: zig [command] [options]
\\
@@ -4501,7 +4492,7 @@ fn updateModule(comp: *Compilation, color: Color) !void {
defer errors.deinit(comp.gpa);
if (errors.errorMessageCount() > 0) {
errors.renderToStdErr(renderOptions(color));
errors.renderToStdErr(color.renderOptions());
return error.SemanticAnalyzeFail;
}
}
@@ -4601,7 +4592,7 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati
p.errors = errors;
return;
} else {
errors.renderToStdErr(renderOptions(color));
errors.renderToStdErr(color.renderOptions());
process.exit(1);
}
},
@@ -5528,7 +5519,7 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
if (fetch.error_bundle.root_list.items.len > 0) {
var errors = try fetch.error_bundle.toOwnedBundle("");
errors.renderToStdErr(renderOptions(color));
errors.renderToStdErr(color.renderOptions());
process.exit(1);
}
@@ -5719,470 +5710,155 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
}
}
fn readSourceFileToEndAlloc(
allocator: Allocator,
input: *const fs.File,
size_hint: ?usize,
) ![:0]u8 {
const source_code = input.readToEndAllocOptions(
allocator,
max_src_size,
size_hint,
@alignOf(u16),
0,
) catch |err| switch (err) {
error.ConnectionResetByPeer => unreachable,
error.ConnectionTimedOut => unreachable,
error.NotOpenForReading => unreachable,
else => |e| return e,
};
errdefer allocator.free(source_code);
// Detect unsupported file types with their Byte Order Mark
const unsupported_boms = [_][]const u8{
"\xff\xfe\x00\x00", // UTF-32 little endian
"\xfe\xff\x00\x00", // UTF-32 big endian
"\xfe\xff", // UTF-16 big endian
};
for (unsupported_boms) |bom| {
if (mem.startsWith(u8, source_code, bom)) {
return error.UnsupportedEncoding;
}
}
// If the file starts with a UTF-16 little endian BOM, translate it to UTF-8
if (mem.startsWith(u8, source_code, "\xff\xfe")) {
const source_code_utf16_le = mem.bytesAsSlice(u16, source_code);
const source_code_utf8 = std.unicode.utf16LeToUtf8AllocZ(allocator, source_code_utf16_le) catch |err| switch (err) {
error.DanglingSurrogateHalf => error.UnsupportedEncoding,
error.ExpectedSecondSurrogateHalf => error.UnsupportedEncoding,
error.UnexpectedSecondSurrogateHalf => error.UnsupportedEncoding,
else => |e| return e,
};
allocator.free(source_code);
return source_code_utf8;
}
return source_code;
}
const usage_fmt =
\\Usage: zig fmt [file]...
\\
\\ Formats the input files and modifies them in-place.
\\ Arguments can be files or directories, which are searched
\\ recursively.
\\
\\Options:
\\ -h, --help Print this help and exit
\\ --color [auto|off|on] Enable or disable colored error messages
\\ --stdin Format code from stdin; output to stdout
\\ --check List non-conforming files and exit with an error
\\ if the list is non-empty
\\ --ast-check Run zig ast-check on every file
\\ --exclude [file] Exclude file or directory from formatting
\\
\\
;
const Fmt = struct {
seen: SeenMap,
any_error: bool,
check_ast: bool,
color: Color,
gpa: Allocator,
arena: Allocator,
out_buffer: std.ArrayList(u8),
const SeenMap = std.AutoHashMap(fs.File.INode, void);
};
fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
var color: Color = .auto;
var stdin_flag: bool = false;
var check_flag: bool = false;
var check_ast_flag: bool = false;
var input_files = ArrayList([]const u8).init(gpa);
defer input_files.deinit();
var excluded_files = ArrayList([]const u8).init(gpa);
defer excluded_files.deinit();
const color: Color = .auto;
{
var i: usize = 0;
while (i < args.len) : (i += 1) {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
const stdout = io.getStdOut().writer();
try stdout.writeAll(usage_fmt);
return cleanExit();
} else if (mem.eql(u8, arg, "--color")) {
if (i + 1 >= args.len) {
fatal("expected [auto|on|off] after --color", .{});
}
i += 1;
const next_arg = args[i];
color = std.meta.stringToEnum(Color, next_arg) orelse {
fatal("expected [auto|on|off] after --color, found '{s}'", .{next_arg});
};
} else if (mem.eql(u8, arg, "--stdin")) {
stdin_flag = true;
} else if (mem.eql(u8, arg, "--check")) {
check_flag = true;
} else if (mem.eql(u8, arg, "--ast-check")) {
check_ast_flag = true;
} else if (mem.eql(u8, arg, "--exclude")) {
if (i + 1 >= args.len) {
fatal("expected parameter after --exclude", .{});
}
i += 1;
const next_arg = args[i];
try excluded_files.append(next_arg);
} else {
fatal("unrecognized parameter: '{s}'", .{arg});
}
} else {
try input_files.append(arg);
}
}
}
if (stdin_flag) {
if (input_files.items.len != 0) {
fatal("cannot use --stdin with positional arguments", .{});
}
const stdin = io.getStdIn();
const source_code = readSourceFileToEndAlloc(gpa, &stdin, null) catch |err| {
fatal("unable to read stdin: {}", .{err});
};
defer gpa.free(source_code);
var tree = Ast.parse(gpa, source_code, .zig) catch |err| {
fatal("error parsing stdin: {}", .{err});
};
defer tree.deinit(gpa);
if (check_ast_flag) {
var file: Module.File = .{
.status = .never_loaded,
.source_loaded = true,
.zir_loaded = false,
.sub_file_path = "<stdin>",
.source = source_code,
.stat = undefined,
.tree = tree,
.tree_loaded = true,
.zir = undefined,
.mod = undefined,
.root_decl = .none,
};
file.mod = try Package.Module.createLimited(arena, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
});
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
try Compilation.addZirErrorMessages(&wip_errors, &file);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(color));
process.exit(2);
}
} else if (tree.errors.len != 0) {
try printAstErrorsToStderr(gpa, tree, "<stdin>", color);
process.exit(2);
}
const formatted = try tree.render(gpa);
defer gpa.free(formatted);
if (check_flag) {
const code: u8 = @intFromBool(mem.eql(u8, formatted, source_code));
process.exit(code);
}
return io.getStdOut().writeAll(formatted);
}
if (input_files.items.len == 0) {
fatal("expected at least one source file argument", .{});
}
var fmt = Fmt{
.gpa = gpa,
.arena = arena,
.seen = Fmt.SeenMap.init(gpa),
.any_error = false,
.check_ast = check_ast_flag,
.color = color,
.out_buffer = std.ArrayList(u8).init(gpa),
const target_query: std.Target.Query = .{};
const resolved_target: Package.Module.ResolvedTarget = .{
.result = resolveTargetQueryOrFatal(target_query),
.is_native_os = true,
.is_native_abi = true,
};
defer fmt.seen.deinit();
defer fmt.out_buffer.deinit();
// Mark any excluded files/directories as already seen,
// so that they are skipped later during actual processing
for (excluded_files.items) |file_path| {
const stat = fs.cwd().statFile(file_path) catch |err| switch (err) {
error.FileNotFound => continue,
// On Windows, statFile does not work for directories
error.IsDir => dir: {
var dir = try fs.cwd().openDir(file_path, .{});
defer dir.close();
break :dir try dir.stat();
},
else => |e| return e,
};
try fmt.seen.put(stat.inode, {});
}
const exe_basename = try std.zig.binNameAlloc(arena, .{
.root_name = "fmt",
.target = resolved_target.result,
.output_mode = .Exe,
});
const emit_bin: Compilation.EmitLoc = .{
.directory = null, // Use the global zig-cache.
.basename = exe_basename,
};
for (input_files.items) |file_path| {
try fmtPath(&fmt, file_path, check_flag, fs.cwd(), file_path);
}
if (fmt.any_error) {
process.exit(1);
}
}
const self_exe_path = introspect.findZigExePath(arena) catch |err| {
fatal("unable to find self exe path: {s}", .{@errorName(err)});
};
const FmtError = error{
SystemResources,
OperationAborted,
IoPending,
BrokenPipe,
Unexpected,
WouldBlock,
FileClosed,
DestinationAddressRequired,
DiskQuota,
FileTooBig,
InputOutput,
NoSpaceLeft,
AccessDenied,
OutOfMemory,
RenameAcrossMountPoints,
ReadOnlyFileSystem,
LinkQuotaExceeded,
FileBusy,
EndOfStream,
Unseekable,
NotOpenForWriting,
UnsupportedEncoding,
ConnectionResetByPeer,
SocketNotConnected,
LockViolation,
NetNameDeleted,
InvalidArgument,
} || fs.File.OpenError;
const override_lib_dir: ?[]const u8 = try EnvVar.ZIG_LIB_DIR.get(arena);
const override_global_cache_dir: ?[]const u8 = try EnvVar.ZIG_GLOBAL_CACHE_DIR.get(arena);
fn fmtPath(fmt: *Fmt, file_path: []const u8, check_mode: bool, dir: fs.Dir, sub_path: []const u8) FmtError!void {
fmtPathFile(fmt, file_path, check_mode, dir, sub_path) catch |err| switch (err) {
error.IsDir, error.AccessDenied => return fmtPathDir(fmt, file_path, check_mode, dir, sub_path),
else => {
warn("unable to format '{s}': {s}", .{ file_path, @errorName(err) });
fmt.any_error = true;
return;
var zig_lib_directory: Compilation.Directory = if (override_lib_dir) |lib_dir| .{
.path = lib_dir,
.handle = fs.cwd().openDir(lib_dir, .{}) catch |err| {
fatal("unable to open zig lib directory from 'zig-lib-dir' argument: '{s}': {s}", .{ lib_dir, @errorName(err) });
},
} else introspect.findZigLibDirFromSelfExe(arena, self_exe_path) catch |err| {
fatal("unable to find zig installation directory '{s}': {s}", .{ self_exe_path, @errorName(err) });
};
}
defer zig_lib_directory.handle.close();
fn fmtPathDir(
fmt: *Fmt,
file_path: []const u8,
check_mode: bool,
parent_dir: fs.Dir,
parent_sub_path: []const u8,
) FmtError!void {
var dir = try parent_dir.openDir(parent_sub_path, .{ .iterate = true });
defer dir.close();
var global_cache_directory: Compilation.Directory = l: {
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
break :l .{
.handle = try fs.cwd().makeOpenPath(p, .{}),
.path = p,
};
};
defer global_cache_directory.handle.close();
const stat = try dir.stat();
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
var thread_pool: ThreadPool = undefined;
try thread_pool.init(.{ .allocator = gpa });
defer thread_pool.deinit();
var dir_it = dir.iterate();
while (try dir_it.next()) |entry| {
const is_dir = entry.kind == .directory;
var child_argv: std.ArrayListUnmanaged([]const u8) = .{};
try child_argv.ensureUnusedCapacity(arena, args.len + 1);
if (is_dir and (mem.eql(u8, entry.name, "zig-cache") or mem.eql(u8, entry.name, "zig-out"))) continue;
if (is_dir or entry.kind == .file and (mem.endsWith(u8, entry.name, ".zig") or mem.endsWith(u8, entry.name, ".zon"))) {
const full_path = try fs.path.join(fmt.gpa, &[_][]const u8{ file_path, entry.name });
defer fmt.gpa.free(full_path);
if (is_dir) {
try fmtPathDir(fmt, full_path, check_mode, dir, entry.name);
} else {
fmtPathFile(fmt, full_path, check_mode, dir, entry.name) catch |err| {
warn("unable to format '{s}': {s}", .{ full_path, @errorName(err) });
fmt.any_error = true;
return;
};
}
}
}
}
fn fmtPathFile(
fmt: *Fmt,
file_path: []const u8,
check_mode: bool,
dir: fs.Dir,
sub_path: []const u8,
) FmtError!void {
const source_file = try dir.openFile(sub_path, .{});
var file_closed = false;
errdefer if (!file_closed) source_file.close();
const stat = try source_file.stat();
if (stat.kind == .directory)
return error.IsDir;
const gpa = fmt.gpa;
const source_code = try readSourceFileToEndAlloc(
gpa,
&source_file,
std.math.cast(usize, stat.size) orelse return error.FileTooBig,
);
defer gpa.free(source_code);
source_file.close();
file_closed = true;
// Add to set after no longer possible to get error.IsDir.
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
var tree = try Ast.parse(gpa, source_code, .zig);
defer tree.deinit(gpa);
if (tree.errors.len != 0) {
try printAstErrorsToStderr(gpa, tree, file_path, fmt.color);
fmt.any_error = true;
return;
}
if (fmt.check_ast) {
var file: Module.File = .{
.status = .never_loaded,
.source_loaded = true,
.zir_loaded = false,
.sub_file_path = file_path,
.source = source_code,
.stat = .{
.size = stat.size,
.inode = stat.inode,
.mtime = stat.mtime,
// We want to release all the locks before executing the child process, so we make a nice
// big block here to ensure the cleanup gets run when we extract out our argv.
{
const main_mod_paths: Package.Module.CreateOptions.Paths = .{
.root = .{
.root_dir = zig_lib_directory,
.sub_path = "std/zig",
},
.tree = tree,
.tree_loaded = true,
.zir = undefined,
.mod = undefined,
.root_decl = .none,
.root_src_path = "fmt.zig",
};
file.mod = try Package.Module.createLimited(fmt.arena, .{
.root = Package.Path.cwd(),
.root_src_path = file.sub_file_path,
.fully_qualified_name = "root",
const config = try Compilation.Config.resolve(.{
.output_mode = .Exe,
.root_optimize_mode = .ReleaseFast,
.resolved_target = resolved_target,
.have_zcu = true,
.emit_bin = true,
.is_test = false,
});
if (stat.size > max_src_size)
return error.FileTooBig;
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
try Compilation.addZirErrorMessages(&wip_errors, &file);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(fmt.color));
fmt.any_error = true;
}
}
// As a heuristic, we make enough capacity for the same as the input source.
fmt.out_buffer.shrinkRetainingCapacity(0);
try fmt.out_buffer.ensureTotalCapacity(source_code.len);
try tree.renderToArrayList(&fmt.out_buffer, .{});
if (mem.eql(u8, fmt.out_buffer.items, source_code))
return;
if (check_mode) {
const stdout = io.getStdOut().writer();
try stdout.print("{s}\n", .{file_path});
fmt.any_error = true;
} else {
var af = try dir.atomicFile(sub_path, .{ .mode = stat.mode });
defer af.deinit();
try af.file.writeAll(fmt.out_buffer.items);
try af.finish();
const stdout = io.getStdOut().writer();
try stdout.print("{s}\n", .{file_path});
}
}
fn printAstErrorsToStderr(gpa: Allocator, tree: Ast, path: []const u8, color: Color) !void {
var wip_errors: std.zig.ErrorBundle.Wip = undefined;
try wip_errors.init(gpa);
defer wip_errors.deinit();
try putAstErrorsIntoBundle(gpa, tree, path, &wip_errors);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(color));
}
pub fn putAstErrorsIntoBundle(
gpa: Allocator,
tree: Ast,
path: []const u8,
wip_errors: *std.zig.ErrorBundle.Wip,
) Allocator.Error!void {
var file: Module.File = .{
.status = .never_loaded,
.source_loaded = true,
.zir_loaded = false,
.sub_file_path = path,
.source = tree.source,
.stat = .{
.size = 0,
.inode = 0,
.mtime = 0,
},
.tree = tree,
.tree_loaded = true,
.zir = undefined,
.mod = try Package.Module.createLimited(gpa, .{
.root = Package.Path.cwd(),
.root_src_path = path,
const root_mod = try Package.Module.create(arena, .{
.global_cache_directory = global_cache_directory,
.paths = main_mod_paths,
.fully_qualified_name = "root",
}),
.root_decl = .none,
};
defer gpa.destroy(file.mod);
.cc_argv = &.{},
.inherited = .{
.resolved_target = resolved_target,
.optimize_mode = .ReleaseFast,
},
.global = config,
.parent = null,
.builtin_mod = null,
});
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
defer file.zir.deinit(gpa);
const comp = Compilation.create(gpa, arena, .{
.zig_lib_directory = zig_lib_directory,
.local_cache_directory = global_cache_directory,
.global_cache_directory = global_cache_directory,
.root_name = "fmt",
.config = config,
.root_mod = root_mod,
.main_mod = root_mod,
.emit_bin = emit_bin,
.emit_h = null,
.self_exe_path = self_exe_path,
.thread_pool = &thread_pool,
.cache_mode = .whole,
}) catch |err| {
fatal("unable to create compilation: {s}", .{@errorName(err)});
};
defer comp.destroy();
try Compilation.addZirErrorMessages(wip_errors, &file);
updateModule(comp, color) catch |err| switch (err) {
error.SemanticAnalyzeFail => process.exit(2),
else => |e| return e,
};
const fmt_exe = try global_cache_directory.join(arena, &.{comp.cache_use.whole.bin_sub_path.?});
child_argv.appendAssumeCapacity(fmt_exe);
}
child_argv.appendSliceAssumeCapacity(args);
if (process.can_execv) {
const err = process.execv(gpa, child_argv.items);
const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following command failed to execve with '{s}':\n{s}", .{
@errorName(err),
cmd,
});
}
if (!process.can_spawn) {
const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following command cannot be executed ({s} does not support spawning a child process):\n{s}", .{
@tagName(builtin.os.tag), cmd,
});
}
var child = std.ChildProcess.init(child_argv.items, gpa);
child.stdin_behavior = .Inherit;
child.stdout_behavior = .Inherit;
child.stderr_behavior = .Inherit;
const term = try child.spawnAndWait();
switch (term) {
.Exited => |code| {
if (code == 0) return cleanExit();
const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following build command failed with exit code {d}:\n{s}", .{ code, cmd });
},
else => {
const cmd = try std.mem.join(arena, " ", child_argv.items);
fatal("the following build command crashed:\n{s}", .{cmd});
},
}
}
const info_zen =
@@ -6710,7 +6386,7 @@ fn cmdAstCheck(
const stat = try f.stat();
if (stat.size > max_src_size)
if (stat.size > std.zig.max_src_size)
return error.FileTooBig;
const source = try arena.allocSentinel(u8, @as(usize, @intCast(stat.size)), 0);
@@ -6728,7 +6404,7 @@ fn cmdAstCheck(
};
} else {
const stdin = io.getStdIn();
const source = readSourceFileToEndAlloc(arena, &stdin, null) catch |err| {
const source = std.zig.readSourceFileToEndAlloc(arena, stdin, null) catch |err| {
fatal("unable to read stdin: {}", .{err});
};
file.sub_file_path = "<stdin>";
@@ -6758,7 +6434,7 @@ fn cmdAstCheck(
try Compilation.addZirErrorMessages(&wip_errors, &file);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(color));
error_bundle.renderToStdErr(color.renderOptions());
process.exit(1);
}
@@ -6889,7 +6565,7 @@ fn cmdChangelist(
const stat = try f.stat();
if (stat.size > max_src_size)
if (stat.size > std.zig.max_src_size)
return error.FileTooBig;
var file: Module.File = .{
@@ -6938,7 +6614,7 @@ fn cmdChangelist(
try Compilation.addZirErrorMessages(&wip_errors, &file);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(color));
error_bundle.renderToStdErr(color.renderOptions());
process.exit(1);
}
@@ -6949,7 +6625,7 @@ fn cmdChangelist(
const new_stat = try new_f.stat();
if (new_stat.size > max_src_size)
if (new_stat.size > std.zig.max_src_size)
return error.FileTooBig;
const new_source = try arena.allocSentinel(u8, @as(usize, @intCast(new_stat.size)), 0);
@@ -6973,7 +6649,7 @@ fn cmdChangelist(
try Compilation.addZirErrorMessages(&wip_errors, &file);
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(color));
error_bundle.renderToStdErr(color.renderOptions());
process.exit(1);
}
@@ -7241,23 +6917,6 @@ const ClangSearchSanitizer = struct {
};
};
fn get_tty_conf(color: Color) std.io.tty.Config {
return switch (color) {
.auto => std.io.tty.detectConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
}
fn renderOptions(color: Color) std.zig.ErrorBundle.RenderOptions {
const ttyconf = get_tty_conf(color);
return .{
.ttyconf = ttyconf,
.include_source_line = ttyconf != .no_color,
.include_reference_trace = ttyconf != .no_color,
};
}
fn accessLibPath(
test_path: *std.ArrayList(u8),
checked_paths: *std.ArrayList(u8),
@@ -7498,7 +7157,7 @@ fn cmdFetch(
if (fetch.error_bundle.root_list.items.len > 0) {
var errors = try fetch.error_bundle.toOwnedBundle("");
errors.renderToStdErr(renderOptions(color));
errors.renderToStdErr(color.renderOptions());
process.exit(1);
}
@@ -7790,7 +7449,7 @@ fn loadManifest(
errdefer ast.deinit(gpa);
if (ast.errors.len > 0) {
try printAstErrorsToStderr(gpa, ast, Package.Manifest.basename, options.color);
try std.zig.printAstErrorsToStderr(gpa, ast, Package.Manifest.basename, options.color);
process.exit(2);
}
@@ -7807,7 +7466,7 @@ fn loadManifest(
var error_bundle = try wip_errors.toOwnedBundle("");
defer error_bundle.deinit(gpa);
error_bundle.renderToStdErr(renderOptions(options.color));
error_bundle.renderToStdErr(options.color.renderOptions());
process.exit(2);
}
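Because the diff interleaves the deleted in-process implementation with the new code, the shape of the new `cmdFmt` is easy to lose: it resolves a native target, builds lib/std/zig/fmt.zig as a ReleaseFast executable into the global cache, and then hands off to that binary with the original arguments via the usual execv-or-spawn pattern. A simplified, self-contained sketch of just that hand-off step; `tool_exe_path` and `args` are assumed inputs, not part of the commit:

```zig
const std = @import("std");
const builtin = @import("builtin");

fn execJitTool(gpa: std.mem.Allocator, arena: std.mem.Allocator, tool_exe_path: []const u8, args: []const []const u8) !void {
    var child_argv = std.ArrayListUnmanaged([]const u8){};
    try child_argv.ensureUnusedCapacity(arena, args.len + 1);
    child_argv.appendAssumeCapacity(tool_exe_path);
    child_argv.appendSliceAssumeCapacity(args);

    if (std.process.can_execv) {
        // On POSIX, execv replaces the current process and only returns on error.
        const err = std.process.execv(gpa, child_argv.items);
        std.log.err("execv failed: {s}", .{@errorName(err)});
        std.process.exit(1);
    }

    if (!std.process.can_spawn) {
        std.log.err("{s} cannot spawn child processes", .{@tagName(builtin.os.tag)});
        std.process.exit(1);
    }

    var child = std.ChildProcess.init(child_argv.items, gpa);
    child.stdin_behavior = .Inherit;
    child.stdout_behavior = .Inherit;
    child.stderr_behavior = .Inherit;
    const term = try child.spawnAndWait();
    switch (term) {
        .Exited => |code| std.process.exit(code),
        else => std.process.exit(1),
    }
}
```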