Merge pull request #18207 from ziglang/elf-error-handler
elf: report errors for some detected malformed object contents
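Below is a minimal, self-contained Zig sketch of the error-handling pattern this change moves toward; the Diags type, the parseObject helper, and the fake inputs are illustrative stand-ins, not the linker's real API. A parser reports a diagnostic at the point where malformed input is detected and then returns a sentinel error such as error.MalformedObject; the caller's input loop switches on that error and simply skips the already-reported file instead of re-reporting it, so linking can continue and surface every problem in one pass.

const std = @import("std");

// Hypothetical stand-in for the linker's diagnostic state; the names do not
// correspond to the real Elf struct fields.
const Diags = struct {
    error_count: usize = 0,

    fn reportParseError(self: *Diags, path: []const u8, comptime fmt: []const u8, args: anytype) void {
        self.error_count += 1;
        std.debug.print("error: " ++ fmt ++ "\n", args);
        std.debug.print("note: while parsing {s}\n", .{path});
    }
};

const ParseError = error{MalformedObject};

// Report at the detection site, then return a sentinel error the caller can
// recognize as "already reported".
fn parseObject(diags: *Diags, path: []const u8, data: []const u8) ParseError!void {
    if (data.len < @sizeOf(std.elf.Elf64_Ehdr)) {
        diags.reportParseError(path, "corrupt header: file too small ({d} bytes)", .{data.len});
        return error.MalformedObject;
    }
    // ... real parsing would continue here ...
}

pub fn main() void {
    var diags: Diags = .{};
    const inputs = [_]struct { path: []const u8, data: []const u8 }{
        .{ .path = "a.o", .data = "not an ELF object" },
        .{ .path = "b.o", .data = "also not an ELF object" },
    };
    for (inputs) |in| {
        parseObject(&diags, in.path, in.data) catch |err| switch (err) {
            // Already reported where it was detected; skip this input and keep going.
            error.MalformedObject => continue,
        };
    }
    if (diags.error_count > 0) std.debug.print("{d} input(s) rejected\n", .{diags.error_count});
}

In the diff below, Object.parse, SharedObject.parse, Archive.parse, and LdScript.parse play the role of parseObject, reporting through reportParseError/reportParseError2 before returning MalformedObject, MalformedArchive, InvalidCpuArch, or InvalidLdScript, and the flush loops skip those errors as already reported.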
src/link/Elf.zig
@@ -1041,9 +1041,14 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
}

for (positionals.items) |obj| {
var parse_ctx: ParseErrorCtx = .{ .detected_cpu_arch = undefined };
self.parsePositional(obj.path, obj.must_link, &parse_ctx) catch |err|
try self.handleAndReportParseError(obj.path, err, &parse_ctx);
self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
error.MalformedObject, error.MalformedArchive, error.InvalidCpuArch => continue, // already reported
else => |e| try self.reportParseError(
obj.path,
"unexpected error: parsing input file failed with error {s}",
.{@errorName(e)},
),
};
}

var system_libs = std.ArrayList(SystemLib).init(arena);
@@ -1122,9 +1127,14 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
}

for (system_libs.items) |lib| {
var parse_ctx: ParseErrorCtx = .{ .detected_cpu_arch = undefined };
self.parseLibrary(lib, false, &parse_ctx) catch |err|
try self.handleAndReportParseError(lib.path, err, &parse_ctx);
self.parseLibrary(lib, false) catch |err| switch (err) {
error.MalformedObject, error.MalformedArchive, error.InvalidCpuArch => continue, // already reported
else => |e| try self.reportParseError(
lib.path,
"unexpected error: parsing library failed with error {s}",
.{@errorName(e)},
),
};
}

// Finally, as the last input objects we add compiler_rt and CSU postlude (if any).
@@ -1140,11 +1150,18 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
if (csu.crtn) |v| try positionals.append(.{ .path = v });

for (positionals.items) |obj| {
var parse_ctx: ParseErrorCtx = .{ .detected_cpu_arch = undefined };
self.parsePositional(obj.path, obj.must_link, &parse_ctx) catch |err|
try self.handleAndReportParseError(obj.path, err, &parse_ctx);
self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
error.MalformedObject, error.MalformedArchive, error.InvalidCpuArch => continue, // already reported
else => |e| try self.reportParseError(
obj.path,
"unexpected error: parsing input file failed with error {s}",
.{@errorName(e)},
),
};
}

if (self.misc_errors.items.len > 0) return error.FlushFailure;

// Init all objects
for (self.objects.items) |index| {
try self.file(index).?.object.init(self);
@@ -1153,6 +1170,8 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
try self.file(index).?.shared_object.init(self);
}

if (self.misc_errors.items.len > 0) return error.FlushFailure;

// Dedup shared objects
{
var seen_dsos = std.StringHashMap(void).init(gpa);
@@ -1279,6 +1298,8 @@ pub fn flushModule(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node
self.error_flags.no_entry_point_found = false;
try self.writeElfHeader();
}

if (self.misc_errors.items.len > 0) return error.FlushFailure;
}

pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {
@@ -1300,11 +1321,18 @@ pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const
if (module_obj_path) |path| try positionals.append(.{ .path = path });

for (positionals.items) |obj| {
var parse_ctx: ParseErrorCtx = .{ .detected_cpu_arch = undefined };
self.parsePositional(obj.path, obj.must_link, &parse_ctx) catch |err|
try self.handleAndReportParseError(obj.path, err, &parse_ctx);
self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
error.MalformedObject, error.MalformedArchive, error.InvalidCpuArch => continue, // already reported
else => |e| try self.reportParseError(
obj.path,
"unexpected error: parsing input file failed with error {s}",
.{@errorName(e)},
),
};
}

if (self.misc_errors.items.len > 0) return error.FlushFailure;

// First, we flush relocatable object file generated with our backends.
if (self.zigObjectPtr()) |zig_object| {
zig_object.resolveSymbols(self);
@@ -1316,6 +1344,7 @@ pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const
try zig_object.addAtomsToRelaSections(self);
try self.updateSectionSizesObject();

try self.allocateAllocSectionsObject();
try self.allocateNonAllocSections();

if (build_options.enable_logging) {
@@ -1325,14 +1354,15 @@ pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const
try self.writeSyntheticSectionsObject();
try self.writeShdrTable();
try self.writeElfHeader();

// TODO we can avoid reading in the file contents we just wrote if we give the linker
// ability to write directly to a buffer.
try zig_object.readFileContents(self);
}

var files = std.ArrayList(File.Index).init(gpa);
defer files.deinit();
try files.ensureTotalCapacityPrecise(self.objects.items.len + 1);
// Note to self: we currently must have ZigObject written out first as we write the object
// file into the same file descriptor and then re-read its contents.
// TODO implement writing ZigObject to a buffer instead of file.
if (self.zigObjectPtr()) |zig_object| files.appendAssumeCapacity(zig_object.index);
for (self.objects.items) |index| files.appendAssumeCapacity(index);

@@ -1353,7 +1383,7 @@ pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const
for (files.items) |index| {
const file_ptr = self.file(index).?;
try file_ptr.updateArStrtab(gpa, &ar_strtab);
file_ptr.updateArSize(self);
file_ptr.updateArSize();
}

// Update file offsets of contributing objects.
@@ -1405,13 +1435,15 @@ pub fn flushStaticLib(self: *Elf, comp: *Compilation, module_obj_path: ?[]const
// Write object files
for (files.items) |index| {
if (!mem.isAligned(buffer.items.len, 2)) try buffer.writer().writeByte(0);
try self.file(index).?.writeAr(self, buffer.writer());
try self.file(index).?.writeAr(buffer.writer());
}

assert(buffer.items.len == total_size);

try self.base.file.?.setEndPos(total_size);
try self.base.file.?.pwriteAll(buffer.items, 0);

if (self.misc_errors.items.len > 0) return error.FlushFailure;
}

pub fn flushObject(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8) link.File.FlushError!void {
@@ -1432,16 +1464,25 @@ pub fn flushObject(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8)
if (module_obj_path) |path| try positionals.append(.{ .path = path });

for (positionals.items) |obj| {
var parse_ctx: ParseErrorCtx = .{ .detected_cpu_arch = undefined };
self.parsePositional(obj.path, obj.must_link, &parse_ctx) catch |err|
try self.handleAndReportParseError(obj.path, err, &parse_ctx);
self.parsePositional(obj.path, obj.must_link) catch |err| switch (err) {
error.MalformedObject, error.MalformedArchive, error.InvalidCpuArch => continue, // already reported
else => |e| try self.reportParseError(
obj.path,
"unexpected error: parsing input file failed with error {s}",
.{@errorName(e)},
),
};
}

if (self.misc_errors.items.len > 0) return error.FlushFailure;

// Init all objects
for (self.objects.items) |index| {
try self.file(index).?.object.init(self);
}

if (self.misc_errors.items.len > 0) return error.FlushFailure;

// Now, we are ready to resolve the symbols across all input files.
// We will first resolve the files in the ZigObject, next in the parsed
// input Object files.
@@ -1473,6 +1514,8 @@ pub fn flushObject(self: *Elf, comp: *Compilation, module_obj_path: ?[]const u8)
try self.writeSyntheticSectionsObject();
try self.writeShdrTable();
try self.writeElfHeader();

if (self.misc_errors.items.len > 0) return error.FlushFailure;
}

/// --verbose-link output
@@ -1760,7 +1803,8 @@ fn dumpArgv(self: *Elf, comp: *Compilation) !void {
}

const ParseError = error{
UnknownFileType,
MalformedObject,
MalformedArchive,
InvalidCpuArch,
OutOfMemory,
Overflow,
@@ -1771,34 +1815,30 @@ const ParseError = error{
InvalidCharacter,
} || LdScript.Error || std.os.AccessError || std.os.SeekError || std.fs.File.OpenError || std.fs.File.ReadError;

fn parsePositional(self: *Elf, path: []const u8, must_link: bool, ctx: *ParseErrorCtx) ParseError!void {
fn parsePositional(self: *Elf, path: []const u8, must_link: bool) ParseError!void {
const tracy = trace(@src());
defer tracy.end();
if (try Object.isObject(path)) {
try self.parseObject(path, ctx);
try self.parseObject(path);
} else {
try self.parseLibrary(.{ .path = path }, must_link, ctx);
try self.parseLibrary(.{ .path = path }, must_link);
}
}

fn parseLibrary(self: *Elf, lib: SystemLib, must_link: bool, ctx: *ParseErrorCtx) ParseError!void {
fn parseLibrary(self: *Elf, lib: SystemLib, must_link: bool) ParseError!void {
const tracy = trace(@src());
defer tracy.end();

if (try Archive.isArchive(lib.path)) {
try self.parseArchive(lib.path, must_link, ctx);
try self.parseArchive(lib.path, must_link);
} else if (try SharedObject.isSharedObject(lib.path)) {
try self.parseSharedObject(lib, ctx);
try self.parseSharedObject(lib);
} else {
// TODO if the script has a top-level comment identifying it as GNU ld script,
// then report parse errors. Otherwise return UnknownFileType.
self.parseLdScript(lib, ctx) catch |err| switch (err) {
else => return error.UnknownFileType,
};
try self.parseLdScript(lib);
}
}

fn parseObject(self: *Elf, path: []const u8, ctx: *ParseErrorCtx) ParseError!void {
fn parseObject(self: *Elf, path: []const u8) ParseError!void {
const tracy = trace(@src());
defer tracy.end();

@@ -1816,12 +1856,9 @@ fn parseObject(self: *Elf, path: []const u8, ctx: *ParseErrorCtx) ParseError!voi

const object = self.file(index).?.object;
try object.parse(self);

ctx.detected_cpu_arch = object.header.?.e_machine.toTargetCpuArch().?;
if (ctx.detected_cpu_arch != self.base.options.target.cpu.arch) return error.InvalidCpuArch;
}

fn parseArchive(self: *Elf, path: []const u8, must_link: bool, ctx: *ParseErrorCtx) ParseError!void {
fn parseArchive(self: *Elf, path: []const u8, must_link: bool) ParseError!void {
const tracy = trace(@src());
defer tracy.end();

@@ -1844,13 +1881,10 @@ fn parseArchive(self: *Elf, path: []const u8, must_link: bool, ctx: *ParseErrorC
object.alive = must_link;
try object.parse(self);
try self.objects.append(gpa, index);

ctx.detected_cpu_arch = object.header.?.e_machine.toTargetCpuArch().?;
if (ctx.detected_cpu_arch != self.base.options.target.cpu.arch) return error.InvalidCpuArch;
}
}

fn parseSharedObject(self: *Elf, lib: SystemLib, ctx: *ParseErrorCtx) ParseError!void {
fn parseSharedObject(self: *Elf, lib: SystemLib) ParseError!void {
const tracy = trace(@src());
defer tracy.end();

@@ -1870,12 +1904,9 @@ fn parseSharedObject(self: *Elf, lib: SystemLib, ctx: *ParseErrorCtx) ParseError

const shared_object = self.file(index).?.shared_object;
try shared_object.parse(self);

ctx.detected_cpu_arch = shared_object.header.?.e_machine.toTargetCpuArch().?;
if (ctx.detected_cpu_arch != self.base.options.target.cpu.arch) return error.InvalidCpuArch;
}

fn parseLdScript(self: *Elf, lib: SystemLib, ctx: *ParseErrorCtx) ParseError!void {
fn parseLdScript(self: *Elf, lib: SystemLib) ParseError!void {
const tracy = trace(@src());
defer tracy.end();

@@ -1885,15 +1916,10 @@ fn parseLdScript(self: *Elf, lib: SystemLib, ctx: *ParseErrorCtx) ParseError!voi
const data = try in_file.readToEndAlloc(gpa, std.math.maxInt(u32));
defer gpa.free(data);

var script = LdScript{};
var script = LdScript{ .path = lib.path };
defer script.deinit(gpa);
try script.parse(data, self);

if (script.cpu_arch) |cpu_arch| {
ctx.detected_cpu_arch = cpu_arch;
if (ctx.detected_cpu_arch != self.base.options.target.cpu.arch) return error.InvalidCpuArch;
}

const lib_dirs = self.base.options.lib_dirs;

var arena_allocator = std.heap.ArenaAllocator.init(gpa);
@@ -1947,11 +1973,17 @@ fn parseLdScript(self: *Elf, lib: SystemLib, ctx: *ParseErrorCtx) ParseError!voi
}

const full_path = test_path.items;
var scr_ctx: ParseErrorCtx = .{ .detected_cpu_arch = undefined };
self.parseLibrary(.{
.needed = scr_obj.needed,
.path = full_path,
}, false, &scr_ctx) catch |err| try self.handleAndReportParseError(full_path, err, &scr_ctx);
}, false) catch |err| switch (err) {
error.MalformedObject, error.MalformedArchive, error.InvalidCpuArch => continue, // already reported
else => |e| try self.reportParseError(
full_path,
"unexpected error: parsing library failed with error {s}",
.{@errorName(e)},
),
};
}
}

@@ -2177,7 +2209,7 @@ fn scanRelocs(self: *Elf) !void {
try object.scanRelocs(self, &undefs);
}

try self.reportUndefined(&undefs);
try self.reportUndefinedSymbols(&undefs);

for (self.symbols.items, 0..) |*sym, i| {
const index = @as(u32, @intCast(i));
@@ -2789,7 +2821,6 @@ fn linkWithLLD(self: *Elf, comp: *Compilation, prog_node: *std.Progress.Node) !v
}));
} else {
self.error_flags.missing_libc = true;
return error.FlushFailure;
}
}
}
@@ -2945,6 +2976,7 @@ fn writeShdrTable(self: *Elf) !void {
defer gpa.free(buf);

for (buf, 0..) |*shdr, i| {
assert(self.shdrs.items[i].sh_offset != math.maxInt(u64));
shdr.* = shdrTo32(self.shdrs.items[i]);
if (foreign_endian) {
mem.byteSwapAllFields(elf.Elf32_Shdr, shdr);
@@ -2957,6 +2989,7 @@ fn writeShdrTable(self: *Elf) !void {
defer gpa.free(buf);

for (buf, 0..) |*shdr, i| {
assert(self.shdrs.items[i].sh_offset != math.maxInt(u64));
shdr.* = self.shdrs.items[i];
if (foreign_endian) {
mem.byteSwapAllFields(elf.Elf64_Shdr, shdr);
@@ -3007,6 +3040,8 @@ fn writePhdrTable(self: *Elf) !void {
}

fn writeElfHeader(self: *Elf) !void {
if (self.misc_errors.items.len > 0) return; // We had errors, so skip flushing to render the output unusable

var hdr_buf: [@sizeOf(elf.Elf64_Ehdr)]u8 = undefined;

var index: usize = 0;
@@ -4740,7 +4775,7 @@ fn writeAtoms(self: *Elf) !void {
try self.base.file.?.pwriteAll(buffer, sh_offset);
}

try self.reportUndefined(&undefs);
try self.reportUndefinedSymbols(&undefs);
}

fn writeAtomsObject(self: *Elf) !void {
@@ -6003,7 +6038,7 @@ pub fn insertDynString(self: *Elf, name: []const u8) error{OutOfMemory}!u32 {
return off;
}

fn reportUndefined(self: *Elf, undefs: anytype) !void {
fn reportUndefinedSymbols(self: *Elf, undefs: anytype) !void {
const gpa = self.base.allocator;
const max_notes = 4;

@@ -6045,33 +6080,7 @@ fn reportMissingLibraryError(
}
}

const ParseErrorCtx = struct {
detected_cpu_arch: std.Target.Cpu.Arch,
};

fn handleAndReportParseError(
self: *Elf,
path: []const u8,
err: ParseError,
ctx: *const ParseErrorCtx,
) error{OutOfMemory}!void {
const cpu_arch = self.base.options.target.cpu.arch;
switch (err) {
error.UnknownFileType => try self.reportParseError(path, "unknown file type", .{}),
error.InvalidCpuArch => try self.reportParseError(
path,
"invalid cpu architecture: expected '{s}', but found '{s}'",
.{ @tagName(cpu_arch), @tagName(ctx.detected_cpu_arch) },
),
else => |e| try self.reportParseError(
path,
"unexpected error: parsing object failed with error {s}",
.{@errorName(e)},
),
}
}

fn reportParseError(
pub fn reportParseError(
self: *Elf,
path: []const u8,
comptime format: []const u8,
@@ -6082,6 +6091,17 @@ fn reportParseError(
try err.addNote(self, "while parsing {s}", .{path});
}

pub fn reportParseError2(
self: *Elf,
file_index: File.Index,
comptime format: []const u8,
args: anytype,
) error{OutOfMemory}!void {
var err = try self.addErrorWithNotes(1);
try err.addMsg(self, format, args);
try err.addNote(self, "while parsing {}", .{self.file(file_index).?.fmtPath()});
}

const FormatShdrCtx = struct {
elf_file: *Elf,
shdr: elf.Elf64_Shdr,

@@ -33,12 +33,10 @@ pub fn parse(self: *Archive, elf_file: *Elf) !void {
const hdr = try reader.readStruct(elf.ar_hdr);

if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) {
// TODO convert into an error
log.debug(
"{s}: invalid header delimiter: expected '{s}', found '{s}'",
.{ self.path, std.fmt.fmtSliceEscapeLower(elf.ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag) },
);
return;
try elf_file.reportParseError(self.path, "invalid archive header delimiter: {s}", .{
std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
});
return error.MalformedArchive;
}

const size = try hdr.size();

@@ -1,3 +1,4 @@
path: []const u8,
cpu_arch: ?std.Target.Cpu.Arch = null,
args: std.ArrayListUnmanaged(Elf.SystemLib) = .{},

@@ -6,7 +7,7 @@ pub fn deinit(scr: *LdScript, allocator: Allocator) void {
}

pub const Error = error{
InvalidScript,
InvalidLdScript,
UnexpectedToken,
UnknownCpuArch,
OutOfMemory,
@@ -30,13 +31,12 @@ pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
try line_col.append(.{ .line = line, .column = column });
switch (tok.id) {
.invalid => {
// TODO errors
// elf_file.base.fatal("invalid token in ld script: '{s}' ({d}:{d})", .{
// tok.get(data),
// line,
// column,
// });
return error.InvalidScript;
try elf_file.reportParseError(scr.path, "invalid token in LD script: '{s}' ({d}:{d})", .{
std.fmt.fmtSliceEscapeLower(tok.get(data)),
line,
column,
});
return error.InvalidLdScript;
},
.new_line => {
line += 1;
@@ -55,17 +55,16 @@ pub fn parse(scr: *LdScript, data: []const u8, elf_file: *Elf) Error!void {
.args = &args,
}) catch |err| switch (err) {
error.UnexpectedToken => {
// const last_token_id = parser.it.pos - 1;
// const last_token = parser.it.get(last_token_id);
// const lcol = line_col.items[last_token_id];
// TODO errors
// elf_file.base.fatal("unexpected token in ld script: {s} : '{s}' ({d}:{d})", .{
// @tagName(last_token.id),
// last_token.get(data),
// lcol.line,
// lcol.column,
// });
return error.InvalidScript;
const last_token_id = parser.it.pos - 1;
const last_token = parser.it.get(last_token_id);
const lcol = line_col.items[last_token_id];
try elf_file.reportParseError(scr.path, "unexpected token in LD script: {s}: '{s}' ({d}:{d})", .{
@tagName(last_token.id),
last_token.get(data),
lcol.line,
lcol.column,
});
return error.InvalidLdScript;
},
else => |e| return e,
};

@@ -54,10 +54,30 @@ pub fn parse(self: *Object, elf_file: *Elf) !void {

self.header = try reader.readStruct(elf.Elf64_Ehdr);

if (elf_file.base.options.target.cpu.arch != self.header.?.e_machine.toTargetCpuArch().?) {
try elf_file.reportParseError2(
self.index,
"invalid cpu architecture: {s}",
.{@tagName(self.header.?.e_machine.toTargetCpuArch().?)},
);
return error.InvalidCpuArch;
}

if (self.header.?.e_shnum == 0) return;

const gpa = elf_file.base.allocator;

if (self.data.len < self.header.?.e_shoff or
self.data.len < self.header.?.e_shoff + @as(u64, @intCast(self.header.?.e_shnum)) * @sizeOf(elf.Elf64_Shdr))
{
try elf_file.reportParseError2(
self.index,
"corrupt header: section header table extends past the end of file",
.{},
);
return error.MalformedObject;
}

const shoff = math.cast(usize, self.header.?.e_shoff) orelse return error.Overflow;
const shdrs = @as(
[*]align(1) const elf.Elf64_Shdr,
@@ -66,10 +86,23 @@ pub fn parse(self: *Object, elf_file: *Elf) !void {
try self.shdrs.ensureTotalCapacityPrecise(gpa, shdrs.len);

for (shdrs) |shdr| {
if (shdr.sh_type != elf.SHT_NOBITS) {
if (self.data.len < shdr.sh_offset or self.data.len < shdr.sh_offset + shdr.sh_size) {
try elf_file.reportParseError2(self.index, "corrupt section: extends past the end of file", .{});
return error.MalformedObject;
}
}
self.shdrs.appendAssumeCapacity(try ElfShdr.fromElf64Shdr(shdr));
}

try self.strtab.appendSlice(gpa, self.shdrContents(self.header.?.e_shstrndx));
const shstrtab = self.shdrContents(self.header.?.e_shstrndx);
for (shdrs) |shdr| {
if (shdr.sh_name >= shstrtab.len) {
try elf_file.reportParseError2(self.index, "corrupt section name offset", .{});
return error.MalformedObject;
}
}
try self.strtab.appendSlice(gpa, shstrtab);

const symtab_index = for (self.shdrs.items, 0..) |shdr, i| switch (shdr.sh_type) {
elf.SHT_SYMTAB => break @as(u16, @intCast(i)),
@@ -81,7 +114,10 @@ pub fn parse(self: *Object, elf_file: *Elf) !void {
self.first_global = shdr.sh_info;

const raw_symtab = self.shdrContents(index);
const nsyms = @divExact(raw_symtab.len, @sizeOf(elf.Elf64_Sym));
const nsyms = math.divExact(usize, raw_symtab.len, @sizeOf(elf.Elf64_Sym)) catch {
try elf_file.reportParseError2(self.index, "symbol table not evenly divisible", .{});
return error.MalformedObject;
};
const symtab = @as([*]align(1) const elf.Elf64_Sym, @ptrCast(raw_symtab.ptr))[0..nsyms];

const strtab_bias = @as(u32, @intCast(self.strtab.items.len));

@@ -52,6 +52,27 @@ pub fn parse(self: *SharedObject, elf_file: *Elf) !void {
const reader = stream.reader();

self.header = try reader.readStruct(elf.Elf64_Ehdr);

if (elf_file.base.options.target.cpu.arch != self.header.?.e_machine.toTargetCpuArch().?) {
try elf_file.reportParseError2(
self.index,
"invalid cpu architecture: {s}",
.{@tagName(self.header.?.e_machine.toTargetCpuArch().?)},
);
return error.InvalidCpuArch;
}

if (self.data.len < self.header.?.e_shoff or
self.data.len < self.header.?.e_shoff + @as(u64, @intCast(self.header.?.e_shnum)) * @sizeOf(elf.Elf64_Shdr))
{
try elf_file.reportParseError2(
self.index,
"corrupted header: section header table extends past the end of file",
.{},
);
return error.MalformedObject;
}

const shoff = std.math.cast(usize, self.header.?.e_shoff) orelse return error.Overflow;

const shdrs = @as(
@@ -61,6 +82,10 @@ pub fn parse(self: *SharedObject, elf_file: *Elf) !void {
try self.shdrs.ensureTotalCapacityPrecise(gpa, shdrs.len);

for (shdrs, 0..) |shdr, i| {
if (self.data.len < shdr.sh_offset or self.data.len < shdr.sh_offset + shdr.sh_size) {
try elf_file.reportParseError2(self.index, "corrupted section header", .{});
return error.MalformedObject;
}
self.shdrs.appendAssumeCapacity(try ElfShdr.fromElf64Shdr(shdr));
switch (shdr.sh_type) {
elf.SHT_DYNSYM => self.dynsym_sect_index = @as(u16, @intCast(i)),

@@ -3,6 +3,7 @@
//! and any relocations that may have been emitted.
//! Think about this as fake in-memory Object file for the Zig module.

data: std.ArrayListUnmanaged(u8) = .{},
path: []const u8,
index: File.Index,

@@ -101,6 +102,7 @@ pub fn init(self: *ZigObject, elf_file: *Elf) !void {
}

pub fn deinit(self: *ZigObject, allocator: Allocator) void {
self.data.deinit(allocator);
allocator.free(self.path);
self.local_esyms.deinit(allocator);
self.global_esyms.deinit(allocator);
@@ -441,6 +443,27 @@ pub fn markLive(self: *ZigObject, elf_file: *Elf) void {
}
}

/// This is just a temporary helper function that allows us to re-read what we wrote to file into a buffer.
/// We need this so that we can write to an archive.
/// TODO implement writing ZigObject data directly to a buffer instead.
pub fn readFileContents(self: *ZigObject, elf_file: *Elf) !void {
const gpa = elf_file.base.allocator;
const shsize: u64 = switch (elf_file.ptr_width) {
.p32 => @sizeOf(elf.Elf32_Shdr),
.p64 => @sizeOf(elf.Elf64_Shdr),
};
var end_pos: u64 = elf_file.shdr_table_offset.? + elf_file.shdrs.items.len * shsize;
for (elf_file.shdrs.items) |shdr| {
if (shdr.sh_type == elf.SHT_NOBITS) continue;
end_pos = @max(end_pos, shdr.sh_offset + shdr.sh_size);
}
const size = std.math.cast(usize, end_pos) orelse return error.Overflow;
try self.data.resize(gpa, size);

const amt = try elf_file.base.file.?.preadAll(self.data.items, 0);
if (amt != size) return error.InputOutput;
}

pub fn updateArSymtab(self: ZigObject, ar_symtab: *Archive.ArSymtab, elf_file: *Elf) error{OutOfMemory}!void {
const gpa = elf_file.base.allocator;

@@ -457,34 +480,21 @@ pub fn updateArSymtab(self: ZigObject, ar_symtab: *Archive.ArSymtab, elf_file: *
}
}

pub fn updateArSize(self: *ZigObject, elf_file: *Elf) void {
var end_pos: u64 = elf_file.shdr_table_offset.?;
for (elf_file.shdrs.items) |shdr| {
end_pos = @max(end_pos, shdr.sh_offset + shdr.sh_size);
}
self.output_ar_state.size = end_pos;
pub fn updateArSize(self: *ZigObject) void {
self.output_ar_state.size = self.data.items.len;
}

pub fn writeAr(self: ZigObject, elf_file: *Elf, writer: anytype) !void {
const gpa = elf_file.base.allocator;

const size = std.math.cast(usize, self.output_ar_state.size) orelse return error.Overflow;
const contents = try gpa.alloc(u8, size);
defer gpa.free(contents);

const amt = try elf_file.base.file.?.preadAll(contents, 0);
if (amt != self.output_ar_state.size) return error.InputOutput;

pub fn writeAr(self: ZigObject, writer: anytype) !void {
const name = self.path;
const hdr = Archive.setArHdr(.{
.name = if (name.len <= Archive.max_member_name_len)
.{ .name = name }
else
.{ .name_off = self.output_ar_state.name_off },
.size = @intCast(size),
.size = @intCast(self.data.items.len),
});
try writer.writeAll(mem.asBytes(&hdr));
try writer.writeAll(contents);
try writer.writeAll(self.data.items);
}

pub fn addAtomsToRelaSections(self: ZigObject, elf_file: *Elf) !void {

@@ -162,17 +162,17 @@ pub const File = union(enum) {
state.name_off = try ar_strtab.insert(allocator, path);
}

pub fn updateArSize(file: File, elf_file: *Elf) void {
pub fn updateArSize(file: File) void {
return switch (file) {
.zig_object => |x| x.updateArSize(elf_file),
.zig_object => |x| x.updateArSize(),
.object => |x| x.updateArSize(),
inline else => unreachable,
};
}

pub fn writeAr(file: File, elf_file: *Elf, writer: anytype) !void {
pub fn writeAr(file: File, writer: anytype) !void {
return switch (file) {
.zig_object => |x| x.writeAr(elf_file, writer),
.zig_object => |x| x.writeAr(writer),
.object => |x| x.writeAr(writer),
inline else => unreachable,
};

@@ -29,6 +29,7 @@ pub fn testAll(b: *Build) *Step {

// Exercise linker in ar mode
elf_step.dependOn(testEmitStaticLib(b, .{ .target = musl_target }));
elf_step.dependOn(testEmitStaticLibZig(b, .{ .use_llvm = false, .target = musl_target }));

// Exercise linker with self-hosted backend (no LLVM)
elf_step.dependOn(testGcSectionsZig(b, .{ .use_llvm = false, .target = default_target }));
@@ -743,6 +744,42 @@ fn testEmitStaticLib(b: *Build, opts: Options) *Step {
return test_step;
}

fn testEmitStaticLibZig(b: *Build, opts: Options) *Step {
const test_step = addTestStep(b, "emit-static-lib-zig", opts);

const obj1 = addObject(b, "obj1", opts);
addZigSourceBytes(obj1,
\\export var foo: i32 = 42;
\\export var bar: i32 = 2;
);

const lib = addStaticLibrary(b, "lib", opts);
addZigSourceBytes(lib,
\\extern var foo: i32;
\\extern var bar: i32;
\\export fn fooBar() i32 {
\\ return foo + bar;
\\}
);
lib.addObject(obj1);

const exe = addExecutable(b, "test", opts);
addZigSourceBytes(exe,
\\const std = @import("std");
\\extern fn fooBar() i32;
\\pub fn main() void {
\\ std.debug.print("{d}", .{fooBar()});
\\}
);
exe.linkLibrary(lib);

const run = addRunArtifact(exe);
run.expectStdErrEqual("44");
test_step.dependOn(&run.step);

return test_step;
}

fn testEmptyObject(b: *Build, opts: Options) *Step {
const test_step = addTestStep(b, "empty-object", opts);

@@ -1875,7 +1912,7 @@ fn testMismatchedCpuArchitectureError(b: *Build, opts: Options) *Step {
exe.linkLibC();

expectLinkErrors(exe, test_step, .{ .exact = &.{
"invalid cpu architecture: expected 'x86_64', but found 'aarch64'",
"invalid cpu architecture: aarch64",
"note: while parsing /?/a.o",
} });

@@ -3305,10 +3342,10 @@ fn testUnknownFileTypeError(b: *Build, opts: Options) *Step {
exe.linkLibC();

expectLinkErrors(exe, test_step, .{ .exact = &.{
"unknown file type",
"invalid token in LD script: '\\x00\\x00\\x00\\x0c\\x00\\x00\\x00/usr/lib/dyld\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x0d' (0:829)",
"note: while parsing /?/liba.dylib",
"unexpected error: parsing input file failed with error InvalidLdScript",
"note: while parsing /?/liba.dylib",
"undefined symbol: foo",
"note: referenced by /?/a.o:.text",
} });

return test_step;