
Commit

Merge pull request #82 from kubkon/zig-sync
update to latest zig
kubkon authored Nov 23, 2023
2 parents 72dae32 + 32a2d1e commit 19ccd5c
Showing 15 changed files with 39 additions and 40 deletions.
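Note: most of this diff is a mechanical `var` to `const` migration. Newer Zig compilers (the 0.12-dev series this commit syncs against) treat a local that is declared `var` but never reassigned as a compile error and suggest `const`, even when the memory the local points to is later written through. A minimal sketch of the pattern, not taken from this repository:

```zig
const std = @import("std");

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    // The slice itself is never reassigned, so under the new rule it must be
    // `const`; writing through it with @memset is still allowed.
    const buffer = try gpa.alloc(u8, 16);
    defer gpa.free(buffer);
    @memset(buffer, 0);
}
```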
10 changes: 5 additions & 5 deletions build.zig.zon
@@ -4,14 +4,14 @@

.dependencies = .{
.@"zig-yaml" = .{
.url = "https://github.com/kubkon/zig-yaml/archive/2b6de6354b7211abd1cf6c9d22d0b23402613e24.tar.gz",
.hash = "122055ce75eb5bf7dafab5dcb007f945672e10bf89156f2eec4ae0de6e682adb2596",
.url = "https://github.com/kubkon/zig-yaml/archive/1ed4925bed911b73a189526a6ad82bd8c5c2079a.tar.gz",
.hash = "1220f56d186377820d7ad62ee03987acdd53bc24da83e8f6dff571bc7343f789f69a",
},
.@"zig-dis-x86_64" = .{
.url = "https://github.com/kubkon/zig-dis-x86_64/archive/7b27010130df6763f813d2439e075e45327ac745.tar.gz",
.hash = "122047272f21ffb6e2577dc000c3bea9377049252c4bdcf6c56a4d56ad59b1ae6280",
.url = "https://github.com/kubkon/zig-dis-x86_64/archive/a9155631990aa6d56fa06fddef304cabb94a0681.tar.gz",
.hash = "1220a4d63ba372f6b5a0fc262f863572dc119727b469f6ccf527ad91790e353bb0f0",
},
},

.paths = .{ "" },
.paths = .{""},
}
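The URLs and hashes pinned above are what `build.zig` resolves when it calls `b.dependency`; the hash is verified against the fetched tarball. A rough sketch of how such build.zig.zon entries are typically consumed, using the build API as it existed around this commit (Zig 0.11 / late-2023 0.12-dev); the executable name, source path, and the "yaml" module name are assumptions, not taken from this repository:

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Hypothetical executable; name and root path are placeholders.
    const exe = b.addExecutable(.{
        .name = "example",
        .root_source_file = .{ .path = "src/main.zig" },
        .target = target,
        .optimize = optimize,
    });

    // "zig-yaml" matches the key in build.zig.zon; the module name "yaml" is
    // an assumption about the dependency's own build.zig.
    const yaml_dep = b.dependency("zig-yaml", .{ .target = target, .optimize = optimize });
    exe.addModule("yaml", yaml_dep.module("yaml"));

    b.installArtifact(exe);
}
```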
2 changes: 1 addition & 1 deletion src/Elf.zig
@@ -2124,7 +2124,7 @@ fn writeAtoms(self: *Elf) !void {

log.debug("writing atoms in '{s}' section", .{self.shstrtab.getAssumeExists(shdr.sh_name)});

- var buffer = try self.base.allocator.alloc(u8, shdr.sh_size);
+ const buffer = try self.base.allocator.alloc(u8, shdr.sh_size);
defer self.base.allocator.free(buffer);
const padding_byte: u8 = if (shdr.sh_type == elf.SHT_PROGBITS and
shdr.sh_flags & elf.SHF_EXECINSTR != 0)
2 changes: 1 addition & 1 deletion src/Elf/eh_frame.zig
@@ -223,7 +223,7 @@ pub const Iterator = struct {
var stream = std.io.fixedBufferStream(it.data[it.pos..]);
const reader = stream.reader();

- var size = try reader.readInt(u32, .little);
+ const size = try reader.readInt(u32, .little);
if (size == 0xFFFFFFFF) @panic("TODO");

const id = try reader.readInt(u32, .little);
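For context on the iterator above: CFI records in `.eh_frame` start with a 4-byte length, and the reserved value 0xFFFFFFFF would introduce the 64-bit DWARF format (an 8-byte length follows), which this iterator leaves as a TODO. A hedged sketch of that framing, independent of this repository's types:

```zig
const std = @import("std");

/// Reads the header of one .eh_frame record (32-bit DWARF format only).
/// In .eh_frame, an ID of 0 marks a CIE; any other value is an FDE's
/// backward offset to its parent CIE.
fn peekRecord(data: []const u8) !struct { size: u32, is_cie: bool } {
    var stream = std.io.fixedBufferStream(data);
    const reader = stream.reader();

    const size = try reader.readInt(u32, .little);
    if (size == 0xFFFFFFFF) return error.Unsupported64BitDwarf;

    const id = try reader.readInt(u32, .little);
    return .{ .size = size, .is_cie = id == 0 };
}

test "peekRecord classifies a CIE" {
    // length = 4, id = 0 (CIE)
    const bytes = [_]u8{ 4, 0, 0, 0, 0, 0, 0, 0 };
    const rec = try peekRecord(&bytes);
    try std.testing.expectEqual(@as(u32, 4), rec.size);
    try std.testing.expect(rec.is_cie);
}
```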
4 changes: 2 additions & 2 deletions src/MachO.zig
@@ -388,7 +388,7 @@ pub fn flush(self: *MachO) !void {
const size = linkedit.fileoff - start;
if (size > 0) {
log.debug("zeroing out zerofill area of length {x} at {x}", .{ size, start });
- var padding = try self.base.allocator.alloc(u8, size);
+ const padding = try self.base.allocator.alloc(u8, size);
defer self.base.allocator.free(padding);
@memset(padding, 0);
try self.base.file.pwriteAll(padding, start);
@@ -3006,7 +3006,7 @@ fn writeDyldInfoData(self: *MachO) !void {
link_seg.filesize = needed_size;
assert(mem.isAlignedGeneric(u64, link_seg.fileoff + link_seg.filesize, @alignOf(u64)));

- var buffer = try gpa.alloc(u8, needed_size);
+ const buffer = try gpa.alloc(u8, needed_size);
defer gpa.free(buffer);
@memset(buffer, 0);

6 changes: 3 additions & 3 deletions src/MachO/Archive.zig
@@ -112,7 +112,7 @@ pub fn parse(self: *Archive, allocator: Allocator, reader: anytype, macho_file:
self.header = try reader.readStruct(ar_hdr);

const name_or_length = try self.header.nameOrLength();
- var embedded_name = try parseName(allocator, name_or_length, reader);
+ const embedded_name = try parseName(allocator, name_or_length, reader);
log.debug("parsing archive '{s}' at '{s}'", .{ embedded_name, self.name });
defer allocator.free(embedded_name);

Expand All @@ -138,7 +138,7 @@ fn parseName(allocator: Allocator, name_or_length: ar_hdr.NameOrLength, reader:

fn parseTableOfContents(self: *Archive, allocator: Allocator, reader: anytype, macho_file: *MachO) !void {
const symtab_size = try reader.readInt(u32, .little);
- var symtab = try allocator.alloc(u8, symtab_size);
+ const symtab = try allocator.alloc(u8, symtab_size);
defer allocator.free(symtab);

reader.readNoEof(symtab) catch return macho_file.base.fatal(
Expand All @@ -147,7 +147,7 @@ fn parseTableOfContents(self: *Archive, allocator: Allocator, reader: anytype, m
);

const strtab_size = try reader.readInt(u32, .little);
- var strtab = try allocator.alloc(u8, strtab_size);
+ const strtab = try allocator.alloc(u8, strtab_size);
defer allocator.free(strtab);

reader.readNoEof(strtab) catch return macho_file.base.fatal(
6 changes: 3 additions & 3 deletions src/MachO/Dylib.zig
@@ -185,7 +185,7 @@ pub fn parseFromBinary(
.REEXPORT_DYLIB => {
if (should_lookup_reexports) {
// Parse install_name to dependent dylib.
- var id = try Id.fromLoadCommand(
+ const id = try Id.fromLoadCommand(
allocator,
cmd.cast(macho.dylib_command).?,
cmd.getDylibPathName(),
@@ -442,7 +442,7 @@ pub fn parseFromStub(

log.debug(" (found re-export '{s}')", .{lib});

- var dep_id = try Id.default(allocator, lib);
+ const dep_id = try Id.default(allocator, lib);
try dependent_libs.writeItem(.{ .id = dep_id, .parent = dylib_id });
}
}
@@ -559,7 +559,7 @@ pub fn parseFromStub(

log.debug(" (found re-export '{s}')", .{lib});

- var dep_id = try Id.default(allocator, lib);
+ const dep_id = try Id.default(allocator, lib);
try dependent_libs.writeItem(.{ .id = dep_id, .parent = dylib_id });
}
}
2 changes: 1 addition & 1 deletion src/MachO/Object.zig
@@ -888,7 +888,7 @@ pub fn getSourceSectionIndexByName(self: Object, segname: []const u8, sectname:
} else return null;
}

- pub fn getSourceSections(self: Object) []const macho.section_64 {
+ pub fn getSourceSections(self: Object) []align(1) const macho.section_64 {
var it = LoadCommandIterator{
.ncmds = self.header.ncmds,
.buffer = self.contents[@sizeOf(macho.mach_header_64)..][0..self.header.sizeofcmds],
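The new `[]align(1)` return type acknowledges that section headers taken straight out of a load-command byte buffer carry no alignment guarantee. A minimal sketch of producing such a slice from raw bytes, assuming the buffer starts at a `segment_command_64` (not the repository's actual LoadCommandIterator):

```zig
const std = @import("std");
const macho = std.macho;

/// Reinterprets the bytes following a segment_command_64 as its section
/// headers. The source buffer is only byte-aligned, hence align(1).
fn sectionsFromCommand(cmd_data: []const u8, nsects: u32) []align(1) const macho.section_64 {
    const bytes = cmd_data[@sizeOf(macho.segment_command_64)..];
    const ptr: [*]align(1) const macho.section_64 = @ptrCast(bytes.ptr);
    return ptr[0..nsects];
}
```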
20 changes: 10 additions & 10 deletions src/MachO/Trie.zig
@@ -95,7 +95,7 @@ pub const Node = struct {
// To: A -> C -> B
const mid = try allocator.create(Node);
mid.* = .{ .base = self.base };
- var to_label = try allocator.dupe(u8, edge.label[match..]);
+ const to_label = try allocator.dupe(u8, edge.label[match..]);
allocator.free(edge.label);
const to_node = edge.to;
edge.to = mid;
@@ -242,7 +242,7 @@ pub const Node = struct {
/// Updates offset of this node in the output byte stream.
fn finalize(self: *Node, offset_in_trie: u64) !FinalizeResult {
var stream = std.io.countingWriter(std.io.null_writer);
- var writer = stream.writer();
+ const writer = stream.writer();

var node_size: u64 = 0;
if (self.terminal_info) |info| {
@@ -392,7 +392,7 @@ pub fn deinit(self: *Trie, allocator: Allocator) void {
}

test "Trie node count" {
- var gpa = testing.allocator;
+ const gpa = testing.allocator;
var trie: Trie = .{};
defer trie.deinit(gpa);
try trie.init(gpa);
@@ -438,7 +438,7 @@ test "Trie node count" {
}

test "Trie basic" {
- var gpa = testing.allocator;
+ const gpa = testing.allocator;
var trie: Trie = .{};
defer trie.deinit(gpa);
try trie.init(gpa);
@@ -496,7 +496,7 @@ fn expectEqualHexStrings(expected: []const u8, given: []const u8) !void {
const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)});
defer testing.allocator.free(given_fmt);
const idx = mem.indexOfDiff(u8, expected_fmt, given_fmt).?;
- var padding = try testing.allocator.alloc(u8, idx + 5);
+ const padding = try testing.allocator.alloc(u8, idx + 5);
defer testing.allocator.free(padding);
@memset(padding, ' ');
std.debug.print("\nEXP: {s}\nGIV: {s}\n{s}^ -- first differing byte\n", .{ expected_fmt, given_fmt, padding });
@@ -534,7 +534,7 @@ test "write Trie to a byte stream" {
0x3, 0x0, 0x80, 0x20, 0x0, // terminal node
};

- var buffer = try gpa.alloc(u8, trie.size);
+ const buffer = try gpa.alloc(u8, trie.size);
defer gpa.free(buffer);
var stream = std.io.fixedBufferStream(buffer);
{
Expand All @@ -550,7 +550,7 @@ test "write Trie to a byte stream" {
}

test "parse Trie from byte stream" {
- var gpa = testing.allocator;
+ const gpa = testing.allocator;

const in_buffer = [_]u8{
0x0, 0x1, // node root
Expand All @@ -573,15 +573,15 @@ test "parse Trie from byte stream" {

try trie.finalize(gpa);

- var out_buffer = try gpa.alloc(u8, trie.size);
+ const out_buffer = try gpa.alloc(u8, trie.size);
defer gpa.free(out_buffer);
var out_stream = std.io.fixedBufferStream(out_buffer);
_ = try trie.write(out_stream.writer());
try expectEqualHexStrings(&in_buffer, out_buffer);
}

test "ordering bug" {
- var gpa = testing.allocator;
+ const gpa = testing.allocator;
var trie: Trie = .{};
defer trie.deinit(gpa);
try trie.init(gpa);
Expand All @@ -605,7 +605,7 @@ test "ordering bug" {
0x00, 0x12, 0x03, 0x00, 0xD8, 0x0A, 0x00,
};

- var buffer = try gpa.alloc(u8, trie.size);
+ const buffer = try gpa.alloc(u8, trie.size);
defer gpa.free(buffer);
var stream = std.io.fixedBufferStream(buffer);
// Writing finalized trie again should yield the same result.
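In the `finalize` hunk above, the writer handle returned by `std.io.countingWriter` can be `const` because the byte counter lives in the stream state, not in the writer value. A small self-contained check of that behaviour (not from this repository):

```zig
const std = @import("std");

test "countingWriter tracks bytes without mutating the writer handle" {
    var counting = std.io.countingWriter(std.io.null_writer);
    const writer = counting.writer(); // never reassigned, so const is fine
    try writer.writeAll("hello");
    try writer.writeByte('!');
    try std.testing.expectEqual(@as(u64, 6), counting.bytes_written);
}
```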
2 changes: 1 addition & 1 deletion src/MachO/UnwindInfo.zig
@@ -441,7 +441,7 @@ pub fn collect(info: *UnwindInfo, macho_file: *MachO) !void {
gop.value_ptr.count += 1;
}

- var slice = common_encodings_counts.values();
+ const slice = common_encodings_counts.values();
mem.sort(CommonEncWithCount, slice, {}, CommonEncWithCount.greaterThan);

var i: u7 = 0;
2 changes: 1 addition & 1 deletion src/MachO/eh_frame.zig
@@ -600,7 +600,7 @@ pub const Iterator = struct {
var stream = std.io.fixedBufferStream(it.data[it.pos..]);
const reader = stream.reader();

- var size = try reader.readInt(u32, .little);
+ const size = try reader.readInt(u32, .little);
if (size == 0xFFFFFFFF) {
macho_file.base.fatal("MachO doesn't support 64bit DWARF CFI __eh_frame records", .{});
return null;
2 changes: 1 addition & 1 deletion src/MachO/load_commands.zig
@@ -112,7 +112,7 @@ pub fn calcMinHeaderPad(macho_file: *MachO) !u64 {
log.debug("minimum requested headerpad size 0x{x}", .{padding + @sizeOf(macho.mach_header_64)});

if (options.headerpad_max_install_names) {
- var min_headerpad_size: u32 = try calcLCsSize(macho_file, true);
+ const min_headerpad_size: u32 = try calcLCsSize(macho_file, true);
log.debug("headerpad_max_install_names minimum headerpad size 0x{x}", .{
min_headerpad_size + @sizeOf(macho.mach_header_64),
});
2 changes: 1 addition & 1 deletion src/Wasm.zig
@@ -674,7 +674,7 @@ fn resolveSymbolsInArchives(wasm: *Wasm) !void {
// Parse object and resolve symbols again before we check remaining
// undefined symbols.
const object_file_index = @as(u16, @intCast(wasm.objects.items.len));
- var object = try archive.parseObject(wasm.base.allocator, offset.items[0]);
+ const object = try archive.parseObject(wasm.base.allocator, offset.items[0]);
try wasm.objects.append(wasm.base.allocator, object);
try wasm.resolveSymbolsInObject(object_file_index);

15 changes: 7 additions & 8 deletions src/Wasm/Object.zig
@@ -260,7 +260,7 @@ fn checkLegacyIndirectFunctionTable(object: *Object) !?Symbol {
return error.MissingTableSymbols;
}

- var table_import: types.Import = for (object.imports) |imp| {
+ const table_import: types.Import = for (object.imports) |imp| {
if (imp.kind == .table) {
break imp;
}
@@ -532,7 +532,7 @@ fn Parser(comptime ReaderType: type) type {
try assertEnd(reader);
},
.code => {
- var start = reader.context.bytes_left;
+ const start = reader.context.bytes_left;
var index: u32 = 0;
const count = try readLeb(u32, reader);
const imported_function_count = parser.object.importedCountByKind(.function);
Expand All @@ -556,7 +556,7 @@ fn Parser(comptime ReaderType: type) type {
try parser.object.relocatable_data.put(gpa, .code, try relocatable_data.toOwnedSlice());
},
.data => {
- var start = reader.context.bytes_left;
+ const start = reader.context.bytes_left;
var index: u32 = 0;
const count = try readLeb(u32, reader);
var relocatable_data = try std.ArrayList(RelocatableData).initCapacity(gpa, count);
@@ -861,11 +861,10 @@ fn ElementType(comptime ptr: type) type {
/// signedness of the given type `T`.
/// Asserts `T` is an integer.
fn readLeb(comptime T: type, reader: anytype) !T {
- if (comptime std.meta.trait.isSignedInt(T)) {
- return try leb.readILEB128(T, reader);
- } else {
- return try leb.readULEB128(T, reader);
- }
+ return switch (@typeInfo(T).Int.signedness) {
+ .signed => try leb.readILEB128(T, reader),
+ .unsigned => try leb.readULEB128(T, reader),
+ };
}

/// Reads an enum type from the given reader.
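The hunk above drops `std.meta.trait` (removed from the standard library) in favour of inspecting `@typeInfo` directly. A hedged round-trip example of the same dispatch using `std.leb`, with function names as they existed in Zig 0.11/0.12 (later releases rename them to `readUleb128`/`readIleb128`):

```zig
const std = @import("std");
const leb = std.leb;

fn readLeb(comptime T: type, reader: anytype) !T {
    return switch (@typeInfo(T).Int.signedness) {
        .signed => try leb.readILEB128(T, reader),
        .unsigned => try leb.readULEB128(T, reader),
    };
}

test "readLeb dispatches on signedness" {
    // 0xE5 0x8E 0x26 is the ULEB128 encoding of 624485.
    var unsigned_stream = std.io.fixedBufferStream(&[_]u8{ 0xE5, 0x8E, 0x26 });
    try std.testing.expectEqual(@as(u32, 624485), try readLeb(u32, unsigned_stream.reader()));

    // 0x7F is the SLEB128 encoding of -1.
    var signed_stream = std.io.fixedBufferStream(&[_]u8{0x7F});
    try std.testing.expectEqual(@as(i32, -1), try readLeb(i32, signed_stream.reader()));
}
```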
2 changes: 1 addition & 1 deletion src/Wasm/emit_wasm.zig
@@ -554,7 +554,7 @@ fn emitExport(exported: std.wasm.Export, writer: anytype) !void {

fn emitElement(wasm: *Wasm, writer: anytype) !void {
// passive, with implicit 0-index table
- var flags: u32 = 0;
+ const flags: u32 = 0;
try leb.writeULEB128(writer, flags);
// Start the function table at index 1
try emitInitExpression(.{ .i32_const = 1 }, writer);
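The element-section emitter above writes its flags as a ULEB128; for a small value like 0 that is a single byte. A minimal check, assuming nothing beyond `std.leb` (function name as of Zig 0.11/0.12):

```zig
const std = @import("std");

test "ULEB128 of a small flags value is one byte" {
    var buf: [8]u8 = undefined;
    var stream = std.io.fixedBufferStream(&buf);
    const flags: u32 = 0;
    try std.leb.writeULEB128(stream.writer(), flags);
    try std.testing.expectEqualSlices(u8, &[_]u8{0}, stream.getWritten());
}
```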
2 changes: 1 addition & 1 deletion test/elf.zig
@@ -3,7 +3,7 @@ pub fn addElfTests(b: *Build, options: common.Options) *Step {

if (builtin.target.ofmt != .elf) return skipTestStep(elf_step);

- var opts = Options{
+ const opts = Options{
.zld = options.zld,
.is_musl = options.is_musl,
.has_zig = options.has_zig,
