(wip) update wasm linker to new Writer API

parent 3280fc98f3
commit 168da23d8f
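The pattern applied throughout this diff: emit helpers stop appending into a gpa-backed std.ArrayListUnmanaged(u8) via ensureUnusedCapacity plus leb.writeUleb128(code.fixedWriter(), ...), and instead take a *std.Io.Writer and call its byte/LEB helpers directly, leaving buffering to the caller and surfacing failures as error.WriteFailed. A minimal before/after sketch of that shape, assuming the std.Io.Writer interface used below (writeByte, writeLeb128) and a Writer.fixed constructor analogous to the Reader.fixed seen later in the diff; the emitU32Const* helpers are illustrative names, not functions from this commit:

const std = @import("std");
const Writer = std.Io.Writer;

// Old shape: reserve worst-case capacity up front, then LEB-encode through a
// fixed writer over the ArrayList's remaining capacity.
fn emitU32ConstOld(gpa: std.mem.Allocator, code: *std.ArrayListUnmanaged(u8), value: u32) !void {
    try code.ensureUnusedCapacity(gpa, 6);
    code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
    std.leb.writeUleb128(code.fixedWriter(), value) catch unreachable;
}

// New shape: take a *Writer and emit directly; allocation/buffering belongs to
// the caller and errors surface as Writer.Error (WriteFailed).
fn emitU32Const(writer: *Writer, value: u32) Writer.Error!void {
    try writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_const));
    try writer.writeLeb128(value);
}

test emitU32Const {
    var buf: [8]u8 = undefined;
    var w: Writer = .fixed(&buf); // assumes Writer.fixed, mirroring the Reader.fixed used elsewhere in this diff
    try emitU32Const(&w, 5);
    try std.testing.expect(buf[0] == 0x41 and buf[1] == 5); // 0x41 = i32.const opcode
}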
@@ -4,6 +4,7 @@ const std = @import("std");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const leb = std.leb;
const Writer = std.Io.Writer;

const Wasm = link.File.Wasm;
const Mir = @import("Mir.zig");
@@ -14,16 +15,16 @@ const codegen = @import("../../codegen.zig");

mir: Mir,
wasm: *Wasm,
/// The binary representation that will be emitted by this module.
code: *std.ArrayListUnmanaged(u8),
/// The binary representation of this module is written here.
writer: *Writer,

pub const Error = error{
OutOfMemory,
WriteFailed,
};

pub fn lowerToCode(emit: *Emit) Error!void {
const mir = &emit.mir;
const code = emit.code;
const writer = emit.writer;
const wasm = emit.wasm;
const comp = wasm.base.comp;
const gpa = comp.gpa;
@@ -41,18 +42,19 @@ pub fn lowerToCode(emit: *Emit) Error!void {
},
.block, .loop => {
const block_type = datas[inst].block_type;
try code.ensureUnusedCapacity(gpa, 2);
code.appendAssumeCapacity(@intFromEnum(tags[inst]));
code.appendAssumeCapacity(@intFromEnum(block_type));
try writer.writeAll(&.{
@intFromEnum(tags[inst]),
@intFromEnum(block_type),
});

inst += 1;
continue :loop tags[inst];
},
.uav_ref => {
if (is_obj) {
try uavRefObj(wasm, code, datas[inst].ip_index, 0, is_wasm32);
try uavRefObj(wasm, writer, datas[inst].ip_index, 0, is_wasm32);
} else {
try uavRefExe(wasm, code, datas[inst].ip_index, 0, is_wasm32);
try uavRefExe(wasm, writer, datas[inst].ip_index, 0, is_wasm32);
}
inst += 1;
continue :loop tags[inst];
@@ -60,20 +62,20 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.uav_ref_off => {
const extra = mir.extraData(Mir.UavRefOff, datas[inst].payload).data;
if (is_obj) {
try uavRefObj(wasm, code, extra.value, extra.offset, is_wasm32);
try uavRefObj(wasm, writer, extra.value, extra.offset, is_wasm32);
} else {
try uavRefExe(wasm, code, extra.value, extra.offset, is_wasm32);
try uavRefExe(wasm, writer, extra.value, extra.offset, is_wasm32);
}
inst += 1;
continue :loop tags[inst];
},
.nav_ref => {
try navRefOff(wasm, code, .{ .nav_index = datas[inst].nav_index, .offset = 0 }, is_wasm32);
try navRefOff(wasm, writer, .{ .nav_index = datas[inst].nav_index, .offset = 0 }, is_wasm32);
inst += 1;
continue :loop tags[inst];
},
.nav_ref_off => {
try navRefOff(wasm, code, mir.extraData(Mir.NavRefOff, datas[inst].payload).data, is_wasm32);
try navRefOff(wasm, writer, mir.extraData(Mir.NavRefOff, datas[inst].payload).data, is_wasm32);
inst += 1;
continue :loop tags[inst];
},
@@ -81,11 +83,11 @@ pub fn lowerToCode(emit: *Emit) Error!void {
const indirect_func_idx: Wasm.ZcuIndirectFunctionSetIndex = @enumFromInt(
wasm.zcu_indirect_function_set.getIndex(datas[inst].nav_index).?,
);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_const));
if (is_obj) {
@panic("TODO");
} else {
leb.writeUleb128(code.fixedWriter(), 1 + @intFromEnum(indirect_func_idx)) catch unreachable;
try writer.writeLeb128(1 + @intFromEnum(indirect_func_idx));
}
inst += 1;
continue :loop tags[inst];
@@ -95,52 +97,48 @@ pub fn lowerToCode(emit: *Emit) Error!void {
continue :loop tags[inst];
},
.errors_len => {
try code.ensureUnusedCapacity(gpa, 6);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_const));
// MIR is lowered during flush, so there is indeed only one thread at this time.
const errors_len = 1 + comp.zcu.?.intern_pool.global_error_set.getNamesFromMainThread().len;
leb.writeIleb128(code.fixedWriter(), errors_len) catch unreachable;
const errors_len: u32 = @intCast(1 + comp.zcu.?.intern_pool.global_error_set.getNamesFromMainThread().len);
try writer.writeLeb128(@as(i32, @bitCast(errors_len)));

inst += 1;
continue :loop tags[inst];
},
.error_name_table_ref => {
wasm.error_name_table_ref_count += 1;
try code.ensureUnusedCapacity(gpa, 11);
const opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;
code.appendAssumeCapacity(@intFromEnum(opcode));
try writer.writeByte(@intFromEnum(opcode));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.errorNameTableSymbolIndex() },
.tag = if (is_wasm32) .memory_addr_leb else .memory_addr_leb64,
.addend = 0,
});
code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);
try writer.splatByteAll(0, if (is_wasm32) 5 else 10);

inst += 1;
continue :loop tags[inst];
} else {
const addr: u32 = wasm.errorNameTableAddr();
leb.writeIleb128(code.fixedWriter(), addr) catch unreachable;
try writer.writeLeb128(@as(i32, @bitCast(addr)));

inst += 1;
continue :loop tags[inst];
}
},
.br_if, .br, .memory_grow, .memory_size => {
try code.ensureUnusedCapacity(gpa, 11);
code.appendAssumeCapacity(@intFromEnum(tags[inst]));
leb.writeUleb128(code.fixedWriter(), datas[inst].label) catch unreachable;
try writer.writeByte(@intFromEnum(tags[inst]));
try writer.writeLeb128(datas[inst].label);

inst += 1;
continue :loop tags[inst];
},

.local_get, .local_set, .local_tee => {
try code.ensureUnusedCapacity(gpa, 11);
code.appendAssumeCapacity(@intFromEnum(tags[inst]));
leb.writeUleb128(code.fixedWriter(), datas[inst].local) catch unreachable;
try writer.writeByte(@intFromEnum(tags[inst]));
try writer.writeLeb128(datas[inst].local);

inst += 1;
continue :loop tags[inst];
@@ -150,29 +148,27 @@ pub fn lowerToCode(emit: *Emit) Error!void {
const extra_index = datas[inst].payload;
const extra = mir.extraData(Mir.JumpTable, extra_index);
const labels = mir.extra[extra.end..][0..extra.data.length];
try code.ensureUnusedCapacity(gpa, 11 + 10 * labels.len);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.br_table));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.br_table));
// -1 because default label is not part of length/depth.
leb.writeUleb128(code.fixedWriter(), extra.data.length - 1) catch unreachable;
for (labels) |label| leb.writeUleb128(code.fixedWriter(), label) catch unreachable;
try writer.writeLeb128(extra.data.length - 1);
for (labels) |label| try writer.writeLeb128(label);

inst += 1;
continue :loop tags[inst];
},

.call_nav => {
try code.ensureUnusedCapacity(gpa, 6);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.call));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.navSymbolIndex(datas[inst].nav_index) },
.tag = .function_index_leb,
.addend = 0,
});
code.appendNTimesAssumeCapacity(0, 5);
try writer.splatByteAll(0, 5);
} else {
appendOutputFunctionIndex(code, .fromIpNav(wasm, datas[inst].nav_index));
try appendOutputFunctionIndex(writer, .fromIpNav(wasm, datas[inst].nav_index));
}

inst += 1;
@@ -180,7 +176,6 @@ pub fn lowerToCode(emit: *Emit) Error!void {
},

.call_indirect => {
try code.ensureUnusedCapacity(gpa, 11);
const fn_info = comp.zcu.?.typeToFunc(.fromInterned(datas[inst].ip_index)).?;
const func_ty_index = wasm.getExistingFunctionType(
fn_info.cc,
@@ -188,38 +183,37 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.fromInterned(fn_info.return_type),
target,
).?;
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call_indirect));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.call_indirect));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .type_index = func_ty_index },
.tag = .type_index_leb,
.addend = 0,
});
code.appendNTimesAssumeCapacity(0, 5);
try writer.splatByteAll(0, 5);
} else {
const index: Wasm.Flush.FuncTypeIndex = .fromTypeIndex(func_ty_index, &wasm.flush_buffer);
leb.writeUleb128(code.fixedWriter(), @intFromEnum(index)) catch unreachable;
try writer.writeLeb128(@intFromEnum(index));
}
leb.writeUleb128(code.fixedWriter(), @as(u32, 0)) catch unreachable; // table index
try writer.writeUleb128(0); // table index

inst += 1;
continue :loop tags[inst];
},

.call_tag_name => {
try code.ensureUnusedCapacity(gpa, 6);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.call));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.tagNameSymbolIndex(datas[inst].ip_index) },
.tag = .function_index_leb,
.addend = 0,
});
code.appendNTimesAssumeCapacity(0, 5);
try writer.splatByteAll(0, 5);
} else {
appendOutputFunctionIndex(code, .fromTagNameType(wasm, datas[inst].ip_index));
try appendOutputFunctionIndex(writer, .fromTagNameType(wasm, datas[inst].ip_index));
}

inst += 1;
@@ -232,18 +226,17 @@ pub fn lowerToCode(emit: *Emit) Error!void {
// table initialized based on the `Mir.Intrinsic` enum.
const symbol_name = try wasm.internString(@tagName(datas[inst].intrinsic));

try code.ensureUnusedCapacity(gpa, 6);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.call));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.call));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.symbolNameIndex(symbol_name) },
.tag = .function_index_leb,
.addend = 0,
});
code.appendNTimesAssumeCapacity(0, 5);
try writer.splatByteAll(0, 5);
} else {
appendOutputFunctionIndex(code, .fromSymbolName(wasm, symbol_name));
try appendOutputFunctionIndex(writer, .fromSymbolName(wasm, symbol_name));
}

inst += 1;
@@ -251,19 +244,17 @@ pub fn lowerToCode(emit: *Emit) Error!void {
},

.global_set_sp => {
try code.ensureUnusedCapacity(gpa, 6);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.global_set));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.global_set));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.stackPointerSymbolIndex() },
.tag = .global_index_leb,
.addend = 0,
});
code.appendNTimesAssumeCapacity(0, 5);
try writer.splatByteAll(0, 5);
} else {
const sp_global: Wasm.GlobalIndex = .stack_pointer;
std.leb.writeUleb128(code.fixedWriter(), @intFromEnum(sp_global)) catch unreachable;
try writer.writeLeb128(@intFromEnum(Wasm.GlobalIndex.stack_pointer));
}

inst += 1;
@@ -271,36 +262,32 @@ pub fn lowerToCode(emit: *Emit) Error!void {
},

.f32_const => {
try code.ensureUnusedCapacity(gpa, 5);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.f32_const));
std.mem.writeInt(u32, code.addManyAsArrayAssumeCapacity(4), @bitCast(datas[inst].float32), .little);
try writer.writeByte(@intFromEnum(std.wasm.Opcode.f32_const));
try writer.writeInt(u32, @bitCast(datas[inst].float32), .little);

inst += 1;
continue :loop tags[inst];
},

.f64_const => {
try code.ensureUnusedCapacity(gpa, 9);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.f64_const));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.f64_const));
const float64 = mir.extraData(Mir.Float64, datas[inst].payload).data;
std.mem.writeInt(u64, code.addManyAsArrayAssumeCapacity(8), float64.toInt(), .little);
try writer.writeInt(u64, float64.toInt(), .little);

inst += 1;
continue :loop tags[inst];
},
.i32_const => {
try code.ensureUnusedCapacity(gpa, 6);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
leb.writeIleb128(code.fixedWriter(), datas[inst].imm32) catch unreachable;
try writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_const));
try writer.writeLeb128(datas[inst].imm32);

inst += 1;
continue :loop tags[inst];
},
.i64_const => {
try code.ensureUnusedCapacity(gpa, 11);
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i64_const));
try writer.writeByte(@intFromEnum(std.wasm.Opcode.i64_const));
const int64: i64 = @bitCast(mir.extraData(Mir.Imm64, datas[inst].payload).data.toInt());
leb.writeIleb128(code.fixedWriter(), int64) catch unreachable;
try writer.writeLeb128(int64);

inst += 1;
continue :loop tags[inst];
@@ -330,9 +317,8 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.i64_store16,
.i64_store32,
=> {
try code.ensureUnusedCapacity(gpa, 1 + 20);
code.appendAssumeCapacity(@intFromEnum(tags[inst]));
encodeMemArg(code, mir.extraData(Mir.MemArg, datas[inst].payload).data);
try writer.writeByte(@intFromEnum(tags[inst]));
try encodeMemArg(writer, mir.extraData(Mir.MemArg, datas[inst].payload).data);
inst += 1;
continue :loop tags[inst];
},
@@ -466,43 +452,42 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.i64_clz,
.i64_ctz,
=> {
try code.append(gpa, @intFromEnum(tags[inst]));
try writer.writeByte(@intFromEnum(tags[inst]));
inst += 1;
continue :loop tags[inst];
},

.misc_prefix => {
try code.ensureUnusedCapacity(gpa, 6 + 6);
const extra_index = datas[inst].payload;
const opcode = mir.extra[extra_index];
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.misc_prefix));
leb.writeUleb128(code.fixedWriter(), opcode) catch unreachable;
switch (@as(std.wasm.MiscOpcode, @enumFromInt(opcode))) {
const opcode: std.wasm.MiscOpcode = @enumFromInt(mir.extra[extra_index]);
try writer.writeByte(@intFromEnum(std.wasm.Opcode.misc_prefix));
try writer.writeLeb128(@intFromEnum(opcode));
switch (opcode) {
// bulk-memory opcodes
.data_drop => {
const segment = mir.extra[extra_index + 1];
leb.writeUleb128(code.fixedWriter(), segment) catch unreachable;
try writer.writeLeb128(segment);

inst += 1;
continue :loop tags[inst];
},
.memory_init => {
const segment = mir.extra[extra_index + 1];
leb.writeUleb128(code.fixedWriter(), segment) catch unreachable;
leb.writeUleb128(code.fixedWriter(), @as(u32, 0)) catch unreachable; // memory index
try writer.writeLeb128(segment);
try writer.writeByte(0); // memory index

inst += 1;
continue :loop tags[inst];
},
.memory_fill => {
leb.writeUleb128(code.fixedWriter(), @as(u32, 0)) catch unreachable; // memory index
try writer.writeByte(0); // memory index

inst += 1;
continue :loop tags[inst];
},
.memory_copy => {
leb.writeUleb128(code.fixedWriter(), @as(u32, 0)) catch unreachable; // dst memory index
leb.writeUleb128(code.fixedWriter(), @as(u32, 0)) catch unreachable; // src memory index
try writer.writeByte(0); // dst memory index
try writer.writeByte(0); // src memory index

inst += 1;
continue :loop tags[inst];
@@ -534,12 +519,11 @@ pub fn lowerToCode(emit: *Emit) Error!void {
comptime unreachable;
},
.simd_prefix => {
try code.ensureUnusedCapacity(gpa, 6 + 20);
const extra_index = datas[inst].payload;
const opcode = mir.extra[extra_index];
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.simd_prefix));
leb.writeUleb128(code.fixedWriter(), opcode) catch unreachable;
switch (@as(std.wasm.SimdOpcode, @enumFromInt(opcode))) {
const opcode: std.wasm.SimdOpcode = @enumFromInt(mir.extra[extra_index]);
try writer.writeByte(@intFromEnum(std.wasm.Opcode.simd_prefix));
try writer.writeLeb128(@intFromEnum(opcode));
switch (opcode) {
.v128_store,
.v128_load,
.v128_load8_splat,
@@ -547,12 +531,12 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.v128_load32_splat,
.v128_load64_splat,
=> {
encodeMemArg(code, mir.extraData(Mir.MemArg, extra_index + 1).data);
try encodeMemArg(writer, mir.extraData(Mir.MemArg, extra_index + 1).data);
inst += 1;
continue :loop tags[inst];
},
.v128_const, .i8x16_shuffle => {
code.appendSliceAssumeCapacity(std.mem.asBytes(mir.extra[extra_index + 1 ..][0..4]));
try writer.writeAll(std.mem.asBytes(mir.extra[extra_index + 1 ..][0..4]));
inst += 1;
continue :loop tags[inst];
},
@@ -571,7 +555,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.f64x2_extract_lane,
.f64x2_replace_lane,
=> {
code.appendAssumeCapacity(@intCast(mir.extra[extra_index + 1]));
try writer.writeByte(@intCast(mir.extra[extra_index + 1]));
inst += 1;
continue :loop tags[inst];
},
@@ -819,13 +803,11 @@ pub fn lowerToCode(emit: *Emit) Error!void {
comptime unreachable;
},
.atomics_prefix => {
try code.ensureUnusedCapacity(gpa, 6 + 20);

const extra_index = datas[inst].payload;
const opcode = mir.extra[extra_index];
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.atomics_prefix));
leb.writeUleb128(code.fixedWriter(), opcode) catch unreachable;
switch (@as(std.wasm.AtomicsOpcode, @enumFromInt(opcode))) {
const opcode: std.wasm.AtomicsOpcode = @enumFromInt(mir.extra[extra_index]);
try writer.writeByte(@intFromEnum(std.wasm.Opcode.atomics_prefix));
try writer.writeLeb128(@intFromEnum(opcode));
switch (opcode) {
.i32_atomic_load,
.i64_atomic_load,
.i32_atomic_load8_u,
@@ -892,15 +874,12 @@ pub fn lowerToCode(emit: *Emit) Error!void {
.i64_atomic_rmw32_cmpxchg_u,
=> {
const mem_arg = mir.extraData(Mir.MemArg, extra_index + 1).data;
encodeMemArg(code, mem_arg);
try encodeMemArg(writer, mem_arg);
inst += 1;
continue :loop tags[inst];
},
.atomic_fence => {
// Hard-codes memory index 0 since multi-memory proposal is
// not yet accepted nor implemented.
const memory_index: u32 = 0;
leb.writeUleb128(code.fixedWriter(), memory_index) catch unreachable;
try writer.writeByte(0); // memory index
inst += 1;
continue :loop tags[inst];
},
@@ -915,44 +894,36 @@ pub fn lowerToCode(emit: *Emit) Error!void {
}

/// Asserts 20 unused capacity.
fn encodeMemArg(code: *std.ArrayListUnmanaged(u8), mem_arg: Mir.MemArg) void {
assert(code.unusedCapacitySlice().len >= 20);
// Wasm encodes alignment as power of 2, rather than natural alignment.
const encoded_alignment = @ctz(mem_arg.alignment);
leb.writeUleb128(code.fixedWriter(), encoded_alignment) catch unreachable;
leb.writeUleb128(code.fixedWriter(), mem_arg.offset) catch unreachable;
fn encodeMemArg(writer: *Writer, mem_arg: Mir.MemArg) Writer.Error!void {
try writer.writeLeb128(Wasm.Alignment.fromNonzeroByteUnits(mem_arg.alignment).toLog2Units());
try writer.writeLeb128(mem_arg.offset);
}

fn uavRefObj(wasm: *Wasm, code: *std.ArrayListUnmanaged(u8), value: InternPool.Index, offset: i32, is_wasm32: bool) !void {
fn uavRefObj(wasm: *Wasm, writer: *Writer, value: InternPool.Index, offset: i32, is_wasm32: bool) Writer.Error!void {
const comp = wasm.base.comp;
const gpa = comp.gpa;
const opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;

try code.ensureUnusedCapacity(gpa, 11);
code.appendAssumeCapacity(@intFromEnum(opcode));
try writer.writeByte(@intFromEnum(opcode));

try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.uavSymbolIndex(value) },
.tag = if (is_wasm32) .memory_addr_leb else .memory_addr_leb64,
.addend = offset,
});
code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);
try writer.splatByteAll(0, if (is_wasm32) 5 else 10);
}

fn uavRefExe(wasm: *Wasm, code: *std.ArrayListUnmanaged(u8), value: InternPool.Index, offset: i32, is_wasm32: bool) !void {
const comp = wasm.base.comp;
const gpa = comp.gpa;
fn uavRefExe(wasm: *Wasm, writer: *Writer, value: InternPool.Index, offset: i32, is_wasm32: bool) !void {
const opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;

try code.ensureUnusedCapacity(gpa, 11);
code.appendAssumeCapacity(@intFromEnum(opcode));
try writer.writeByte(@intFromEnum(opcode));

const addr = wasm.uavAddr(value);
leb.writeUleb128(code.fixedWriter(), @as(u32, @intCast(@as(i64, addr) + offset))) catch unreachable;
try writer.writeLeb128(@as(u32, @intCast(@as(i64, addr) + offset)));
}

fn navRefOff(wasm: *Wasm, code: *std.ArrayListUnmanaged(u8), data: Mir.NavRefOff, is_wasm32: bool) !void {
fn navRefOff(wasm: *Wasm, writer: *Writer, data: Mir.NavRefOff, is_wasm32: bool) !void {
const comp = wasm.base.comp;
const zcu = comp.zcu.?;
const ip = &zcu.intern_pool;
@@ -961,24 +932,22 @@ fn navRefOff(wasm: *Wasm, code: *std.ArrayListUnmanaged(u8), data: Mir.NavRefOff
const nav_ty = ip.getNav(data.nav_index).typeOf(ip);
assert(!ip.isFunctionType(nav_ty));

try code.ensureUnusedCapacity(gpa, 11);

const opcode: std.wasm.Opcode = if (is_wasm32) .i32_const else .i64_const;
code.appendAssumeCapacity(@intFromEnum(opcode));
try writer.writeByte(@intFromEnum(opcode));
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
.offset = @intCast(writer.count),
.pointee = .{ .symbol_index = try wasm.navSymbolIndex(data.nav_index) },
.tag = if (is_wasm32) .memory_addr_leb else .memory_addr_leb64,
.addend = data.offset,
});
code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);
try writer.splatByteAll(0, if (is_wasm32) 5 else 10);
} else {
const addr = wasm.navAddr(data.nav_index);
leb.writeUleb128(code.fixedWriter(), @as(u32, @intCast(@as(i64, addr) + data.offset))) catch unreachable;
try writer.writeLeb128(@as(i32, @bitCast(@as(u32, @intCast(@as(i64, addr) + data.offset)))));
}
}

fn appendOutputFunctionIndex(code: *std.ArrayListUnmanaged(u8), i: Wasm.OutputFunctionIndex) void {
leb.writeUleb128(code.fixedWriter(), @intFromEnum(i)) catch unreachable;
fn appendOutputFunctionIndex(writer: *Writer, i: Wasm.OutputFunctionIndex) Writer.Error!void {
return writer.writeLeb128(@intFromEnum(i));
}

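One detail carried over unchanged from the emitter above: relocation sites are still written as 5 or 10 zero bytes (splatByteAll(0, if (is_wasm32) 5 else 10)), the maximum LEB128 width of a 32- or 64-bit value, so the linker can later patch the operand in place without resizing the stream. A quick check of that arithmetic (illustrative only, not part of this commit):

const std = @import("std");

test "max LEB128 widths used for relocation padding" {
    // An unsigned LEB128 value of N bits needs at most ceil(N / 7) bytes.
    try std.testing.expectEqual(@as(u32, 5), std.math.divCeil(u32, 32, 7) catch unreachable);
    try std.testing.expectEqual(@as(u32, 10), std.math.divCeil(u32, 64, 7) catch unreachable);
}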
@@ -669,16 +669,14 @@ pub fn deinit(mir: *Mir, gpa: std.mem.Allocator) void {
mir.* = undefined;
}

pub fn lower(mir: *const Mir, wasm: *Wasm, code: *std.ArrayListUnmanaged(u8)) std.mem.Allocator.Error!void {
const gpa = wasm.base.comp.gpa;

pub fn lower(mir: *const Mir, wasm: *Wasm, writer: *std.Io.Writer) std.Io.Writer.Error!void {
// Write the locals in the prologue of the function body.
try code.ensureUnusedCapacity(gpa, 5 + mir.locals.len * 6 + 38);
_ = try writer.writableSliceGreedy(5 + mir.locals.len * 6 + 38);

std.leb.writeUleb128(code.fixedWriter(), @as(u32, @intCast(mir.locals.len))) catch unreachable;
writer.writeLeb128(@as(u32, @intCast(mir.locals.len))) catch unreachable;
for (mir.locals) |local| {
std.leb.writeUleb128(code.fixedWriter(), @as(u32, 1)) catch unreachable;
code.appendAssumeCapacity(@intFromEnum(local));
writer.writeLeb128(@as(u32, 1)) catch unreachable;
writer.writeByte(@intFromEnum(local)) catch unreachable;
}

// Stack management section of function prologue.
@@ -686,37 +684,37 @@ pub fn lower(mir: *const Mir, wasm: *Wasm, code: *std.ArrayListUnmanaged(u8)) st
if (stack_alignment.toByteUnits()) |align_bytes| {
const sp_global: Wasm.GlobalIndex = .stack_pointer;
// load stack pointer
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.global_get));
std.leb.writeUleb128(code.fixedWriter(), @intFromEnum(sp_global)) catch unreachable;
writer.writeByte(@intFromEnum(std.wasm.Opcode.global_get)) catch unreachable;
writer.writeLeb128(@intFromEnum(sp_global)) catch unreachable;
// store stack pointer so we can restore it when we return from the function
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.local_tee));
leb.writeUleb128(code.fixedWriter(), mir.prologue.sp_local) catch unreachable;
writer.writeByte(@intFromEnum(std.wasm.Opcode.local_tee)) catch unreachable;
writer.writeLeb128(mir.prologue.sp_local) catch unreachable;
// get the total stack size
const aligned_stack: i32 = @intCast(stack_alignment.forward(mir.prologue.stack_size));
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
leb.writeIleb128(code.fixedWriter(), aligned_stack) catch unreachable;
writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_const)) catch unreachable;
writer.writeLeb128(aligned_stack) catch unreachable;
// subtract it from the current stack pointer
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_sub));
writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_sub)) catch unreachable;
// Get negative stack alignment
const neg_stack_align = @as(i32, @intCast(align_bytes)) * -1;
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_const));
leb.writeIleb128(code.fixedWriter(), neg_stack_align) catch unreachable;
writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_const)) catch unreachable;
writer.writeLeb128(neg_stack_align) catch unreachable;
// Bitwise-and the value to get the new stack pointer to ensure the
// pointers are aligned with the abi alignment.
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.i32_and));
writer.writeByte(@intFromEnum(std.wasm.Opcode.i32_and)) catch unreachable;
// The bottom will be used to calculate all stack pointer offsets.
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.local_tee));
leb.writeUleb128(code.fixedWriter(), mir.prologue.bottom_stack_local) catch unreachable;
writer.writeByte(@intFromEnum(std.wasm.Opcode.local_tee)) catch unreachable;
writer.writeLeb128(mir.prologue.bottom_stack_local) catch unreachable;
// Store the current stack pointer value into the global stack pointer so other function calls will
// start from this value instead and not overwrite the current stack.
code.appendAssumeCapacity(@intFromEnum(std.wasm.Opcode.global_set));
std.leb.writeUleb128(code.fixedWriter(), @intFromEnum(sp_global)) catch unreachable;
writer.writeByte(@intFromEnum(std.wasm.Opcode.global_set)) catch unreachable;
writer.writeLeb128(@intFromEnum(sp_global)) catch unreachable;
}

var emit: Emit = .{
.mir = mir.*,
.wasm = wasm,
.code = code,
.writer = writer,
};
try emit.lowerToCode();
}

@@ -28,6 +28,7 @@ const fs = std.fs;
const leb = std.leb;
const log = std.log.scoped(.link);
const mem = std.mem;
const Writer = std.Io.Writer;

const Mir = @import("../arch/wasm/Mir.zig");
const CodeGen = @import("../arch/wasm/CodeGen.zig");
@@ -2087,11 +2088,9 @@ pub const Expr = enum(u32) {
pub const end = @intFromEnum(std.wasm.Opcode.end);

pub fn slice(index: Expr, wasm: *const Wasm) [:end]const u8 {
const start_slice = wasm.string_bytes.items[@intFromEnum(index)..];
const end_pos = Object.exprEndPos(start_slice, 0) catch |err| switch (err) {
error.InvalidInitOpcode => unreachable,
};
return start_slice[0..end_pos :end];
var r: std.Io.Reader = .fixed(wasm.string_bytes.items[@intFromEnum(index)..]);
Object.skipInit(&r) catch unreachable;
return r.buffered()[0 .. r.seek - 1 :end];
}
};

@@ -2126,7 +2125,7 @@ pub const FunctionType = extern struct {
wasm: *const Wasm,
ft: FunctionType,

pub fn format(self: Formatter, writer: *std.io.Writer) std.io.Writer.Error!void {
pub fn format(self: Formatter, writer: *Writer) Writer.Error!void {
const params = self.ft.params.slice(self.wasm);
const returns = self.ft.returns.slice(self.wasm);

@@ -2905,7 +2904,7 @@ pub const Feature = packed struct(u8) {
@"=",
};

pub fn format(feature: Feature, writer: *std.io.Writer) std.io.Writer.Error!void {
pub fn format(feature: Feature, writer: *Writer) Writer.Error!void {
try writer.print("{s} {s}", .{ @tagName(feature.prefix), @tagName(feature.tag) });
}

@@ -3037,16 +3036,16 @@ fn parseObject(wasm: *Wasm, obj: link.Input.Object) !void {
const stat = try obj.file.stat();
const size = std.math.cast(usize, stat.size) orelse return error.FileTooBig;

const file_contents = try gpa.alloc(u8, size);
defer gpa.free(file_contents);
var br: std.Io.Reader = .fixed(try gpa.alloc(u8, size));
defer gpa.free(br.buffered());

const n = try obj.file.preadAll(file_contents, 0);
if (n != file_contents.len) return error.UnexpectedEndOfFile;
const n = try obj.file.preadAll(br.buffered(), 0);
if (n != br.bufferedLen()) return error.UnexpectedEndOfFile;

var ss: Object.ScratchSpace = .{};
defer ss.deinit(gpa);

const object = try Object.parse(wasm, file_contents, obj.path, null, wasm.object_host_name, &ss, obj.must_link, gc_sections);
const object = try Object.parse(wasm, &br, obj.path, null, wasm.object_host_name, &ss, obj.must_link, gc_sections);
wasm.objects.appendAssumeCapacity(object);
}

@@ -167,9 +167,8 @@ pub fn parseObject(
};

const object_file_size = try header.parsedSize();
const contents = file_contents[object_offset + @sizeOf(Header) ..][0..object_file_size];

return Object.parse(wasm, contents, path, object_name, host_name, scratch_space, must_link, gc_sections);
var r: std.io.Reader = .fixed(file_contents[object_offset + @sizeOf(Header) ..][0..object_file_size]);
return Object.parse(wasm, &r, path, object_name, host_name, scratch_space, must_link, gc_sections);
}

const Archive = @This();

File diff suppressed because it is too large
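The object-parsing files below make the mirror-image change on the read side: manual pos indexing over bytes: []const u8 is replaced by a *std.Io.Reader positioned over the same buffer. A small sketch of the recurring read shape, assuming the Reader methods the diff relies on (fixed, takeLeb128, take); readNamePrefixedBlob is an illustrative helper, not part of this commit:

const std = @import("std");
const Reader = std.Io.Reader;

// The length-prefixed read that replaces `readBytes(bytes, pos)` below:
// take a LEB128 u32 length, then take that many bytes from the reader.
fn readNamePrefixedBlob(r: *Reader) ![]const u8 {
    const len = try r.takeLeb128(u32);
    return r.take(len);
}

test readNamePrefixedBlob {
    var r: Reader = .fixed(&.{ 0x03, 'a', 'b', 'c' });
    try std.testing.expectEqualStrings("abc", try readNamePrefixedBlob(&r));
}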
@@ -8,6 +8,7 @@ const Allocator = std.mem.Allocator;
const Path = std.Build.Cache.Path;
const log = std.log.scoped(.object);
const assert = std.debug.assert;
const Reader = std.Io.Reader;

/// Wasm spec version used for this `Object`
version: u32,
@@ -252,25 +253,21 @@ pub const ScratchSpace = struct {

pub fn parse(
wasm: *Wasm,
bytes: []const u8,
br: *Reader,
path: Path,
archive_member_name: ?[]const u8,
host_name: Wasm.OptionalString,
ss: *ScratchSpace,
must_link: bool,
gc_sections: bool,
) anyerror!Object {
) !Object {
const comp = wasm.base.comp;
const gpa = comp.gpa;
const diags = &comp.link_diags;

var pos: usize = 0;
if (!std.mem.eql(u8, try br.takeArray(std.wasm.magic.len), &std.wasm.magic)) return error.BadObjectMagic;

if (!std.mem.eql(u8, bytes[0..std.wasm.magic.len], &std.wasm.magic)) return error.BadObjectMagic;
pos += std.wasm.magic.len;

const version = std.mem.readInt(u32, bytes[pos..][0..4], .little);
pos += 4;
const version = try br.takeInt(u32, .little);

const data_segment_start: u32 = @intCast(wasm.object_data_segments.items.len);
const custom_segment_start: u32 = @intCast(wasm.object_custom_segments.entries.len);
@@ -298,200 +295,187 @@ pub fn parse(
var code_section_index: ?Wasm.ObjectSectionIndex = null;
var global_section_index: ?Wasm.ObjectSectionIndex = null;
var data_section_index: ?Wasm.ObjectSectionIndex = null;
while (pos < bytes.len) : (wasm.object_total_sections += 1) {
while (br.takeEnum(std.wasm.Section, .little)) |section_tag| : (wasm.object_total_sections += 1) {
const section_index: Wasm.ObjectSectionIndex = @enumFromInt(wasm.object_total_sections);

const section_tag: std.wasm.Section = @enumFromInt(bytes[pos]);
pos += 1;

const len, pos = readLeb(u32, bytes, pos);
const section_end = pos + len;
const len = try br.takeLeb128(u32);
const section_end = br.seek + len;
switch (section_tag) {
.custom => {
const section_name, pos = readBytes(bytes, pos);
const section_name = try br.take(try br.takeLeb128(u32));
if (std.mem.eql(u8, section_name, "linking")) {
saw_linking_section = true;
const section_version, pos = readLeb(u32, bytes, pos);
const section_version = try br.takeLeb128(u32);
log.debug("link meta data version: {d}", .{section_version});
if (section_version != 2) return error.UnsupportedVersion;
while (pos < section_end) {
const sub_type, pos = readLeb(u8, bytes, pos);
log.debug("found subsection: {s}", .{@tagName(@as(SubsectionType, @enumFromInt(sub_type)))});
const payload_len, pos = readLeb(u32, bytes, pos);
while (br.seek < section_end) {
const sub_type = try br.takeEnum(SubsectionType, .little);
log.debug("found subsection: {s}", .{@tagName(sub_type)});
const payload_len = try br.takeLeb128(u32);
if (payload_len == 0) break;

const count, pos = readLeb(u32, bytes, pos);

switch (@as(SubsectionType, @enumFromInt(sub_type))) {
.segment_info => {
for (try ss.segment_info.addManyAsSlice(gpa, count)) |*segment| {
const name, pos = readBytes(bytes, pos);
const alignment, pos = readLeb(u32, bytes, pos);
const flags_u32, pos = readLeb(u32, bytes, pos);
const flags: SegmentInfo.Flags = @bitCast(flags_u32);
const tls = flags.tls or
// Supports legacy object files that specified
// being TLS by the name instead of the TLS flag.
std.mem.startsWith(u8, name, ".tdata") or
std.mem.startsWith(u8, name, ".tbss");
has_tls = has_tls or tls;
segment.* = .{
.name = try wasm.internString(name),
.flags = .{
.strings = flags.strings,
.tls = tls,
.alignment = @enumFromInt(alignment),
.retain = flags.retain,
},
};
}
const count = try br.takeLeb128(u32);
switch (sub_type) {
.segment_info => for (try ss.segment_info.addManyAsSlice(gpa, count)) |*segment| {
const name = try br.take(try br.takeLeb128(u32));
const alignment: Alignment = .fromLog2Units(try br.takeLeb128(u32));
const flags: SegmentInfo.Flags = @bitCast(try br.takeLeb128(u32));
const tls = flags.tls or
// Supports legacy object files that specified
// being TLS by the name instead of the TLS flag.
std.mem.startsWith(u8, name, ".tdata") or
std.mem.startsWith(u8, name, ".tbss");
has_tls = has_tls or tls;
segment.* = .{
.name = try wasm.internString(name),
.flags = .{
.strings = flags.strings,
.tls = tls,
.alignment = alignment,
.retain = flags.retain,
},
};
},
.init_funcs => {
for (try wasm.object_init_funcs.addManyAsSlice(gpa, count)) |*func| {
const priority, pos = readLeb(u32, bytes, pos);
const symbol_index, pos = readLeb(u32, bytes, pos);
if (symbol_index > ss.symbol_table.items.len)
return diags.failParse(path, "init_funcs before symbol table", .{});
const sym = &ss.symbol_table.items[symbol_index];
if (sym.pointee != .function) {
return diags.failParse(path, "init_func symbol '{s}' not a function", .{
sym.name.slice(wasm).?,
});
} else if (sym.flags.undefined) {
return diags.failParse(path, "init_func symbol '{s}' is an import", .{
sym.name.slice(wasm).?,
});
}
func.* = .{
.priority = priority,
.function_index = sym.pointee.function,
};
.init_funcs => for (try wasm.object_init_funcs.addManyAsSlice(gpa, count)) |*func| {
const priority = try br.takeLeb128(u32);
const symbol_index = try br.takeLeb128(u32);
if (symbol_index > ss.symbol_table.items.len)
return diags.failParse(path, "init_funcs before symbol table", .{});
const sym = &ss.symbol_table.items[symbol_index];
if (sym.pointee != .function) {
return diags.failParse(path, "init_func symbol '{s}' not a function", .{
sym.name.slice(wasm).?,
});
} else if (sym.flags.undefined) {
return diags.failParse(path, "init_func symbol '{s}' is an import", .{
sym.name.slice(wasm).?,
});
}
func.* = .{
.priority = priority,
.function_index = sym.pointee.function,
};
},
.comdat_info => {
for (try wasm.object_comdats.addManyAsSlice(gpa, count)) |*comdat| {
const name, pos = readBytes(bytes, pos);
const flags, pos = readLeb(u32, bytes, pos);
if (flags != 0) return error.UnexpectedComdatFlags;
const symbol_count, pos = readLeb(u32, bytes, pos);
const start_off: u32 = @intCast(wasm.object_comdat_symbols.len);
try wasm.object_comdat_symbols.ensureUnusedCapacity(gpa, symbol_count);
for (0..symbol_count) |_| {
const kind, pos = readEnum(Wasm.Comdat.Symbol.Type, bytes, pos);
const index, pos = readLeb(u32, bytes, pos);
if (true) @panic("TODO rebase index depending on kind");
wasm.object_comdat_symbols.appendAssumeCapacity(.{
.kind = kind,
.index = index,
});
}
comdat.* = .{
.name = try wasm.internString(name),
.flags = flags,
.symbols = .{
.off = start_off,
.len = @intCast(wasm.object_comdat_symbols.len - start_off),
},
};
.comdat_info => for (try wasm.object_comdats.addManyAsSlice(gpa, count)) |*comdat| {
const name = try br.take(try br.takeLeb128(u32));
const flags = try br.takeLeb128(u32);
if (flags != 0) return error.UnexpectedComdatFlags;
const symbol_count = try br.takeLeb128(u32);
const start_off: u32 = @intCast(wasm.object_comdat_symbols.len);
try wasm.object_comdat_symbols.ensureUnusedCapacity(gpa, symbol_count);
for (0..symbol_count) |_| {
const kind = try br.takeEnum(Wasm.Comdat.Symbol.Type, .little);
const index = try br.takeLeb128(u32);
if (true) @panic("TODO rebase index depending on kind");
wasm.object_comdat_symbols.appendAssumeCapacity(.{
.kind = kind,
.index = index,
});
}
comdat.* = .{
.name = try wasm.internString(name),
.flags = flags,
.symbols = .{
.off = start_off,
.len = @intCast(wasm.object_comdat_symbols.len - start_off),
},
};
},
.symbol_table => {
for (try ss.symbol_table.addManyAsSlice(gpa, count)) |*symbol| {
const tag, pos = readEnum(Symbol.Tag, bytes, pos);
const flags, pos = readLeb(u32, bytes, pos);
symbol.* = .{
.flags = @bitCast(flags),
.name = .none,
.pointee = undefined,
};
symbol.flags.initZigSpecific(must_link, gc_sections);
.symbol_table => for (try ss.symbol_table.addManyAsSlice(gpa, count)) |*symbol| {
const tag = try br.takeEnum(Symbol.Tag, .little);
const flags: Wasm.SymbolFlags = @bitCast(try br.takeLeb128(u32));
symbol.* = .{
.flags = flags,
.name = .none,
.pointee = undefined,
};
symbol.flags.initZigSpecific(must_link, gc_sections);

switch (tag) {
.data => {
const name, pos = readBytes(bytes, pos);
const interned_name = try wasm.internString(name);
symbol.name = interned_name.toOptional();
if (symbol.flags.undefined) {
symbol.pointee = .data_import;
} else {
const segment_index, pos = readLeb(u32, bytes, pos);
const segment_offset, pos = readLeb(u32, bytes, pos);
const size, pos = readLeb(u32, bytes, pos);
try wasm.object_datas.append(gpa, .{
.segment = @enumFromInt(data_segment_start + segment_index),
.offset = segment_offset,
.size = size,
.name = interned_name,
.flags = symbol.flags,
});
symbol.pointee = .{
.data = @enumFromInt(wasm.object_datas.items.len - 1),
};
}
},
.section => {
const local_section, pos = readLeb(u32, bytes, pos);
const section: Wasm.ObjectSectionIndex = @enumFromInt(local_section_index_base + local_section);
symbol.pointee = .{ .section = section };
},
switch (tag) {
.data => {
const name = try br.take(try br.takeLeb128(u32));
const interned_name = try wasm.internString(name);
symbol.name = interned_name.toOptional();
if (symbol.flags.undefined) {
symbol.pointee = .data_import;
} else {
const segment_index = try br.takeLeb128(u32);
const segment_offset = try br.takeLeb128(u32);
const size = try br.takeLeb128(u32);
try wasm.object_datas.append(gpa, .{
.segment = @enumFromInt(data_segment_start + segment_index),
.offset = segment_offset,
.size = size,
.name = interned_name,
.flags = symbol.flags,
});
symbol.pointee = .{
.data = @enumFromInt(wasm.object_datas.items.len - 1),
};
}
},
.section => {
const local_section = try br.takeLeb128(u32);
const section: Wasm.ObjectSectionIndex = @enumFromInt(local_section_index_base + local_section);
symbol.pointee = .{ .section = section };
},

.function => {
const local_index, pos = readLeb(u32, bytes, pos);
if (symbol.flags.undefined) {
const function_import: ScratchSpace.FuncImportIndex = @enumFromInt(local_index);
symbol.pointee = .{ .function_import = function_import };
if (symbol.flags.explicit_name) {
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
} else {
symbol.name = function_import.ptr(ss).name.toOptional();
}
} else {
symbol.pointee = .{ .function = @enumFromInt(functions_start + (local_index - ss.func_imports.items.len)) };
const name, pos = readBytes(bytes, pos);
.function => {
const local_index = try br.takeLeb128(u32);
if (symbol.flags.undefined) {
const function_import: ScratchSpace.FuncImportIndex = @enumFromInt(local_index);
symbol.pointee = .{ .function_import = function_import };
if (symbol.flags.explicit_name) {
const name = try br.take(try br.takeLeb128(u32));
symbol.name = (try wasm.internString(name)).toOptional();
}
},
.global => {
const local_index, pos = readLeb(u32, bytes, pos);
if (symbol.flags.undefined) {
const global_import: ScratchSpace.GlobalImportIndex = @enumFromInt(local_index);
symbol.pointee = .{ .global_import = global_import };
if (symbol.flags.explicit_name) {
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
} else {
symbol.name = global_import.ptr(ss).name.toOptional();
}
} else {
symbol.pointee = .{ .global = @enumFromInt(globals_start + (local_index - ss.global_imports.items.len)) };
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
symbol.name = function_import.ptr(ss).name.toOptional();
}
},
.table => {
const local_index, pos = readLeb(u32, bytes, pos);
if (symbol.flags.undefined) {
table_import_symbol_count += 1;
const table_import: ScratchSpace.TableImportIndex = @enumFromInt(local_index);
symbol.pointee = .{ .table_import = table_import };
if (symbol.flags.explicit_name) {
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
} else {
symbol.name = table_import.ptr(ss).name.toOptional();
}
} else {
symbol.pointee = .{ .function = @enumFromInt(functions_start + (local_index - ss.func_imports.items.len)) };
const name = try br.take(try br.takeLeb128(u32));
symbol.name = (try wasm.internString(name)).toOptional();
}
},
.global => {
const local_index = try br.takeLeb128(u32);
if (symbol.flags.undefined) {
const global_import: ScratchSpace.GlobalImportIndex = @enumFromInt(local_index);
symbol.pointee = .{ .global_import = global_import };
if (symbol.flags.explicit_name) {
const name = try br.take(try br.takeLeb128(u32));
symbol.name = (try wasm.internString(name)).toOptional();
} else {
symbol.pointee = .{ .table = @enumFromInt(tables_start + (local_index - ss.table_imports.items.len)) };
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
symbol.name = global_import.ptr(ss).name.toOptional();
}
},
else => {
log.debug("unrecognized symbol type tag: {x}", .{@intFromEnum(tag)});
return error.UnrecognizedSymbolType;
},
}
} else {
symbol.pointee = .{ .global = @enumFromInt(globals_start + (local_index - ss.global_imports.items.len)) };
const name = try br.take(try br.takeLeb128(u32));
symbol.name = (try wasm.internString(name)).toOptional();
}
},
.table => {
const local_index = try br.takeLeb128(u32);
if (symbol.flags.undefined) {
table_import_symbol_count += 1;
const table_import: ScratchSpace.TableImportIndex = @enumFromInt(local_index);
symbol.pointee = .{ .table_import = table_import };
if (symbol.flags.explicit_name) {
const name = try br.take(try br.takeLeb128(u32));
symbol.name = (try wasm.internString(name)).toOptional();
} else {
symbol.name = table_import.ptr(ss).name.toOptional();
}
} else {
symbol.pointee = .{ .table = @enumFromInt(tables_start + (local_index - ss.table_imports.items.len)) };
const name = try br.take(try br.takeLeb128(u32));
symbol.name = (try wasm.internString(name)).toOptional();
}
},
else => {
log.debug("unrecognized symbol type tag: {x}", .{@intFromEnum(tag)});
return error.UnrecognizedSymbolType;
},
}
},
}
@ -504,8 +488,8 @@ pub fn parse(
|
||||
// which section they apply to, and must be sequenced in
|
||||
// the module after that section."
|
||||
// "Relocation sections can only target code, data and custom sections."
|
||||
const local_section, pos = readLeb(u32, bytes, pos);
|
||||
const count, pos = readLeb(u32, bytes, pos);
|
||||
const local_section = try br.takeLeb128(u32);
|
||||
const count = try br.takeLeb128(u32);
|
||||
const section: Wasm.ObjectSectionIndex = @enumFromInt(local_section_index_base + local_section);
|
||||
|
||||
log.debug("found {d} relocations for section={d}", .{ count, section });
|
||||
@ -513,10 +497,9 @@ pub fn parse(
|
||||
var prev_offset: u32 = 0;
|
||||
try wasm.object_relocations.ensureUnusedCapacity(gpa, count);
|
||||
for (0..count) |_| {
|
||||
const tag: RelocationType = @enumFromInt(bytes[pos]);
|
||||
pos += 1;
|
||||
const offset, pos = readLeb(u32, bytes, pos);
|
||||
const index, pos = readLeb(u32, bytes, pos);
|
||||
const tag = try br.takeEnum(RelocationType, .little);
|
||||
const offset = try br.takeLeb128(u32);
|
||||
const index = try br.takeLeb128(u32);
|
||||
|
||||
if (offset < prev_offset)
|
||||
return diags.failParse(path, "relocation entries not sorted by offset", .{});
|
||||
@ -537,7 +520,7 @@ pub fn parse(
|
||||
.memory_addr_locrel_i32,
|
||||
.memory_addr_tls_sleb64,
|
||||
=> {
|
||||
const addend: i32, pos = readLeb(i32, bytes, pos);
|
||||
const addend = try br.takeLeb128(i32);
|
||||
wasm.object_relocations.appendAssumeCapacity(switch (sym.pointee) {
|
||||
.data => |data| .{
|
||||
.tag = .fromType(tag),
|
||||
@ -555,7 +538,7 @@ pub fn parse(
|
||||
});
|
||||
},
|
||||
.function_offset_i32, .function_offset_i64 => {
|
||||
const addend: i32, pos = readLeb(i32, bytes, pos);
|
||||
const addend = try br.takeLeb128(i32);
|
||||
wasm.object_relocations.appendAssumeCapacity(switch (sym.pointee) {
|
||||
.function => .{
|
||||
.tag = .fromType(tag),
|
||||
@ -573,7 +556,7 @@ pub fn parse(
|
||||
});
|
||||
},
|
||||
.section_offset_i32 => {
|
||||
const addend: i32, pos = readLeb(i32, bytes, pos);
|
||||
const addend = try br.takeLeb128(i32);
|
||||
wasm.object_relocations.appendAssumeCapacity(.{
|
||||
.tag = .section_offset_i32,
|
||||
.offset = offset,
|
||||
@ -658,10 +641,9 @@ pub fn parse(
|
||||
.len = count,
|
||||
});
|
||||
} else if (std.mem.eql(u8, section_name, "target_features")) {
|
||||
opt_features, pos = try parseFeatures(wasm, bytes, pos, path);
|
||||
opt_features = try parseFeatures(wasm, br, path);
|
||||
} else if (std.mem.startsWith(u8, section_name, ".debug")) {
|
||||
const debug_content = bytes[pos..section_end];
|
||||
pos = section_end;
|
||||
const debug_content = try br.take(len);
|
||||
|
||||
const data_off: u32 = @intCast(wasm.string_bytes.items.len);
|
||||
try wasm.string_bytes.appendSlice(gpa, debug_content);
|
||||
@ -669,23 +651,20 @@ pub fn parse(
|
||||
try wasm.object_custom_segments.put(gpa, section_index, .{
|
||||
.payload = .{
|
||||
.off = @enumFromInt(data_off),
|
||||
.len = @intCast(debug_content.len),
|
||||
.len = @intCast(len),
|
||||
},
|
||||
.flags = .{},
|
||||
.section_name = try wasm.internString(section_name),
|
||||
});
|
||||
} else {
|
||||
pos = section_end;
|
||||
}
|
||||
} else br.seek = section_end;
|
||||
},
|
||||
.type => {
|
||||
const func_types_len, pos = readLeb(u32, bytes, pos);
|
||||
const func_types_len = try br.takeLeb128(u32);
|
||||
for (try ss.func_types.addManyAsSlice(gpa, func_types_len)) |*func_type| {
|
||||
if (bytes[pos] != std.wasm.function_type) return error.ExpectedFuncType;
|
||||
pos += 1;
|
||||
if (try br.takeByte() != std.wasm.function_type) return error.ExpectedFuncType;
const params, pos = readBytes(bytes, pos);
const returns, pos = readBytes(bytes, pos);
const params = try br.take(try br.takeLeb128(u32));
const returns = try br.take(try br.takeLeb128(u32));
func_type.* = try wasm.addFuncType(.{
.params = .fromString(try wasm.internString(params)),
.returns = .fromString(try wasm.internString(returns)),
@ -693,16 +672,16 @@ pub fn parse(
}
},
.import => {
const imports_len, pos = readLeb(u32, bytes, pos);
const imports_len = try br.takeLeb128(u32);
for (0..imports_len) |_| {
const module_name, pos = readBytes(bytes, pos);
const name, pos = readBytes(bytes, pos);
const kind, pos = readEnum(std.wasm.ExternalKind, bytes, pos);
const module_name = try br.take(try br.takeLeb128(u32));
const name = try br.take(try br.takeLeb128(u32));
const kind = try br.takeEnum(std.wasm.ExternalKind, .little);
const interned_module_name = try wasm.internString(module_name);
const interned_name = try wasm.internString(name);
switch (kind) {
.function => {
const function, pos = readLeb(u32, bytes, pos);
const function = try br.takeLeb128(u32);
try ss.func_imports.append(gpa, .{
.module_name = interned_module_name,
.name = interned_name,
@ -710,7 +689,7 @@ pub fn parse(
});
},
.memory => {
const limits, pos = readLimits(bytes, pos);
const limits = try readLimits(br);
const gop = try wasm.object_memory_imports.getOrPut(gpa, interned_name);
if (gop.found_existing) {
if (gop.value_ptr.module_name != interned_module_name) {
@ -736,9 +715,12 @@ pub fn parse(
}
},
.global => {
const valtype, pos = readEnum(std.wasm.Valtype, bytes, pos);
const mutable = bytes[pos] == 0x01;
pos += 1;
const valtype = try br.takeEnum(std.wasm.Valtype, .little);
const mutable = switch (try br.takeByte()) {
0 => false,
1 => true,
else => return error.InvalidMutability,
};
try ss.global_imports.append(gpa, .{
.name = interned_name,
.valtype = valtype,
@ -747,8 +729,8 @@ pub fn parse(
});
},
.table => {
const ref_type, pos = readEnum(std.wasm.RefType, bytes, pos);
const limits, pos = readLimits(bytes, pos);
const ref_type = try br.takeEnum(std.wasm.RefType, .little);
const limits = try readLimits(br);
try ss.table_imports.append(gpa, .{
.name = interned_name,
.module_name = interned_module_name,
@ -763,17 +745,16 @@ pub fn parse(
}
},
.function => {
const functions_len, pos = readLeb(u32, bytes, pos);
const functions_len = try br.takeLeb128(u32);
for (try ss.func_type_indexes.addManyAsSlice(gpa, functions_len)) |*func_type_index| {
const i, pos = readLeb(u32, bytes, pos);
func_type_index.* = @enumFromInt(i);
func_type_index.* = @enumFromInt(try br.takeLeb128(u32));
}
},
.table => {
const tables_len, pos = readLeb(u32, bytes, pos);
const tables_len = try br.takeLeb128(u32);
for (try wasm.object_tables.addManyAsSlice(gpa, tables_len)) |*table| {
const ref_type, pos = readEnum(std.wasm.RefType, bytes, pos);
const limits, pos = readLimits(bytes, pos);
const ref_type = try br.takeEnum(std.wasm.RefType, .little);
const limits = try readLimits(br);
table.* = .{
.name = .none,
.module_name = .none,
@ -788,9 +769,9 @@ pub fn parse(
}
},
.memory => {
const memories_len, pos = readLeb(u32, bytes, pos);
const memories_len = try br.takeLeb128(u32);
for (try wasm.object_memories.addManyAsSlice(gpa, memories_len)) |*memory| {
const limits, pos = readLimits(bytes, pos);
const limits = try readLimits(br);
memory.* = .{
.name = .none,
.flags = .{
@ -807,14 +788,17 @@ pub fn parse(
return diags.failParse(path, "object has more than one global section", .{});
global_section_index = section_index;

const section_start = pos;
const globals_len, pos = readLeb(u32, bytes, pos);
const section_start = br.seek;
const globals_len = try br.takeLeb128(u32);
for (try wasm.object_globals.addManyAsSlice(gpa, globals_len)) |*global| {
const valtype, pos = readEnum(std.wasm.Valtype, bytes, pos);
const mutable = bytes[pos] == 0x01;
pos += 1;
const init_start = pos;
const expr, pos = try readInit(wasm, bytes, pos);
const valtype = try br.takeEnum(std.wasm.Valtype, .little);
const mutable = switch (try br.takeByte()) {
0 => false,
1 => true,
else => return error.InvalidMutability,
};
const init_start = br.seek;
const expr = try readInit(wasm, br);
global.* = .{
.name = .none,
.flags = .{
@ -826,20 +810,19 @@ pub fn parse(
.expr = expr,
.object_index = object_index,
.offset = @intCast(init_start - section_start),
.size = @intCast(pos - init_start),
.size = @intCast(br.seek - init_start),
};
}
},
.@"export" => {
const exports_len, pos = readLeb(u32, bytes, pos);
const exports_len = try br.takeLeb128(u32);
// Read into scratch space, and then later add this data as if
// it were extra symbol table entries, but allow merging with
// existing symbol table data if the name matches.
for (try ss.exports.addManyAsSlice(gpa, exports_len)) |*exp| {
const name, pos = readBytes(bytes, pos);
const kind: std.wasm.ExternalKind = @enumFromInt(bytes[pos]);
pos += 1;
const index, pos = readLeb(u32, bytes, pos);
const name = try br.take(try br.takeLeb128(u32));
const kind = try br.takeEnum(std.wasm.ExternalKind, .little);
const index = try br.takeLeb128(u32);
exp.* = .{
.name = try wasm.internString(name),
.pointee = switch (kind) {
@ -852,25 +835,24 @@ pub fn parse(
}
},
.start => {
const index, pos = readLeb(u32, bytes, pos);
const index = try br.takeLeb128(u32);
start_function = @enumFromInt(functions_start + index);
},
.element => {
log.warn("unimplemented: element section in {f} {?s}", .{ path, archive_member_name });
pos = section_end;
br.seek = section_end;
},
.code => {
if (code_section_index != null)
return diags.failParse(path, "object has more than one code section", .{});
code_section_index = section_index;

const start = pos;
const count, pos = readLeb(u32, bytes, pos);
const start = br.seek;
const count = try br.takeLeb128(u32);
for (try wasm.object_functions.addManyAsSlice(gpa, count)) |*elem| {
const code_len, pos = readLeb(u32, bytes, pos);
const offset: u32 = @intCast(pos - start);
const payload = try wasm.addRelocatableDataPayload(bytes[pos..][0..code_len]);
pos += code_len;
const code_len = try br.takeLeb128(u32);
const offset: u32 = @intCast(br.seek - start);
const payload = try wasm.addRelocatableDataPayload(try br.take(code_len));
elem.* = .{
.flags = .{}, // populated from symbol table
.name = .none, // populated from symbol table
@ -886,20 +868,19 @@ pub fn parse(
return diags.failParse(path, "object has more than one data section", .{});
data_section_index = section_index;

const section_start = pos;
const count, pos = readLeb(u32, bytes, pos);
const section_start = br.seek;
const count = try br.takeLeb128(u32);
for (try wasm.object_data_segments.addManyAsSlice(gpa, count)) |*elem| {
const flags, pos = readEnum(DataSegmentFlags, bytes, pos);
const flags: DataSegmentFlags = @enumFromInt(try br.takeLeb128(u32));
if (flags == .active_memidx) {
const memidx, pos = readLeb(u32, bytes, pos);
const memidx = try br.takeLeb128(u32);
if (memidx != 0) return diags.failParse(path, "data section uses mem index {d}", .{memidx});
}
//const expr, pos = if (flags != .passive) try readInit(wasm, bytes, pos) else .{ .none, pos };
if (flags != .passive) pos = try skipInit(bytes, pos);
const data_len, pos = readLeb(u32, bytes, pos);
const segment_start = pos;
const payload = try wasm.addRelocatableDataPayload(bytes[pos..][0..data_len]);
pos += data_len;
//const expr = if (flags != .passive) try readInit(wasm, br) else .none;
if (flags != .passive) try skipInit(br);
const data_len = try br.takeLeb128(u32);
const segment_start = br.seek;
const payload = try wasm.addRelocatableDataPayload(try br.take(data_len));
elem.* = .{
.payload = payload,
.name = .none, // Populated from segment_info
@ -911,10 +892,10 @@ pub fn parse(
};
}
},
else => pos = section_end,
else => br.seek = section_end,
}
if (pos != section_end) return error.MalformedSection;
}
if (br.seek != section_end) return error.MalformedSection;
} else |_| {}
if (!saw_linking_section) return error.MissingLinkingSection;

const cpu = comp.root_mod.resolved_target.result.cpu;
@ -1422,27 +1403,27 @@ pub fn parse(
/// Based on the "features" custom section, parses it into a list of
/// features that tell the linker what features were enabled and may be mandatory
/// to be able to link.
fn parseFeatures(
wasm: *Wasm,
bytes: []const u8,
start_pos: usize,
path: Path,
) error{ OutOfMemory, LinkFailure }!struct { Wasm.Feature.Set, usize } {
fn parseFeatures(wasm: *Wasm, reader: *Reader, path: Path) error{ OutOfMemory, LinkFailure }!Wasm.Feature.Set {
const gpa = wasm.base.comp.gpa;
const diags = &wasm.base.comp.link_diags;
const features_len, var pos = readLeb(u32, bytes, start_pos);
const features_len = reader.takeLeb128(u32) catch |err|
return diags.failParse(path, "invalid features length: {t}", .{err});
// This temporary allocation could be avoided by using the string_bytes buffer as a scratch space.
const feature_buffer = try gpa.alloc(Wasm.Feature, features_len);
defer gpa.free(feature_buffer);
for (feature_buffer) |*feature| {
const prefix: Wasm.Feature.Prefix = switch (bytes[pos]) {
const prefix: Wasm.Feature.Prefix = switch (reader.takeByte() catch |err| {
return diags.failParse(path, "invalid feature prefix: {t}", .{err});
}) {
'-' => .@"-",
'+' => .@"+",
'=' => .@"=",
else => |b| return diags.failParse(path, "invalid feature prefix: 0x{x}", .{b}),
};
pos += 1;
const name, pos = readBytes(bytes, pos);
const name_len = reader.takeLeb128(u32) catch |err|
return diags.failParse(path, "bad feature name length: {t}", .{err});
const name = reader.take(name_len) catch |err|
return diags.failParse(path, "bad feature name: {t}", .{err});
const tag = std.meta.stringToEnum(Wasm.Feature.Tag, name) orelse {
return diags.failParse(path, "unrecognized wasm feature in object: {s}", .{name});
};
@ -1453,68 +1434,34 @@ fn parseFeatures(
}
std.mem.sortUnstable(Wasm.Feature, feature_buffer, {}, Wasm.Feature.lessThan);

return .fromString(try wasm.internString(@ptrCast(feature_buffer)));
}
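Editorial note, not part of the commit: a minimal sketch, assuming only std.testing, of the payload layout parseFeatures consumes; the "target_features" custom section body is a ULEB128 feature count followed by one record per feature, namely a prefix byte ('+', '-', or '='), a ULEB128 name length, and the name bytes. The feature name "mutable-globals" is used purely as an example.

// Editorial sketch, not part of the commit: wire layout of a one-feature
// "target_features" payload, matching the reads performed by parseFeatures.
const std = @import("std");

test "shape of a target_features payload" {
    const name = "mutable-globals";
    // count = 1, prefix '+', ULEB128 name length, then the name bytes.
    const payload = [_]u8{ 1, '+', name.len } ++ name.*;
    try std.testing.expectEqual(@as(u8, '+'), payload[1]);
    try std.testing.expectEqualStrings(name, payload[3..]);
}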

fn readLimits(reader: *Reader) !std.wasm.Limits {
const flags: std.wasm.Limits.Flags = @bitCast(try reader.takeByte());
const min = try reader.takeLeb128(u32);
const max = if (flags.has_max) try reader.takeLeb128(u32) else 0;
return .{
.fromString(try wasm.internString(@ptrCast(feature_buffer))),
pos,
};
}
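Editorial note, not part of the commit: a minimal sketch of the ULEB128 decoding that the removed positional helpers below performed, assuming the pre-migration std.io.fixedBufferStream and std.leb.readUleb128 APIs they relied on are still available at this commit.

// Editorial sketch, not part of the commit: decode two ULEB128 values from a
// byte slice the way the removed readLeb helper did.
const std = @import("std");

test "ULEB128 decode over a byte slice, pre-migration style" {
    const bytes = [_]u8{ 0xC8, 0x01, 0x02 }; // 200 encoded as two bytes, then 2
    var fbs = std.io.fixedBufferStream(&bytes);
    const reader = fbs.reader();
    try std.testing.expectEqual(@as(u32, 200), try std.leb.readUleb128(u32, reader));
    try std.testing.expectEqual(@as(u32, 2), try std.leb.readUleb128(u32, reader));
}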

fn readLeb(comptime T: type, bytes: []const u8, pos: usize) struct { T, usize } {
var fbr = std.io.fixedBufferStream(bytes[pos..]);
return .{
switch (@typeInfo(T).int.signedness) {
.signed => std.leb.readIleb128(T, fbr.reader()) catch unreachable,
.unsigned => std.leb.readUleb128(T, fbr.reader()) catch unreachable,
},
pos + fbr.pos,
};
}

fn readBytes(bytes: []const u8, start_pos: usize) struct { []const u8, usize } {
const len, const pos = readLeb(u32, bytes, start_pos);
return .{
bytes[pos..][0..len],
pos + len,
};
}

fn readEnum(comptime T: type, bytes: []const u8, pos: usize) struct { T, usize } {
const Tag = @typeInfo(T).@"enum".tag_type;
const int, const new_pos = readLeb(Tag, bytes, pos);
return .{ @enumFromInt(int), new_pos };
}

fn readLimits(bytes: []const u8, start_pos: usize) struct { std.wasm.Limits, usize } {
const flags: std.wasm.Limits.Flags = @bitCast(bytes[start_pos]);
const min, const max_pos = readLeb(u32, bytes, start_pos + 1);
const max, const end_pos = if (flags.has_max) readLeb(u32, bytes, max_pos) else .{ 0, max_pos };
return .{ .{
.flags = flags,
.min = min,
.max = max,
}, end_pos };
}

fn readInit(wasm: *Wasm, bytes: []const u8, pos: usize) !struct { Wasm.Expr, usize } {
const end_pos = try skipInit(bytes, pos); // one after the end opcode
return .{ try wasm.addExpr(bytes[pos..end_pos]), end_pos };
}

pub fn exprEndPos(bytes: []const u8, pos: usize) error{InvalidInitOpcode}!usize {
const opcode = bytes[pos];
return switch (@as(std.wasm.Opcode, @enumFromInt(opcode))) {
.i32_const => readLeb(i32, bytes, pos + 1)[1],
.i64_const => readLeb(i64, bytes, pos + 1)[1],
.f32_const => pos + 5,
.f64_const => pos + 9,
.global_get => readLeb(u32, bytes, pos + 1)[1],
else => return error.InvalidInitOpcode,
};
}

fn skipInit(bytes: []const u8, pos: usize) !usize {
const end_pos = try exprEndPos(bytes, pos);
const op, const final_pos = readEnum(std.wasm.Opcode, bytes, end_pos);
if (op != .end) return error.InitExprMissingEnd;
return final_pos;
fn readInit(wasm: *Wasm, reader: *Reader) !Wasm.Expr {
const start = reader.seek;
try skipInit(reader); // one after the end opcode
return wasm.addExpr(reader.buffered()[start..reader.seek]);
}

pub fn skipInit(reader: *Reader) !void {
switch (try reader.takeEnumNonexhaustive(std.wasm.Opcode, .little)) {
.i32_const => _ = try reader.takeLeb128(i32),
.i64_const => _ = try reader.takeLeb128(i64),
.f32_const => try reader.discardAll(5),
.f64_const => try reader.discardAll(9),
.global_get => _ = try reader.takeLeb128(u32),
else => return error.InvalidInitOpcode,
}
if (try reader.takeEnum(std.wasm.Opcode, .little) != .end) return error.InitExprMissingEnd;
}
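Editorial note, not part of the commit: a minimal sketch of the init-expression layout that readInit and skipInit walk, using only std.wasm.Opcode and std.testing; the expression is a single constant or global.get opcode with its immediate, terminated by the end opcode (0x0B).

// Editorial sketch, not part of the commit: byte layout of `i32.const 42; end`,
// the shape of expression skipInit consumes one opcode at a time.
const std = @import("std");

test "layout of a wasm init expression" {
    const init_expr = [_]u8{ 0x41, 42, 0x0B }; // opcode, LEB128 immediate, end
    try std.testing.expectEqual(@intFromEnum(std.wasm.Opcode.i32_const), init_expr[0]);
    try std.testing.expectEqual(@intFromEnum(std.wasm.Opcode.end), init_expr[2]);
}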