Mirror of https://github.com/ziglang/zig.git, synced 2025-12-16 03:03:09 +00:00

Merge pull request #8439 from Luukdegram/wasm-mem
stage2: wasm - "Hello world"

Commit 952032b40c
@@ -280,3 +280,6 @@ pub const block_empty: u8 = 0x40;
// binary constants
pub const magic = [_]u8{ 0x00, 0x61, 0x73, 0x6D }; // \0asm
pub const version = [_]u8{ 0x01, 0x00, 0x00, 0x00 }; // version 1

// Each wasm page size is 64kB
pub const page_size = 64 * 1024;
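For context (not part of the change): magic and version form the fixed 8-byte preamble of every wasm module, and page_size is the 64 KiB unit the memory section is measured in further down. A minimal sketch, assuming only the standard library, of emitting that preamble:

    const std = @import("std");

    pub fn main() !void {
        // "\0asm" magic followed by version 1, little-endian.
        const header = [_]u8{ 0x00, 0x61, 0x73, 0x6D } ++ [_]u8{ 0x01, 0x00, 0x00, 0x00 };
        var file = try std.fs.cwd().createFile("empty.wasm", .{});
        defer file.close();
        try file.writeAll(&header);
    }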
@@ -3029,9 +3029,12 @@ fn astgenAndSemaVarDecl(
};
defer gen_scope.instructions.deinit(mod.gpa);

const init_result_loc: AstGen.ResultLoc = if (var_decl.ast.type_node != 0) .{
.ty = try AstGen.expr(&gen_scope, &gen_scope.base, .{ .ty = .type_type }, var_decl.ast.type_node),
} else .none;
const init_result_loc: AstGen.ResultLoc = if (var_decl.ast.type_node != 0)
.{
.ty = try AstGen.expr(&gen_scope, &gen_scope.base, .{ .ty = .type_type }, var_decl.ast.type_node),
}
else
.none;

const init_inst = try AstGen.comptimeExpr(
&gen_scope,
@@ -3834,7 +3837,7 @@ fn allocateNewDecl(
.elf => .{ .elf = link.File.Elf.TextBlock.empty },
.macho => .{ .macho = link.File.MachO.TextBlock.empty },
.c => .{ .c = link.File.C.DeclBlock.empty },
.wasm => .{ .wasm = {} },
.wasm => .{ .wasm = link.File.Wasm.DeclBlock.empty },
.spirv => .{ .spirv = {} },
},
.fn_link = switch (mod.comp.bin_file.tag) {

@@ -3842,7 +3845,7 @@ fn allocateNewDecl(
.elf => .{ .elf = link.File.Elf.SrcFn.empty },
.macho => .{ .macho = link.File.MachO.SrcFn.empty },
.c => .{ .c = link.File.C.FnBlock.empty },
.wasm => .{ .wasm = null },
.wasm => .{ .wasm = link.File.Wasm.FnData.empty },
.spirv => .{ .spirv = .{} },
},
.generation = 0,
@@ -16,9 +16,12 @@ const Value = @import("../value.zig").Value;
const Compilation = @import("../Compilation.zig");
const AnyMCValue = @import("../codegen.zig").AnyMCValue;
const LazySrcLoc = Module.LazySrcLoc;
const link = @import("../link.zig");
const TypedValue = @import("../TypedValue.zig");

/// Wasm Value, created when generating an instruction
const WValue = union(enum) {
/// May be referenced but is unused
none: void,
/// Index of the local variable
local: u32,
@@ -163,25 +166,23 @@ fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode {
.global_get => return .global_get,
.global_set => return .global_set,

.load => if (args.width) |width|
switch (width) {
8 => switch (args.valtype1.?) {
.i32 => if (args.signedness.? == .signed) return .i32_load8_s else return .i32_load8_u,
.i64 => if (args.signedness.? == .signed) return .i64_load8_s else return .i64_load8_u,
.f32, .f64 => unreachable,
},
16 => switch (args.valtype1.?) {
.i32 => if (args.signedness.? == .signed) return .i32_load16_s else return .i32_load16_u,
.i64 => if (args.signedness.? == .signed) return .i64_load16_s else return .i64_load16_u,
.f32, .f64 => unreachable,
},
32 => switch (args.valtype1.?) {
.i64 => if (args.signedness.? == .signed) return .i64_load32_s else return .i64_load32_u,
.i32, .f32, .f64 => unreachable,
},
else => unreachable,
}
else switch (args.valtype1.?) {
.load => if (args.width) |width| switch (width) {
8 => switch (args.valtype1.?) {
.i32 => if (args.signedness.? == .signed) return .i32_load8_s else return .i32_load8_u,
.i64 => if (args.signedness.? == .signed) return .i64_load8_s else return .i64_load8_u,
.f32, .f64 => unreachable,
},
16 => switch (args.valtype1.?) {
.i32 => if (args.signedness.? == .signed) return .i32_load16_s else return .i32_load16_u,
.i64 => if (args.signedness.? == .signed) return .i64_load16_s else return .i64_load16_u,
.f32, .f64 => unreachable,
},
32 => switch (args.valtype1.?) {
.i64 => if (args.signedness.? == .signed) return .i64_load32_s else return .i64_load32_u,
.i32, .f32, .f64 => unreachable,
},
else => unreachable,
} else switch (args.valtype1.?) {
.i32 => return .i32_load,
.i64 => return .i64_load,
.f32 => return .f32_load,
@@ -469,6 +470,13 @@ test "Wasm - buildOpcode" {
testing.expectEqual(@as(wasm.Opcode, .f64_reinterpret_i64), f64_reinterpret_i64);
}

pub const Result = union(enum) {
/// The codegen bytes have been appended to `Context.code`
appended: void,
/// The data is managed externally and is part of the `Result`
externally_managed: []const u8,
};

/// Hashmap to store generated `WValue` for each `Inst`
pub const ValueTable = std.AutoHashMapUnmanaged(*Inst, WValue);
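For reference (not part of the change itself): a Result lets the caller decide whether the generated bytes already sit in the codegen buffer or are borrowed from elsewhere, which is how updateDecl consumes it further down. A small self-contained sketch with a stand-in type:

    // Hypothetical stand-in for codegen.Result, just to show the shape of the API.
    const Result = union(enum) {
        appended: void,
        externally_managed: []const u8,
    };

    // The caller resolves which bytes to use: its own buffer for .appended,
    // the borrowed slice for .externally_managed.
    fn resolveBytes(result: Result, own_buffer: []const u8) []const u8 {
        return switch (result) {
            .appended => own_buffer,
            .externally_managed => |payload| payload,
        };
    }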
@@ -504,6 +512,8 @@ pub const Context = struct {
const InnerError = error{
OutOfMemory,
CodegenFail,
/// Can occur when dereferencing a pointer that points to a `Decl` of which the analysis has failed
AnalysisFail,
};

pub fn deinit(self: *Context) void {
@@ -533,11 +543,20 @@ pub const Context = struct {

/// Using a given `Type`, returns the corresponding wasm Valtype
fn typeToValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!wasm.Valtype {
return switch (ty.tag()) {
.f32 => .f32,
.f64 => .f64,
.u32, .i32, .bool => .i32,
.u64, .i64 => .i64,
return switch (ty.zigTypeTag()) {
.Float => blk: {
const bits = ty.floatBits(self.target);
if (bits == 16 or bits == 32) break :blk wasm.Valtype.f32;
if (bits == 64) break :blk wasm.Valtype.f64;
return self.fail(src, "Float bit size not supported by wasm: '{d}'", .{bits});
},
.Int => blk: {
const info = ty.intInfo(self.target);
if (info.bits <= 32) break :blk wasm.Valtype.i32;
if (info.bits > 32 and info.bits <= 64) break :blk wasm.Valtype.i64;
return self.fail(src, "Integer bit size not supported by wasm: '{d}'", .{info.bits});
},
.Bool, .Pointer => wasm.Valtype.i32,
else => self.fail(src, "TODO - Wasm valtype for type '{s}'", .{ty.tag()}),
};
}
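For reference (a sketch, not part of the change): the new mapping collapses every supported Zig scalar onto one of wasm's four value types by bit width instead of matching individual type tags. A standalone version of the same rule, assuming std.wasm.Valtype from the standard library:

    const std = @import("std");

    /// wasm value type that can hold an integer of `bits` bits,
    /// or null when the width exceeds what a single wasm value carries.
    fn valtypeForIntBits(bits: u16) ?std.wasm.Valtype {
        if (bits <= 32) return .i32;
        if (bits <= 64) return .i64;
        return null;
    }

    /// Same idea for floats: f16 and f32 map to f32, f64 maps to f64.
    fn valtypeForFloatBits(bits: u16) ?std.wasm.Valtype {
        return switch (bits) {
            16, 32 => .f32,
            64 => .f64,
            else => null,
        };
    }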
@@ -604,48 +623,82 @@ pub const Context = struct {
}

/// Generates the wasm bytecode for the function declaration belonging to `Context`
pub fn gen(self: *Context) InnerError!void {
assert(self.code.items.len == 0);
try self.genFunctype();
pub fn gen(self: *Context, typed_value: TypedValue) InnerError!Result {
switch (typed_value.ty.zigTypeTag()) {
.Fn => {
try self.genFunctype();

// Write instructions
// TODO: check for and handle death of instructions
const tv = self.decl.typed_value.most_recent.typed_value;
const mod_fn = blk: {
if (tv.val.castTag(.function)) |func| break :blk func.data;
if (tv.val.castTag(.extern_fn)) |ext_fn| return; // don't need codegen for extern functions
return self.fail(.{ .node_offset = 0 }, "TODO: Wasm codegen for decl type '{s}'", .{tv.ty.tag()});
};
// Write instructions
// TODO: check for and handle death of instructions
const mod_fn = blk: {
if (typed_value.val.castTag(.function)) |func| break :blk func.data;
if (typed_value.val.castTag(.extern_fn)) |ext_fn| return Result.appended; // don't need code body for extern functions
unreachable;
};

// Reserve space to write the size after generating the code as well as space for locals count
try self.code.resize(10);
// Reserve space to write the size after generating the code as well as space for locals count
try self.code.resize(10);

try self.genBody(mod_fn.body);
try self.genBody(mod_fn.body);

// finally, write our local types at the 'offset' position
{
leb.writeUnsignedFixed(5, self.code.items[5..10], @intCast(u32, self.locals.items.len));
// finally, write our local types at the 'offset' position
{
leb.writeUnsignedFixed(5, self.code.items[5..10], @intCast(u32, self.locals.items.len));

// offset into 'code' section where we will put our locals types
var local_offset: usize = 10;
// offset into 'code' section where we will put our locals types
var local_offset: usize = 10;

// emit the actual locals amount
for (self.locals.items) |local| {
var buf: [6]u8 = undefined;
leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
buf[5] = local;
try self.code.insertSlice(local_offset, &buf);
local_offset += 6;
}
// emit the actual locals amount
for (self.locals.items) |local| {
var buf: [6]u8 = undefined;
leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
buf[5] = local;
try self.code.insertSlice(local_offset, &buf);
local_offset += 6;
}
}

const writer = self.code.writer();
try writer.writeByte(wasm.opcode(.end));

// Fill in the size of the generated code to the reserved space at the
// beginning of the buffer.
const size = self.code.items.len - 5 + self.decl.fn_link.wasm.idx_refs.items.len * 5;
leb.writeUnsignedFixed(5, self.code.items[0..5], @intCast(u32, size));

// codegen data has been appended to `code`
return Result.appended;
},
.Array => {
if (typed_value.val.castTag(.bytes)) |payload| {
if (typed_value.ty.sentinel()) |sentinel| {
try self.code.appendSlice(payload.data);

switch (try self.gen(.{
.ty = typed_value.ty.elemType(),
.val = sentinel,
})) {
.appended => return Result.appended,
.externally_managed => |data| {
try self.code.appendSlice(data);
return Result.appended;
},
}
}
return Result{ .externally_managed = payload.data };
} else return self.fail(.{ .node_offset = 0 }, "TODO implement gen for more kinds of arrays", .{});
},
.Int => {
const info = typed_value.ty.intInfo(self.target);
if (info.bits == 8 and info.signedness == .unsigned) {
const int_byte = typed_value.val.toUnsignedInt();
try self.code.append(@intCast(u8, int_byte));
return Result.appended;
}
return self.fail(.{ .node_offset = 0 }, "TODO: Implement codegen for int type: '{}'", .{typed_value.ty});
},
else => |tag| return self.fail(.{ .node_offset = 0 }, "TODO: Implement zig type codegen for type: '{s}'", .{tag}),
}

const writer = self.code.writer();
try writer.writeByte(wasm.opcode(.end));

// Fill in the size of the generated code to the reserved space at the
// beginning of the buffer.
const size = self.code.items.len - 5 + self.decl.fn_link.wasm.?.idx_refs.items.len * 5;
leb.writeUnsignedFixed(5, self.code.items[0..5], @intCast(u32, size));
}

fn genInst(self: *Context, inst: *Inst) InnerError!WValue {
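A note on the "reserve 5 bytes, patch later" pattern used above (not part of the diff): writeUnsignedFixed emits a non-minimal, fixed-width ULEB128, so the size slot always occupies exactly 5 bytes no matter what value is written into it afterwards, and nothing emitted after the slot has to move. A self-contained sketch of the trick, following the two-argument @intCast style of the Zig version this diff targets:

    const std = @import("std");
    const leb = std.leb;

    // Append `body` to `code`, prefixed by a fixed-width 5-byte ULEB128 size,
    // mirroring the reserve-then-patch scheme in Context.gen above.
    fn appendSizedBody(code: *std.ArrayList(u8), body: []const u8) !void {
        const start = code.items.len;
        try code.resize(start + 5); // reserve the size slot
        try code.appendSlice(body);
        const size = @intCast(u32, code.items.len - start - 5);
        leb.writeUnsignedFixed(5, code.items[start..][0..5], size);
    }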
@@ -721,7 +774,7 @@ pub const Context = struct {

// The function index immediate argument will be filled in using this data
// in link.Wasm.flush().
try self.decl.fn_link.wasm.?.idx_refs.append(self.gpa, .{
try self.decl.fn_link.wasm.idx_refs.append(self.gpa, .{
.offset = @intCast(u32, self.code.items.len),
.decl = target,
});
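For context (a sketch, not the linker's actual flush code): these (offset, decl) pairs behave like tiny relocations. The call site is emitted without its callee index, and at flush time the final function index is written at the recorded offset as a fixed 5-byte ULEB128, which is also why gen adds idx_refs.items.len * 5 to the reserved size. A self-contained illustration with hypothetical names:

    const std = @import("std");
    const leb = std.leb;

    const IdxRef = struct { offset: u32, func_index: u32 };

    // Splice resolved function indexes into `code` at their recorded offsets,
    // each as a fixed 5-byte ULEB128. `refs` must be ordered by offset.
    fn applyIdxRefs(code: *std.ArrayList(u8), refs: []const IdxRef) !void {
        var inserted: u32 = 0;
        for (refs) |ref| {
            var buf: [5]u8 = undefined;
            leb.writeUnsignedFixed(5, &buf, ref.func_index);
            // Earlier insertions shift later offsets, so account for them.
            try code.insertSlice(ref.offset + inserted, &buf);
            inserted += 5;
        }
    }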
@@ -812,6 +865,22 @@ pub const Context = struct {
else => |bits| return self.fail(inst.base.src, "Wasm TODO: emitConstant for float with {d} bits", .{bits}),
}
},
.Pointer => {
if (inst.val.castTag(.decl_ref)) |payload| {
const decl = payload.data;

// offset into the offset table within the 'data' section
const ptr_width = self.target.cpu.arch.ptrBitWidth() / 8;
try writer.writeByte(wasm.opcode(.i32_const));
try leb.writeULEB128(writer, decl.link.wasm.offset_index * ptr_width);

// memory instruction followed by their memarg immediate
// memarg ::== x:u32, y:u32 => {align x, offset y}
try writer.writeByte(wasm.opcode(.i32_load));
try leb.writeULEB128(writer, @as(u32, 0));
try leb.writeULEB128(writer, @as(u32, 0));
} else return self.fail(inst.base.src, "Wasm TODO: emitConstant for other const pointer tag {s}", .{inst.val.tag()});
},
.Void => {},
else => |ty| return self.fail(inst.base.src, "Wasm TODO: emitConstant for zigTypeTag {s}", .{ty}),
}
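What the .Pointer branch emits (for context, not part of the diff): a decl_ref constant goes through one level of indirection. The i32.const pushes the address of the decl's slot in the offset table (offset_index times the pointer width), and the i32.load then reads the decl's real data-section address out of that slot, so data can be relocated at flush time without re-emitting code. A small sketch of the same byte sequence, assuming std.wasm.opcode:

    const std = @import("std");
    const leb = std.leb;

    // Emit "i32.const <slot_addr>; i32.load align=0 offset=0", mirroring the
    // .Pointer / decl_ref branch above.
    fn emitDeclRef(writer: anytype, offset_index: u32, ptr_width: u32) !void {
        try writer.writeByte(std.wasm.opcode(.i32_const));
        try leb.writeULEB128(writer, offset_index * ptr_width);
        try writer.writeByte(std.wasm.opcode(.i32_load));
        try leb.writeULEB128(writer, @as(u32, 0)); // memarg: alignment
        try leb.writeULEB128(writer, @as(u32, 0)); // memarg: offset
    }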
@@ -138,7 +138,7 @@ pub const File = struct {
coff: Coff.TextBlock,
macho: MachO.TextBlock,
c: C.DeclBlock,
wasm: void,
wasm: Wasm.DeclBlock,
spirv: void,
};

@@ -147,7 +147,7 @@ pub const File = struct {
coff: Coff.SrcFn,
macho: MachO.SrcFn,
c: C.FnBlock,
wasm: ?Wasm.FnData,
wasm: Wasm.FnData,
spirv: SpirV.FnData,
};

@@ -328,7 +328,8 @@ pub const File = struct {
.elf => return @fieldParentPtr(Elf, "base", base).allocateDeclIndexes(decl),
.macho => return @fieldParentPtr(MachO, "base", base).allocateDeclIndexes(decl),
.c => return @fieldParentPtr(C, "base", base).allocateDeclIndexes(decl),
.wasm, .spirv => {},
.wasm => return @fieldParentPtr(Wasm, "base", base).allocateDeclIndexes(decl),
.spirv => {},
}
}
@@ -16,21 +16,11 @@ const link = @import("../link.zig");
const trace = @import("../tracy.zig").trace;
const build_options = @import("build_options");
const Cache = @import("../Cache.zig");
const TypedValue = @import("../TypedValue.zig");

pub const base_tag = link.File.Tag.wasm;

pub const FnData = struct {
/// Generated code for the type of the function
functype: std.ArrayListUnmanaged(u8) = .{},
/// Generated code for the body of the function
code: std.ArrayListUnmanaged(u8) = .{},
/// Locations in the generated code where function indexes must be filled in.
/// This must be kept ordered by offset.
idx_refs: std.ArrayListUnmanaged(struct { offset: u32, decl: *Module.Decl }) = .{},
};

base: link.File,

/// List of all function Decls to be written to the output file. The index of
/// each Decl in this list at the time of writing the binary is used as the
/// function index. In the event where ext_funcs' size is not 0, the index of

@@ -45,6 +35,77 @@ ext_funcs: std.ArrayListUnmanaged(*Module.Decl) = .{},
/// to support existing code.
/// TODO: Allow setting this through a flag?
host_name: []const u8 = "env",
/// The last `DeclBlock` that was initialized will be saved here.
last_block: ?*DeclBlock = null,
/// Table with offsets, each element represents an offset with the value being
/// the offset into the 'data' section where the data lives
offset_table: std.ArrayListUnmanaged(u32) = .{},
/// List of offset indexes which are free to be used for new decl's.
/// Each element's value points to an index into the offset_table.
offset_table_free_list: std.ArrayListUnmanaged(u32) = .{},
/// List of all `Decl` that are currently alive.
/// This is meant for bookkeeping so we can safely clean up all codegen memory
/// when calling `deinit`
symbols: std.ArrayListUnmanaged(*Module.Decl) = .{},
pub const FnData = struct {
/// Generated code for the type of the function
functype: std.ArrayListUnmanaged(u8),
/// Generated code for the body of the function
code: std.ArrayListUnmanaged(u8),
/// Locations in the generated code where function indexes must be filled in.
/// This must be kept ordered by offset.
idx_refs: std.ArrayListUnmanaged(struct { offset: u32, decl: *Module.Decl }),

pub const empty: FnData = .{
.functype = .{},
.code = .{},
.idx_refs = .{},
};
};

pub const DeclBlock = struct {
/// Determines whether the `DeclBlock` has been initialized for codegen.
init: bool,
/// Index into the `symbols` list.
symbol_index: u32,
/// Index into the offset table
offset_index: u32,
/// The size of the block and how large part of the data section it occupies.
/// Will be 0 when the Decl will not live inside the data section and `data` will be undefined.
size: u32,
/// Points to the previous and next blocks.
/// Can be used to find the total size, and used to calculate the `offset` based on the previous block.
prev: ?*DeclBlock,
next: ?*DeclBlock,
/// Pointer to data that will be written to the 'data' section.
/// This data either lives in `FnData.code` or is externally managed.
/// For data that does not live inside the 'data' section, this field will be undefined. (size == 0).
data: [*]const u8,

pub const empty: DeclBlock = .{
.init = false,
.symbol_index = 0,
.offset_index = 0,
.size = 0,
.prev = null,
.next = null,
.data = undefined,
};

/// Unplugs the `DeclBlock` from the chain
fn unplug(self: *DeclBlock) void {
if (self.prev) |prev| {
prev.next = self.next;
}

if (self.next) |next| {
next.prev = self.prev;
}
self.next = null;
self.prev = null;
}
};
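For context (a sketch, not part of the change): the prev/next chain is kept in the same order the blocks are laid out in the 'data' section, so a block's position is simply the sum of the sizes of everything linked before it (the offset table itself is placed in front of all blocks by flushModule). A stripped-down stand-in that shows the relationship:

    // Hypothetical, simplified stand-in for DeclBlock: only what the
    // offset calculation needs.
    const Block = struct {
        size: u32,
        prev: ?*Block = null,
        next: ?*Block = null,
    };

    // Offset of `block`'s payload relative to the start of the data segments.
    fn dataOffset(block: *const Block) u32 {
        var offset: u32 = 0;
        var cur = block.prev;
        while (cur) |prev| : (cur = prev.prev) {
            offset += prev.size;
        }
        return offset;
    }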
pub fn openPath(allocator: *Allocator, sub_path: []const u8, options: link.Options) !*Wasm {
assert(options.object_format == .wasm);

@@ -52,7 +113,7 @@ pub fn openPath(allocator: *Allocator, sub_path: []const u8, options: link.Optio
if (options.use_llvm) return error.LLVM_BackendIsTODO_ForWasm; // TODO
if (options.use_lld) return error.LLD_LinkingIsTODO_ForWasm; // TODO

// TODO: read the file and keep vaild parts instead of truncating
// TODO: read the file and keep valid parts instead of truncating
const file = try options.emit.?.directory.handle.createFile(sub_path, .{ .truncate = true, .read = true });
errdefer file.close();
@@ -80,50 +141,67 @@ pub fn createEmpty(gpa: *Allocator, options: link.Options) !*Wasm {
}

pub fn deinit(self: *Wasm) void {
for (self.funcs.items) |decl| {
decl.fn_link.wasm.?.functype.deinit(self.base.allocator);
decl.fn_link.wasm.?.code.deinit(self.base.allocator);
decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator);
}
for (self.ext_funcs.items) |decl| {
decl.fn_link.wasm.?.functype.deinit(self.base.allocator);
decl.fn_link.wasm.?.code.deinit(self.base.allocator);
decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator);
for (self.symbols.items) |decl| {
decl.fn_link.wasm.functype.deinit(self.base.allocator);
decl.fn_link.wasm.code.deinit(self.base.allocator);
decl.fn_link.wasm.idx_refs.deinit(self.base.allocator);
}

self.funcs.deinit(self.base.allocator);
self.ext_funcs.deinit(self.base.allocator);
self.offset_table.deinit(self.base.allocator);
self.offset_table_free_list.deinit(self.base.allocator);
self.symbols.deinit(self.base.allocator);
}

pub fn allocateDeclIndexes(self: *Wasm, decl: *Module.Decl) !void {
if (decl.link.wasm.init) return;

try self.offset_table.ensureCapacity(self.base.allocator, self.offset_table.items.len + 1);
try self.symbols.ensureCapacity(self.base.allocator, self.symbols.items.len + 1);

const block = &decl.link.wasm;
block.init = true;

block.symbol_index = @intCast(u32, self.symbols.items.len);
self.symbols.appendAssumeCapacity(decl);

if (self.offset_table_free_list.popOrNull()) |index| {
block.offset_index = index;
} else {
block.offset_index = @intCast(u32, self.offset_table.items.len);
_ = self.offset_table.addOneAssumeCapacity();
}

self.offset_table.items[block.offset_index] = 0;

const typed_value = decl.typed_value.most_recent.typed_value;
if (typed_value.ty.zigTypeTag() == .Fn) {
switch (typed_value.val.tag()) {
// dependent on function type, appends it to the correct list
.function => try self.funcs.append(self.base.allocator, decl),
.extern_fn => try self.ext_funcs.append(self.base.allocator, decl),
else => unreachable,
}
}
}

// Generate code for the Decl, storing it in memory to be later written to
// the file on flush().
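For context (a sketch with hypothetical names, not the linker's code): offset-table slots are recycled rather than leaked. freeDecl pushes a decl's slot index onto offset_table_free_list, and allocateDeclIndexes above pops from that list before growing the table. The reuse pattern in isolation:

    const std = @import("std");

    const SlotAllocator = struct {
        table: std.ArrayList(u32),
        free_list: std.ArrayList(u32),

        // Hand out a slot index, preferring a previously freed one.
        fn alloc(self: *SlotAllocator) !u32 {
            if (self.free_list.popOrNull()) |index| return index;
            const index = @intCast(u32, self.table.items.len);
            _ = try self.table.addOne();
            return index;
        }

        // Return a slot so a later decl can reuse it.
        fn free(self: *SlotAllocator, index: u32) void {
            self.free_list.append(index) catch {};
        }
    };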
pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
std.debug.assert(decl.link.wasm.init); // Must call allocateDeclIndexes()

const typed_value = decl.typed_value.most_recent.typed_value;
if (typed_value.ty.zigTypeTag() != .Fn)
return error.TODOImplementNonFnDeclsForWasm;

if (decl.fn_link.wasm) |*fn_data| {
fn_data.functype.items.len = 0;
fn_data.code.items.len = 0;
fn_data.idx_refs.items.len = 0;
} else {
decl.fn_link.wasm = .{};
// dependent on function type, appends it to the correct list
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
.function => try self.funcs.append(self.base.allocator, decl),
.extern_fn => try self.ext_funcs.append(self.base.allocator, decl),
else => return error.TODOImplementNonFnDeclsForWasm,
}
}
const fn_data = &decl.fn_link.wasm.?;

var managed_functype = fn_data.functype.toManaged(self.base.allocator);
var managed_code = fn_data.code.toManaged(self.base.allocator);
const fn_data = &decl.fn_link.wasm;
fn_data.functype.items.len = 0;
fn_data.code.items.len = 0;
fn_data.idx_refs.items.len = 0;

var context = codegen.Context{
.gpa = self.base.allocator,
.values = .{},
.code = managed_code,
.func_type_data = managed_functype,
.code = fn_data.code.toManaged(self.base.allocator),
.func_type_data = fn_data.functype.toManaged(self.base.allocator),
.decl = decl,
.err_msg = undefined,
.locals = .{},

@@ -132,7 +210,7 @@ pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
defer context.deinit();

// generate the 'code' section for the function declaration
context.gen() catch |err| switch (err) {
const result = context.gen(typed_value) catch |err| switch (err) {
error.CodegenFail => {
decl.analysis = .codegen_failure;
try module.failed_decls.put(module.gpa, decl, context.err_msg);

@@ -141,15 +219,38 @@ pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
else => |e| return err,
};

// as locals are patched afterwards, the offsets of funcidx's are off,
// here we update them to correct them
for (decl.fn_link.wasm.?.idx_refs.items) |*func| {
// For each local, add 6 bytes (count + type)
func.offset += @intCast(u32, context.locals.items.len * 6);
const code: []const u8 = switch (result) {
.appended => @as([]const u8, context.code.items),
.externally_managed => |payload| payload,
};

fn_data.code = context.code.toUnmanaged();
fn_data.functype = context.func_type_data.toUnmanaged();

const block = &decl.link.wasm;
if (typed_value.ty.zigTypeTag() == .Fn) {
// as locals are patched afterwards, the offsets of funcidx's are off,
// here we update them to correct them
for (fn_data.idx_refs.items) |*func| {
// For each local, add 6 bytes (count + type)
func.offset += @intCast(u32, context.locals.items.len * 6);
}
} else {
block.size = @intCast(u32, code.len);
block.data = code.ptr;
}

fn_data.functype = context.func_type_data.toUnmanaged();
fn_data.code = context.code.toUnmanaged();
// If we're updating an existing decl, unplug it first
// to avoid infinite loops due to earlier links
block.unplug();

if (self.last_block) |last| {
if (last != block) {
last.next = block;
block.prev = last;
}
}
self.last_block = block;
}
pub fn updateDeclExports(

@@ -160,18 +261,34 @@ pub fn updateDeclExports(
) !void {}

pub fn freeDecl(self: *Wasm, decl: *Module.Decl) void {
// TODO: remove this assert when non-function Decls are implemented
assert(decl.typed_value.most_recent.typed_value.ty.zigTypeTag() == .Fn);
const func_idx = self.getFuncidx(decl).?;
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
.function => _ = self.funcs.swapRemove(func_idx),
.extern_fn => _ = self.ext_funcs.swapRemove(func_idx),
else => unreachable,
if (self.getFuncidx(decl)) |func_idx| {
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
.function => _ = self.funcs.swapRemove(func_idx),
.extern_fn => _ = self.ext_funcs.swapRemove(func_idx),
else => unreachable,
}
}
decl.fn_link.wasm.?.functype.deinit(self.base.allocator);
decl.fn_link.wasm.?.code.deinit(self.base.allocator);
decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator);
decl.fn_link.wasm = null;
const block = &decl.link.wasm;

if (self.last_block == block) {
self.last_block = block.prev;
}

block.unplug();

self.offset_table_free_list.append(self.base.allocator, decl.link.wasm.offset_index) catch {};
_ = self.symbols.swapRemove(block.symbol_index);

// update symbol_index as we swap removed the last symbol into the removed's position
if (block.symbol_index < self.symbols.items.len)
self.symbols.items[block.symbol_index].link.wasm.symbol_index = block.symbol_index;

block.init = false;

decl.fn_link.wasm.functype.deinit(self.base.allocator);
decl.fn_link.wasm.code.deinit(self.base.allocator);
decl.fn_link.wasm.idx_refs.deinit(self.base.allocator);
decl.fn_link.wasm = undefined;
}
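A note on the swapRemove fixup above (not part of the diff): swapRemove moves the previously-last element into the vacated slot, so that element's stored symbol_index has to be rewritten, unless the removed symbol was itself the last one. The same pattern in isolation, with a hypothetical Symbol type standing in for Module.Decl:

    const std = @import("std");

    // Each "symbol" remembers its own index into the list.
    const Symbol = struct { index: u32 };

    fn removeSymbol(list: *std.ArrayList(*Symbol), index: u32) void {
        _ = list.swapRemove(index);
        // The former last element now lives at `index`; keep its
        // back-reference in sync unless we removed the tail itself.
        if (index < list.items.len) {
            list.items[index].index = index;
        }
    }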
pub fn flush(self: *Wasm, comp: *Compilation) !void {

@@ -188,6 +305,25 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {

const file = self.base.file.?;
const header_size = 5 + 1;
// ptr_width in bytes
const ptr_width = self.base.options.target.cpu.arch.ptrBitWidth() / 8;
// The size of the offset table in bytes
// The table contains all decl's with its corresponding offset into
// the 'data' section
const offset_table_size = @intCast(u32, self.offset_table.items.len * ptr_width);

// The size of the data, this together with `offset_table_size` amounts to the
// total size of the 'data' section
var first_decl: ?*DeclBlock = null;
const data_size: u32 = if (self.last_block) |last| blk: {
var size = last.size;
var cur = last;
while (cur.prev) |prev| : (cur = prev) {
size += prev.size;
}
first_decl = cur;
break :blk size;
} else 0;

// No need to rewrite the magic/version header
try file.setEndPos(@sizeOf(@TypeOf(wasm.magic ++ wasm.version)));
@@ -199,8 +335,8 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {

// extern functions are defined in the wasm binary first through the `import`
// section, so define their func types first
for (self.ext_funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.?.functype.items);
for (self.funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.?.functype.items);
for (self.ext_funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.functype.items);
for (self.funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.functype.items);

try writeVecSectionHeader(
file,
@@ -257,6 +393,31 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
);
}

// Memory section
if (data_size != 0) {
const header_offset = try reserveVecSectionHeader(file);
const writer = file.writer();

try leb.writeULEB128(writer, @as(u32, 0));
// Calculate the number of memory pages required and write it.
// Wasm uses 64kB page sizes. Round up to ensure the data segments fit into the memory
try leb.writeULEB128(
writer,
try std.math.divCeil(
u32,
offset_table_size + data_size,
std.wasm.page_size,
),
);
try writeVecSectionHeader(
file,
header_offset,
.memory,
@intCast(u32, (try file.getPos()) - header_offset - header_size),
@as(u32, 1), // wasm currently only supports 1 linear memory segment
);
}
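For reference (not part of the change): the memory section declares its minimum size in 64 KiB pages, so the byte total of the offset table plus all data segments is rounded up to whole pages. That calculation on its own:

    const std = @import("std");

    // Number of 64 KiB wasm pages needed to hold `bytes` bytes of data.
    fn requiredPages(bytes: u32) !u32 {
        const page_size: u32 = 64 * 1024; // same value as std.wasm.page_size
        return std.math.divCeil(u32, bytes, page_size);
    }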
// Export section
if (self.base.options.module) |module| {
const header_offset = try reserveVecSectionHeader(file);

@@ -281,6 +442,16 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
count += 1;
}
}

// export memory if size is not 0
if (data_size != 0) {
try leb.writeULEB128(writer, @intCast(u32, "memory".len));
try writer.writeAll("memory");
try writer.writeByte(wasm.externalKind(.memory));
try leb.writeULEB128(writer, @as(u32, 0)); // only 1 memory 'object' can exist
count += 1;
}
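For context (a sketch with a hypothetical helper name, assuming std.wasm.ExternalKind): every export entry is a length-prefixed name, an external-kind byte, and the index of the exported item; the block above writes exactly that for the single linear memory. The same encoding factored into a helper:

    const std = @import("std");
    const leb = std.leb;

    // Write one wasm export entry: vec(byte) name, kind byte, item index.
    fn writeExport(writer: anytype, name: []const u8, kind: std.wasm.ExternalKind, index: u32) !void {
        try leb.writeULEB128(writer, @intCast(u32, name.len));
        try writer.writeAll(name);
        try writer.writeByte(std.wasm.externalKind(kind));
        try leb.writeULEB128(writer, index);
    }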
try writeVecSectionHeader(
file,
header_offset,

@@ -295,7 +466,7 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
const header_offset = try reserveVecSectionHeader(file);
const writer = file.writer();
for (self.funcs.items) |decl| {
const fn_data = &decl.fn_link.wasm.?;
const fn_data = &decl.fn_link.wasm;

// Write the already generated code to the file, inserting
// function indexes where required.

@@ -320,6 +491,51 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
@intCast(u32, self.funcs.items.len),
);
}
// Data section
if (data_size != 0) {
const header_offset = try reserveVecSectionHeader(file);
const writer = file.writer();
var len: u32 = 0;
// index to memory section (currently, there can only be 1 memory section in wasm)
try leb.writeULEB128(writer, @as(u32, 0));

// offset into data section
try writer.writeByte(wasm.opcode(.i32_const));
try leb.writeILEB128(writer, @as(i32, 0));
try writer.writeByte(wasm.opcode(.end));

const total_size = offset_table_size + data_size;

// offset table + data size
try leb.writeULEB128(writer, total_size);

// fill in the offset table and the data segments
const file_offset = try file.getPos();
var cur = first_decl;
var data_offset = offset_table_size;
while (cur) |cur_block| : (cur = cur_block.next) {
if (cur_block.size == 0) continue;
std.debug.assert(cur_block.init);

const offset = (cur_block.offset_index) * ptr_width;
var buf: [4]u8 = undefined;
std.mem.writeIntLittle(u32, &buf, data_offset);

try file.pwriteAll(&buf, file_offset + offset);
try file.pwriteAll(cur_block.data[0..cur_block.size], file_offset + data_offset);
data_offset += cur_block.size;
}

try file.seekTo(file_offset + data_offset);
try writeVecSectionHeader(
file,
header_offset,
.data,
@intCast(u32, (file_offset + data_offset) - header_offset - header_size),
@intCast(u32, 1), // only 1 data section
);
}
}

fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {