stage2: semaDecl properly analyzes the decl block

Also flattened the Decl TypedValue field into
ty, val, and has_tv,
and added fields to Decl for alignment and link section.
Andrew Kelley 2021-04-27 18:36:12 -07:00
parent fa6bb4b662
commit f86469bc5e
16 changed files with 383 additions and 293 deletions
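
A minimal sketch of the access pattern the flattening implies, using only field names visible in the diff below (ty, val, has_tv). The helper name declTypeName is illustrative and not part of the commit:

const Module = @import("Module.zig");

// Illustrative only: consumers now guard on `has_tv` instead of switching on
// the old `typed_value: union(enum) { never_succeeded, most_recent }` field.
fn declTypeName(decl: *Module.Decl) ?[]const u8 {
    if (!decl.has_tv) return null; // semantic analysis never succeeded
    return @tagName(decl.ty.zigTypeTag());
}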

View File

@ -1,5 +1,11 @@
* namespace decls table can't reference ZIR memory because it can get modified on updates
- change it so that the astgen worker compares old and new ZIR, updating existing
namespaces & decls, and creating a changelist.
* reimplement semaDecl
* use a hash map for instructions because the array is too big
- no, actually modify the Zir.Inst.Ref strategy so that each decl gets
its indexes starting at 0, so that we can use an array to store Sema
results rather than a map (see the sketch after this list).
* keep track of file dependencies/dependants
* unload files from memory when a dependency is dropped
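
A hypothetical sketch of the dense-index idea in the bullet above, assuming per-decl instruction indexes that start at 0; none of these names exist in this commit:

const std = @import("std");

// Hypothetical sketch, not part of this commit: if every Decl's body
// instructions were renumbered densely from 0, Sema results could live in a
// plain slice instead of a hash map keyed by the file-wide Zir.Inst.Index.
fn SemaResults(comptime Result: type) type {
    return struct {
        /// Slot N holds the result for the Decl's Nth instruction.
        slots: []?Result,

        fn init(gpa: *std.mem.Allocator, body_len: usize) !@This() {
            const slots = try gpa.alloc(?Result, body_len);
            std.mem.set(?Result, slots, null); // no result computed yet
            return @This(){ .slots = slots };
        }

        fn deinit(self: *@This(), gpa: *std.mem.Allocator) void {
            gpa.free(self.slots);
        }

        fn put(self: *@This(), decl_relative_index: usize, result: Result) void {
            self.slots[decl_relative_index] = result;
        }

        fn get(self: @This(), decl_relative_index: usize) ?Result {
            return self.slots[decl_relative_index];
        }
    };
}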
@ -101,39 +107,6 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
.aligned_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.alignedVarDecl(decl_node)),
.@"comptime" => {
decl.analysis = .in_progress;
// A comptime decl does not store any value so we can just deinit this arena after analysis is done.
var analysis_arena = std.heap.ArenaAllocator.init(mod.gpa);
defer analysis_arena.deinit();
var sema: Sema = .{
.mod = mod,
.gpa = mod.gpa,
.arena = &analysis_arena.allocator,
.code = code,
.inst_map = try analysis_arena.allocator.alloc(*ir.Inst, code.instructions.len),
.owner_decl = decl,
.namespace = decl.namespace,
.func = null,
.owner_func = null,
.param_inst_list = &.{},
};
var block_scope: Scope.Block = .{
.parent = null,
.sema = &sema,
.src_decl = decl,
.instructions = .{},
.inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(mod.gpa);
_ = try sema.root(&block_scope);
decl.analysis = .complete;
decl.generation = mod.generation;
return true;
},
.@"usingnamespace" => {
decl.analysis = .in_progress;
@ -424,3 +397,78 @@ pub fn analyzeNamespace(
};
}
if (align_inst != .none) {
return mod.fail(&namespace.base, .{ .node_abs = decl_node }, "TODO: implement decls with align()", .{});
}
if (section_inst != .none) {
return mod.fail(&namespace.base, .{ .node_abs = decl_node }, "TODO: implement decls with linksection()", .{});
}
/// Trailing:
/// 0. `EmitH` if `module.emit_h != null`.
/// 1. A per-Decl link object. Represents the position of the code in the output file.
/// This is populated regardless of semantic analysis and code generation.
/// Depending on the target, will be one of:
/// * Elf.TextBlock
/// * Coff.TextBlock
/// * MachO.TextBlock
/// * C.DeclBlock
/// * Wasm.DeclBlock
/// * void
/// 2. If the `Decl` is a function, a per-Decl link function object. Represents the
/// function in the linked output file.
/// This is stored here and not in `Fn` because `Decl` survives across updates but
/// `Fn` does not. Depending on the target, will be one of:
/// * Elf.SrcFn
/// * Coff.SrcFn
/// * MachO.SrcFn
/// * C.FnBlock
/// * Wasm.FnData
/// * SpirV.FnData
/// This name is relative to the containing namespace of the decl.
/// The memory is owned by the containing File ZIR.
pub fn getName(decl: Decl) ?[:0]const u8 {
const zir = decl.namespace.file_scope.zir;
const name_index = zir.extra[decl.zir_decl_index + 4];
if (name_index <= 1) return null;
return zir.nullTerminatedString(name_index);
}
extra_index += @boolToInt(has_align);
extra_index += @boolToInt(has_section);
/// Contains un-analyzed ZIR instructions generated from Zig source AST.
/// Even after we finish analysis, the ZIR is kept in memory, so that
/// comptime and inline function calls can happen.
/// Parameter names are stored here so that they may be referenced for debug info,
/// without having source code bytes loaded into memory.
/// The number of parameters is determined by referring to the type.
/// The first N elements of `extra` are indexes into `string_bytes` to
/// a null-terminated string.
/// This memory is managed with gpa, must be freed when the function is freed.
zir: Zir,
pub fn root(sema: *Sema, root_block: *Scope.Block) !Zir.Inst.Index {
const inst_data = sema.code.instructions.items(.data)[0].pl_node;
const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index);
const root_body = sema.code.extra[extra.end..][0..extra.data.body_len];
return sema.analyzeBody(root_block, root_body);
}
pub fn rootAsRef(sema: *Sema, root_block: *Scope.Block) !Zir.Inst.Ref {
const break_inst = try sema.root(root_block);
return sema.code.instructions.items(.data)[break_inst].@"break".operand;
}
/// Assumes that `root_block` ends with `break_inline`.
pub fn rootAsType(sema: *Sema, root_block: *Scope.Block) !Type {
assert(root_block.is_comptime);
const zir_inst_ref = try sema.rootAsRef(root_block);
// Source location is unneeded because resolveConstValue must have already
// been successfully called when coercing the value to a type, from the
// result location.
return sema.resolveType(root_block, .unneeded, zir_inst_ref);
}

View File

@ -1890,7 +1890,8 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
if (build_options.omit_stage2)
@panic("sadly stage2 is omitted from this build to save memory on the CI server");
const module = self.bin_file.options.module.?;
if (decl.typed_value.most_recent.typed_value.val.castTag(.function)) |payload| {
assert(decl.has_tv);
if (decl.val.castTag(.function)) |payload| {
const func = payload.data;
switch (func.state) {
.queued => module.analyzeFnBody(decl, func) catch |err| switch (err) {
@ -1907,8 +1908,8 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
}
// Here we tack on additional allocations to the Decl's arena. The allocations
// are lifetime annotations in the ZIR.
var decl_arena = decl.typed_value.most_recent.arena.?.promote(module.gpa);
defer decl.typed_value.most_recent.arena.?.* = decl_arena.state;
var decl_arena = decl.value_arena.?.promote(module.gpa);
defer decl.value_arena.?.* = decl_arena.state;
log.debug("analyze liveness of {s}", .{decl.name});
try liveness.analyze(module.gpa, &decl_arena.allocator, func.body);
@ -1918,9 +1919,9 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
}
log.debug("calling updateDecl on '{s}', type={}", .{
decl.name, decl.typed_value.most_recent.typed_value.ty,
decl.name, decl.ty,
});
assert(decl.typed_value.most_recent.typed_value.ty.hasCodeGenBits());
assert(decl.ty.hasCodeGenBits());
self.bin_file.updateDecl(module, decl) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
@ -1960,7 +1961,6 @@ pub fn performAllTheWork(self: *Compilation) error{ TimerUnsupported, OutOfMemor
const module = self.bin_file.options.module.?;
const emit_h = module.emit_h.?;
_ = try emit_h.decl_table.getOrPut(module.gpa, decl);
const tv = decl.typed_value.most_recent.typed_value;
const decl_emit_h = decl.getEmitH(module);
const fwd_decl = &decl_emit_h.fwd_decl;
fwd_decl.shrinkRetainingCapacity(0);

View File

@ -154,17 +154,29 @@ pub const DeclPlusEmitH = struct {
};
pub const Decl = struct {
/// This name is relative to the containing namespace of the decl. It uses
/// null-termination to save bytes, since there can be a lot of decls in a
/// compilation. The null byte is not allowed in symbol names, because
/// executable file formats use null-terminated strings for symbol names.
/// This name is relative to the containing namespace of the decl.
/// All Decls have names, even values that are not bound to a zig namespace.
/// This is necessary for mapping them to an address in the output file.
/// Memory owned by this decl, using Module's allocator.
/// Memory is owned by this decl, using Module's allocator.
/// Note that this cannot be changed to reference ZIR memory because when
/// ZIR updates, it would change the Decl name, but we still need the previous
/// name to delete the Decl from the hash maps it has been inserted into.
name: [*:0]const u8,
/// The most recent Type of the Decl after a successful semantic analysis.
/// Populated when `has_tv`.
ty: Type,
/// The most recent Value of the Decl after a successful semantic analysis.
/// Populated when `has_tv`.
val: Value,
/// Populated when `has_tv`.
align_val: Value,
/// Populated when `has_tv`.
linksection_val: Value,
/// The memory for ty, val, align_val, linksection_val.
/// If this is `null` then there is no memory management needed.
value_arena: ?*std.heap.ArenaAllocator.State = null,
/// The direct parent namespace of the Decl.
/// Reference to externally owned memory.
/// This is `null` for the Decl that represents a `File`.
namespace: *Scope.Namespace,
/// An integer that can be checked against the corresponding incrementing
@ -174,12 +186,11 @@ pub const Decl = struct {
/// The AST node index of this declaration.
/// Must be recomputed when the corresponding source file is modified.
src_node: ast.Node.Index,
/// Index to ZIR `extra` array to the block of ZIR code that encodes the Decl expression.
zir_block_index: Zir.Inst.Index,
zir_align_ref: Zir.Inst.Ref = .none,
zir_linksection_ref: Zir.Inst.Ref = .none,
/// The most recent value of the Decl after a successful semantic analysis.
typed_value: union(enum) {
never_succeeded: void,
most_recent: TypedValue.Managed,
},
/// Represents the "shallow" analysis status. For example, for decls that are functions,
/// the function type is analyzed with this set to `in_progress`, however, the semantic
/// analysis of the function body is performed with this value set to `success`. Functions
@ -214,11 +225,15 @@ pub const Decl = struct {
/// to require re-analysis.
outdated,
},
/// Whether `ty`, `val`, `align_val`, and `linksection_val` are populated.
has_tv: bool,
/// This flag is set when this Decl is added to `Module.deletion_set`, and cleared
/// when removed.
deletion_flag: bool,
/// Whether the corresponding AST decl has a `pub` keyword.
is_pub: bool,
/// Whether the corresponding AST decl has an `export` keyword.
is_exported: bool,
/// Represents the position of the code in the output file.
/// This is populated regardless of semantic analysis and code generation.
@ -231,6 +246,9 @@ pub const Decl = struct {
/// to save on memory usage.
fn_link: link.File.LinkFn,
/// This is stored separately in addition to being available via `zir_decl_index`
/// because when the underlying ZIR code is updated, this field is used to find
/// out if anything changed.
contents_hash: std.zig.SrcHash,
/// The shallow set of other decls whose typed_value could possibly change if this Decl's
@ -247,12 +265,12 @@ pub const Decl = struct {
pub fn destroy(decl: *Decl, module: *Module) void {
const gpa = module.gpa;
gpa.free(mem.spanZ(decl.name));
if (decl.typedValueManaged()) |tvm| {
if (tvm.typed_value.val.castTag(.function)) |payload| {
if (decl.has_tv) {
if (decl.val.castTag(.function)) |payload| {
const func = payload.data;
func.deinit(gpa);
}
tvm.deinit(gpa);
if (decl.value_arena) |a| a.promote(gpa).deinit();
}
decl.dependants.deinit(gpa);
decl.dependencies.deinit(gpa);
@ -311,9 +329,12 @@ pub const Decl = struct {
return buffer.toOwnedSlice();
}
pub fn typedValue(decl: *Decl) error{AnalysisFail}!TypedValue {
const tvm = decl.typedValueManaged() orelse return error.AnalysisFail;
return tvm.typed_value;
pub fn typedValue(decl: Decl) error{AnalysisFail}!TypedValue {
if (!decl.has_tv) return error.AnalysisFail;
return TypedValue{
.ty = decl.ty,
.val = decl.val,
};
}
pub fn value(decl: *Decl) error{AnalysisFail}!Value {
@ -334,19 +355,12 @@ pub const Decl = struct {
mem.spanZ(decl.name),
@tagName(decl.analysis),
});
if (decl.typedValueManaged()) |tvm| {
std.debug.print(" ty={} val={}", .{ tvm.typed_value.ty, tvm.typed_value.val });
if (decl.has_tv) {
std.debug.print(" ty={} val={}", .{ decl.ty, decl.val });
}
std.debug.print("\n", .{});
}
pub fn typedValueManaged(decl: *Decl) ?*TypedValue.Managed {
switch (decl.typed_value) {
.most_recent => |*x| return x,
.never_succeeded => return null,
}
}
pub fn getFileScope(decl: Decl) *Scope.File {
return decl.namespace.file_scope;
}
@ -475,16 +489,6 @@ pub const EnumFull = struct {
/// the `Decl` only, with a `Value` tag of `extern_fn`.
pub const Fn = struct {
owner_decl: *Decl,
/// Contains un-analyzed ZIR instructions generated from Zig source AST.
/// Even after we finish analysis, the ZIR is kept in memory, so that
/// comptime and inline function calls can happen.
/// Parameter names are stored here so that they may be referenced for debug info,
/// without having source code bytes loaded into memory.
/// The number of parameters is determined by referring to the type.
/// The first N elements of `extra` are indexes into `string_bytes` to
/// a null-terminated string.
/// This memory is managed with gpa, must be freed when the function is freed.
zir: Zir,
/// undefined unless analysis state is `success`.
body: ir.Body,
state: Analysis,
@ -508,9 +512,7 @@ pub const Fn = struct {
ir.dumpFn(mod, func);
}
pub fn deinit(func: *Fn, gpa: *Allocator) void {
func.zir.deinit(gpa);
}
pub fn deinit(func: *Fn, gpa: *Allocator) void {}
};
pub const Var = struct {
@ -3111,7 +3113,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void {
if (subsequent_analysis) {
// We may need to chase the dependants and re-analyze them.
// However, if the decl is a function, and the type is the same, we do not need to.
if (type_changed or decl.typed_value.most_recent.typed_value.val.tag() != .function) {
if (type_changed or decl.ty.zigTypeTag() != .Fn) {
for (decl.dependants.items()) |entry| {
const dep = entry.key;
switch (dep.analysis) {
@ -3162,13 +3164,20 @@ pub fn semaFile(mod: *Module, file: *Scope.File) InnerError!void {
.namespace = &tmp_namespace,
.generation = mod.generation,
.src_node = 0, // the root AST node for the file
.typed_value = .never_succeeded,
.analysis = .in_progress,
.deletion_flag = false,
.is_pub = true,
.is_exported = false,
.link = undefined, // don't try to codegen this
.fn_link = undefined, // not a function
.contents_hash = undefined, // top-level struct has no contents hash
.zir_block_index = undefined,
.has_tv = false,
.ty = undefined,
.val = undefined,
.align_val = undefined,
.linksection_val = undefined,
};
defer top_decl.dependencies.deinit(gpa);
@ -3223,7 +3232,56 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
const tracy = trace(@src());
defer tracy.end();
@panic("TODO implement semaDecl");
const gpa = mod.gpa;
decl.analysis = .in_progress;
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
const zir = decl.namespace.file_scope.zir;
var sema: Sema = .{
.mod = mod,
.gpa = gpa,
.arena = &analysis_arena.allocator,
.code = zir,
.inst_map = try analysis_arena.allocator.alloc(*ir.Inst, zir.instructions.len),
.owner_decl = decl,
.namespace = decl.namespace,
.func = null,
.owner_func = null,
.param_inst_list = &.{},
};
var block_scope: Scope.Block = .{
.parent = null,
.sema = &sema,
.src_decl = decl,
.instructions = .{},
.inlining = null,
.is_comptime = true,
};
defer block_scope.instructions.deinit(gpa);
const inst_data = zir.instructions.items(.data)[decl.zir_block_index].pl_node;
const extra = zir.extraData(Zir.Inst.Block, inst_data.payload_index);
const body = zir.extra[extra.end..][0..extra.data.body_len];
const break_index = try sema.analyzeBody(&block_scope, body);
if (decl.zir_align_ref != .none) {
@panic("TODO implement decl align");
}
if (decl.zir_linksection_ref != .none) {
@panic("TODO implement decl linksection");
}
decl.analysis = .complete;
decl.generation = mod.generation;
// TODO inspect the type and return a proper type_changed result
@breakpoint();
return true;
}
/// Returns the depender's index of the dependee.
@ -3489,6 +3547,7 @@ fn scanDecl(
const gpa = mod.gpa;
const zir = namespace.file_scope.zir;
const decl_block_inst_data = zir.instructions.items(.data)[decl_index].pl_node;
const decl_node = parent_decl.relativeToNodeIndex(decl_block_inst_data.src_node);
@ -3504,13 +3563,9 @@ fn scanDecl(
const decl_key = decl_name orelse &contents_hash;
const gop = try namespace.decls.getOrPut(gpa, decl_key);
if (!gop.found_existing) {
if (align_inst != .none) {
return mod.fail(&namespace.base, .{ .node_abs = decl_node }, "TODO: implement decls with align()", .{});
}
if (section_inst != .none) {
return mod.fail(&namespace.base, .{ .node_abs = decl_node }, "TODO: implement decls with linksection()", .{});
}
const new_decl = try mod.createNewDecl(namespace, decl_key, decl_node, contents_hash);
const new_decl = try mod.allocateNewDecl(namespace, decl_node);
new_decl.contents_hash = contents_hash;
new_decl.name = try gpa.dupeZ(u8, decl_key);
// Update the key reference to the longer-lived memory.
gop.entry.key = &new_decl.contents_hash;
gop.entry.value = new_decl;
@ -3524,7 +3579,11 @@ fn scanDecl(
if (want_analysis) {
mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
}
new_decl.is_exported = is_exported;
new_decl.is_pub = is_pub;
new_decl.zir_block_index = decl_index;
new_decl.zir_align_ref = align_inst;
new_decl.zir_linksection_ref = section_inst;
return;
}
const decl = gop.entry.value;
@ -3532,6 +3591,11 @@ fn scanDecl(
// have been re-ordered.
const prev_src_node = decl.src_node;
decl.src_node = decl_node;
decl.is_pub = is_pub;
decl.is_exported = is_exported;
decl.zir_block_index = decl_index;
decl.zir_align_ref = align_inst;
decl.zir_linksection_ref = section_inst;
if (deleted_decls.swapRemove(decl) == null) {
if (true) {
@panic("TODO I think this code path is unreachable; should be caught by AstGen.");
@ -3681,64 +3745,70 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) !void {
defer tracy.end();
// Use the Decl's arena for function memory.
var arena = decl.typed_value.most_recent.arena.?.promote(mod.gpa);
defer decl.typed_value.most_recent.arena.?.* = arena.state;
var arena = decl.value_arena.?.promote(mod.gpa);
defer decl.value_arena.?.* = arena.state;
const fn_ty = decl.typed_value.most_recent.typed_value.ty;
const fn_ty = decl.ty;
const param_inst_list = try mod.gpa.alloc(*ir.Inst, fn_ty.fnParamLen());
defer mod.gpa.free(param_inst_list);
for (param_inst_list) |*param_inst, param_index| {
const param_type = fn_ty.fnParamType(param_index);
const name = func.zir.nullTerminatedString(func.zir.extra[param_index]);
const arg_inst = try arena.allocator.create(ir.Inst.Arg);
arg_inst.* = .{
.base = .{
.tag = .arg,
.ty = param_type,
.src = .unneeded,
},
.name = name,
};
param_inst.* = &arg_inst.base;
var f = false;
if (f) {
return error.AnalysisFail;
}
@panic("TODO reimplement analyzeFnBody now that ZIR is whole-file");
var sema: Sema = .{
.mod = mod,
.gpa = mod.gpa,
.arena = &arena.allocator,
.code = func.zir,
.inst_map = try mod.gpa.alloc(*ir.Inst, func.zir.instructions.len),
.owner_decl = decl,
.namespace = decl.namespace,
.func = func,
.owner_func = func,
.param_inst_list = param_inst_list,
};
defer mod.gpa.free(sema.inst_map);
//for (param_inst_list) |*param_inst, param_index| {
// const param_type = fn_ty.fnParamType(param_index);
// const name = func.zir.nullTerminatedString(func.zir.extra[param_index]);
// const arg_inst = try arena.allocator.create(ir.Inst.Arg);
// arg_inst.* = .{
// .base = .{
// .tag = .arg,
// .ty = param_type,
// .src = .unneeded,
// },
// .name = name,
// };
// param_inst.* = &arg_inst.base;
//}
var inner_block: Scope.Block = .{
.parent = null,
.sema = &sema,
.src_decl = decl,
.instructions = .{},
.inlining = null,
.is_comptime = false,
};
defer inner_block.instructions.deinit(mod.gpa);
//var sema: Sema = .{
// .mod = mod,
// .gpa = mod.gpa,
// .arena = &arena.allocator,
// .code = func.zir,
// .inst_map = try mod.gpa.alloc(*ir.Inst, func.zir.instructions.len),
// .owner_decl = decl,
// .namespace = decl.namespace,
// .func = func,
// .owner_func = func,
// .param_inst_list = param_inst_list,
//};
//defer mod.gpa.free(sema.inst_map);
// AIR currently requires the arg parameters to be the first N instructions
try inner_block.instructions.appendSlice(mod.gpa, param_inst_list);
//var inner_block: Scope.Block = .{
// .parent = null,
// .sema = &sema,
// .src_decl = decl,
// .instructions = .{},
// .inlining = null,
// .is_comptime = false,
//};
//defer inner_block.instructions.deinit(mod.gpa);
func.state = .in_progress;
log.debug("set {s} to in_progress", .{decl.name});
//// AIR currently requires the arg parameters to be the first N instructions
//try inner_block.instructions.appendSlice(mod.gpa, param_inst_list);
_ = try sema.root(&inner_block);
//func.state = .in_progress;
//log.debug("set {s} to in_progress", .{decl.name});
const instructions = try arena.allocator.dupe(*ir.Inst, inner_block.instructions.items);
func.state = .success;
func.body = .{ .instructions = instructions };
log.debug("set {s} to success", .{decl.name});
//_ = try sema.root(&inner_block);
//const instructions = try arena.allocator.dupe(*ir.Inst, inner_block.instructions.items);
//func.state = .success;
//func.body = .{ .instructions = instructions };
//log.debug("set {s} to success", .{decl.name});
}
fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
@ -3756,12 +3826,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
decl.analysis = .outdated;
}
fn allocateNewDecl(
mod: *Module,
namespace: *Scope.Namespace,
src_node: ast.Node.Index,
contents_hash: std.zig.SrcHash,
) !*Decl {
fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node.Index) !*Decl {
// If we have emit-h then we must allocate a bigger structure to store the emit-h state.
const new_decl: *Decl = if (mod.emit_h != null) blk: {
const parent_struct = try mod.gpa.create(DeclPlusEmitH);
@ -3776,10 +3841,15 @@ fn allocateNewDecl(
.name = "",
.namespace = namespace,
.src_node = src_node,
.typed_value = .{ .never_succeeded = {} },
.has_tv = false,
.ty = undefined,
.val = undefined,
.align_val = undefined,
.linksection_val = undefined,
.analysis = .unreferenced,
.deletion_flag = false,
.contents_hash = contents_hash,
.contents_hash = undefined,
.zir_block_index = undefined,
.link = switch (mod.comp.bin_file.tag) {
.coff => .{ .coff = link.File.Coff.TextBlock.empty },
.elf => .{ .elf = link.File.Elf.TextBlock.empty },
@ -3798,23 +3868,11 @@ fn allocateNewDecl(
},
.generation = 0,
.is_pub = false,
.is_exported = false,
};
return new_decl;
}
fn createNewDecl(
mod: *Module,
namespace: *Scope.Namespace,
decl_name: []const u8,
src_node: ast.Node.Index,
contents_hash: std.zig.SrcHash,
) !*Decl {
const new_decl = try mod.allocateNewDecl(namespace, src_node, contents_hash);
errdefer mod.gpa.destroy(new_decl);
new_decl.name = try mem.dupeZ(mod.gpa, u8, decl_name);
return new_decl;
}
/// Get error value for error tag `name`.
pub fn getErrorValue(mod: *Module, name: []const u8) !std.StringHashMapUnmanaged(ErrorInt).Entry {
const gop = try mod.global_error_set.getOrPut(mod.gpa, name);
@ -3837,10 +3895,9 @@ pub fn analyzeExport(
exported_decl: *Decl,
) !void {
try mod.ensureDeclAnalyzed(exported_decl);
const typed_value = exported_decl.typed_value.most_recent.typed_value;
switch (typed_value.ty.zigTypeTag()) {
switch (exported_decl.ty.zigTypeTag()) {
.Fn => {},
else => return mod.fail(scope, src, "unable to export type '{}'", .{typed_value.ty}),
else => return mod.fail(scope, src, "unable to export type '{}'", .{exported_decl.ty}),
}
try mod.decl_exports.ensureCapacity(mod.gpa, mod.decl_exports.items().len + 1);
@ -4017,20 +4074,18 @@ pub fn createAnonymousDecl(
) !*Decl {
const name_index = mod.getNextAnonNameIndex();
const scope_decl = scope.ownerDecl().?;
const name = try std.fmt.allocPrint(mod.gpa, "{s}__anon_{d}", .{ scope_decl.name, name_index });
defer mod.gpa.free(name);
const name = try std.fmt.allocPrintZ(mod.gpa, "{s}__anon_{d}", .{ scope_decl.name, name_index });
errdefer mod.gpa.free(name);
const namespace = scope_decl.namespace;
const src_hash: std.zig.SrcHash = undefined;
const new_decl = try mod.createNewDecl(namespace, name, scope_decl.src_node, src_hash);
const new_decl = try mod.allocateNewDecl(namespace, scope_decl.src_node);
new_decl.name = name;
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
decl_arena_state.* = decl_arena.state;
new_decl.typed_value = .{
.most_recent = .{
.typed_value = typed_value,
.arena = decl_arena_state,
},
};
new_decl.ty = typed_value.ty;
new_decl.val = typed_value.val;
new_decl.has_tv = true;
new_decl.analysis = .complete;
new_decl.generation = mod.generation;

View File

@ -64,28 +64,6 @@ const LazySrcLoc = Module.LazySrcLoc;
const RangeSet = @import("RangeSet.zig");
const AstGen = @import("AstGen.zig");
pub fn root(sema: *Sema, root_block: *Scope.Block) !Zir.Inst.Index {
const inst_data = sema.code.instructions.items(.data)[0].pl_node;
const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index);
const root_body = sema.code.extra[extra.end..][0..extra.data.body_len];
return sema.analyzeBody(root_block, root_body);
}
pub fn rootAsRef(sema: *Sema, root_block: *Scope.Block) !Zir.Inst.Ref {
const break_inst = try sema.root(root_block);
return sema.code.instructions.items(.data)[break_inst].@"break".operand;
}
/// Assumes that `root_block` ends with `break_inline`.
pub fn rootAsType(sema: *Sema, root_block: *Scope.Block) !Type {
assert(root_block.is_comptime);
const zir_inst_ref = try sema.rootAsRef(root_block);
// Source location is unneeded because resolveConstValue must have already
// been successfully called when coercing the value to a type, from the
// result location.
return sema.resolveType(root_block, .unneeded, zir_inst_ref);
}
/// Returns only the result from the body that is specified.
/// Only appropriate to call when it is determined at comptime that this body
/// has no peers.
@ -997,7 +975,7 @@ fn zirRetPtr(
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
try sema.requireFunctionBlock(block, src);
const fn_ty = sema.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const fn_ty = sema.func.?.owner_decl.ty;
const ret_type = fn_ty.fnReturnType();
const ptr_type = try sema.mod.simplePtrType(sema.arena, ret_type, true, .One);
return block.addNoOp(src, ptr_type, .alloc);
@ -1022,7 +1000,7 @@ fn zirRetType(
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
try sema.requireFunctionBlock(block, src);
const fn_ty = sema.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const fn_ty = sema.func.?.owner_decl.ty;
const ret_type = fn_ty.fnReturnType();
return sema.mod.constType(sema.arena, src, ret_type);
}
@ -2022,6 +2000,9 @@ fn analyzeCall(
.block_inst = block_inst,
},
};
if (true) {
@panic("TODO reimplement inline fn call after whole-file astgen");
}
var inline_sema: Sema = .{
.mod = sema.mod,
.gpa = sema.mod.gpa,
@ -4949,7 +4930,7 @@ fn analyzeRet(
if (need_coercion) {
if (sema.func) |func| {
const fn_ty = func.owner_decl.typed_value.most_recent.typed_value.ty;
const fn_ty = func.owner_decl.ty;
const fn_ret_ty = fn_ty.fnReturnType();
const casted_operand = try sema.coerce(block, fn_ret_ty, operand, src);
if (fn_ret_ty.zigTypeTag() == .Void)

View File

@ -400,7 +400,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
const module_fn = typed_value.val.castTag(.function).?.data;
const fn_type = module_fn.owner_decl.typed_value.most_recent.typed_value.ty;
assert(module_fn.owner_decl.has_tv);
const fn_type = module_fn.owner_decl.ty;
var branch_stack = std.ArrayList(Branch).init(bin_file.allocator);
defer {
@ -1925,7 +1926,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
else
unreachable;
const return_type = func.owner_decl.typed_value.most_recent.typed_value.ty.fnReturnType();
assert(func.owner_decl.has_tv);
const return_type = func.owner_decl.ty.fnReturnType();
// First, push the return address, then jump; if noreturn, don't bother with the first step
// TODO: implement packed struct -> u16 at comptime and move the bitcast here
var instr = Instruction{ .condition = .always, .input0 = .immediate, .input1 = .zero, .modify_flags = false, .output = .jump, .command = .load16 };

View File

@ -190,8 +190,8 @@ pub const DeclGen = struct {
const decl = val.castTag(.decl_ref).?.data;
// Determine if we must pointer cast.
const decl_tv = decl.typed_value.most_recent.typed_value;
if (t.eql(decl_tv.ty)) {
assert(decl.has_tv);
if (t.eql(decl.ty)) {
try writer.print("&{s}", .{decl.name});
} else {
try writer.writeAll("(");
@ -326,12 +326,11 @@ pub const DeclGen = struct {
if (!is_global) {
try w.writeAll("static ");
}
const tv = dg.decl.typed_value.most_recent.typed_value;
try dg.renderType(w, tv.ty.fnReturnType());
try dg.renderType(w, dg.decl.ty.fnReturnType());
const decl_name = mem.span(dg.decl.name);
try w.print(" {s}(", .{decl_name});
const param_len = tv.ty.fnParamLen();
const is_var_args = tv.ty.fnIsVarArgs();
const param_len = dg.decl.ty.fnParamLen();
const is_var_args = dg.decl.ty.fnIsVarArgs();
if (param_len == 0 and !is_var_args)
try w.writeAll("void")
else {
@ -340,7 +339,7 @@ pub const DeclGen = struct {
if (index > 0) {
try w.writeAll(", ");
}
try dg.renderType(w, tv.ty.fnParamType(index));
try dg.renderType(w, dg.decl.ty.fnParamType(index));
try w.print(" a{d}", .{index});
}
}
@ -545,8 +544,10 @@ pub fn genDecl(o: *Object) !void {
const tracy = trace(@src());
defer tracy.end();
const tv = o.dg.decl.typed_value.most_recent.typed_value;
const tv: TypedValue = .{
.ty = o.dg.decl.ty,
.val = o.dg.decl.val,
};
if (tv.val.castTag(.function)) |func_payload| {
const is_global = o.dg.functionIsGlobal(tv);
const fwd_decl_writer = o.dg.fwd_decl.writer();
@ -589,7 +590,10 @@ pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
const tracy = trace(@src());
defer tracy.end();
const tv = dg.decl.typed_value.most_recent.typed_value;
const tv: TypedValue = .{
.ty = dg.decl.ty,
.val = dg.decl.val,
};
const writer = dg.fwd_decl.writer();
switch (tv.ty.zigTypeTag()) {
@ -842,7 +846,7 @@ fn genCall(o: *Object, inst: *Inst.Call) !CValue {
else
unreachable;
const fn_ty = fn_decl.typed_value.most_recent.typed_value.ty;
const fn_ty = fn_decl.ty;
const ret_ty = fn_ty.fnReturnType();
const unused_result = inst.base.isUnused();
var result_local: CValue = .none;

View File

@ -325,17 +325,17 @@ pub const DeclGen = struct {
fn genDecl(self: *DeclGen) !void {
const decl = self.decl;
const typed_value = decl.typed_value.most_recent.typed_value;
assert(decl.has_tv);
log.debug("gen: {s} type: {}, value: {}", .{ decl.name, typed_value.ty, typed_value.val });
log.debug("gen: {s} type: {}, value: {}", .{ decl.name, decl.ty, decl.val });
if (typed_value.val.castTag(.function)) |func_payload| {
if (decl.val.castTag(.function)) |func_payload| {
const func = func_payload.data;
const llvm_func = try self.resolveLLVMFunction(func.owner_decl);
// This gets the LLVM values from the function and stores them in `self.args`.
const fn_param_len = func.owner_decl.typed_value.most_recent.typed_value.ty.fnParamLen();
const fn_param_len = func.owner_decl.ty.fnParamLen();
var args = try self.gpa.alloc(*const llvm.Value, fn_param_len);
for (args) |*arg, i| {
@ -368,7 +368,7 @@ pub const DeclGen = struct {
defer fg.deinit();
try fg.genBody(func.body);
} else if (typed_value.val.castTag(.extern_fn)) |extern_fn| {
} else if (decl.val.castTag(.extern_fn)) |extern_fn| {
_ = try self.resolveLLVMFunction(extern_fn.data);
} else {
_ = try self.resolveGlobalDecl(decl);
@ -380,7 +380,8 @@ pub const DeclGen = struct {
// TODO: do we want to store this in our own datastructure?
if (self.llvmModule().getNamedFunction(func.name)) |llvm_fn| return llvm_fn;
const zig_fn_type = func.typed_value.most_recent.typed_value.ty;
assert(func.has_tv);
const zig_fn_type = func.ty;
const return_type = zig_fn_type.fnReturnType();
const fn_param_len = zig_fn_type.fnParamLen();
@ -415,11 +416,11 @@ pub const DeclGen = struct {
// TODO: do we want to store this in our own datastructure?
if (self.llvmModule().getNamedGlobal(decl.name)) |val| return val;
const typed_value = decl.typed_value.most_recent.typed_value;
assert(decl.has_tv);
// TODO: remove this redundant `getLLVMType`, it is also called in `genTypedValue`.
const llvm_type = try self.getLLVMType(typed_value.ty);
const val = try self.genTypedValue(typed_value, null);
const llvm_type = try self.getLLVMType(decl.ty);
const val = try self.genTypedValue(.{ .ty = decl.ty, .val = decl.val }, null);
const global = self.llvmModule().addGlobal(llvm_type, decl.name);
llvm.setInitializer(global, val);
@ -688,7 +689,8 @@ pub const FuncGen = struct {
else
unreachable;
const zig_fn_type = fn_decl.typed_value.most_recent.typed_value.ty;
assert(fn_decl.has_tv);
const zig_fn_type = fn_decl.ty;
const llvm_fn = try self.dg.resolveLLVMFunction(fn_decl);
const num_args = inst.args.len;

View File

@ -591,7 +591,8 @@ pub const Context = struct {
}
fn genFunctype(self: *Context) InnerError!void {
const ty = self.decl.typed_value.most_recent.typed_value.ty;
assert(self.decl.has_tv);
const ty = self.decl.ty;
const writer = self.func_type_data.writer();
try writer.writeByte(wasm.function_type);

View File

@ -300,6 +300,7 @@ pub const File = struct {
/// May be called before or after updateDeclExports but must be called
/// after allocateDeclIndexes for any given Decl.
pub fn updateDecl(base: *File, module: *Module, decl: *Module.Decl) !void {
assert(decl.has_tv);
switch (base.tag) {
.coff => return @fieldParentPtr(Coff, "base", base).updateDecl(module, decl),
.elf => return @fieldParentPtr(Elf, "base", base).updateDecl(module, decl),
@ -311,6 +312,7 @@ pub const File = struct {
}
pub fn updateDeclLineNumber(base: *File, module: *Module, decl: *Module.Decl) !void {
assert(decl.has_tv);
switch (base.tag) {
.coff => return @fieldParentPtr(Coff, "base", base).updateDeclLineNumber(module, decl),
.elf => return @fieldParentPtr(Elf, "base", base).updateDeclLineNumber(module, decl),
@ -461,6 +463,7 @@ pub const File = struct {
decl: *Module.Decl,
exports: []const *Module.Export,
) !void {
assert(decl.has_tv);
switch (base.tag) {
.coff => return @fieldParentPtr(Coff, "base", base).updateDeclExports(module, decl, exports),
.elf => return @fieldParentPtr(Elf, "base", base).updateDeclExports(module, decl, exports),

View File

@ -206,34 +206,30 @@ pub fn flushModule(self: *C, comp: *Compilation) !void {
// generate, rather than querying here, be faster?
for (self.decl_table.items()) |kv| {
const decl = kv.key;
switch (decl.typed_value) {
.most_recent => |tvm| {
const buf = buf: {
if (tvm.typed_value.val.castTag(.function)) |_| {
var it = decl.fn_link.c.typedefs.iterator();
while (it.next()) |new| {
if (typedefs.get(new.key)) |previous| {
try err_typedef_writer.print("typedef {s} {s};\n", .{ previous, new.value.name });
} else {
try typedefs.ensureCapacity(typedefs.capacity() + 1);
try err_typedef_writer.writeAll(new.value.rendered);
typedefs.putAssumeCapacityNoClobber(new.key, new.value.name);
}
}
fn_count += 1;
break :buf decl.fn_link.c.fwd_decl.items;
if (!decl.has_tv) continue;
const buf = buf: {
if (decl.val.castTag(.function)) |_| {
var it = decl.fn_link.c.typedefs.iterator();
while (it.next()) |new| {
if (typedefs.get(new.key)) |previous| {
try err_typedef_writer.print("typedef {s} {s};\n", .{ previous, new.value.name });
} else {
break :buf decl.link.c.code.items;
try typedefs.ensureCapacity(typedefs.capacity() + 1);
try err_typedef_writer.writeAll(new.value.rendered);
typedefs.putAssumeCapacityNoClobber(new.key, new.value.name);
}
};
all_buffers.appendAssumeCapacity(.{
.iov_base = buf.ptr,
.iov_len = buf.len,
});
file_size += buf.len;
},
.never_succeeded => continue,
}
}
fn_count += 1;
break :buf decl.fn_link.c.fwd_decl.items;
} else {
break :buf decl.link.c.code.items;
}
};
all_buffers.appendAssumeCapacity(.{
.iov_base = buf.ptr,
.iov_len = buf.len,
});
file_size += buf.len;
}
err_typedef_item.* = .{
@ -246,18 +242,14 @@ pub fn flushModule(self: *C, comp: *Compilation) !void {
try all_buffers.ensureCapacity(all_buffers.items.len + fn_count);
for (self.decl_table.items()) |kv| {
const decl = kv.key;
switch (decl.typed_value) {
.most_recent => |tvm| {
if (tvm.typed_value.val.castTag(.function)) |_| {
const buf = decl.link.c.code.items;
all_buffers.appendAssumeCapacity(.{
.iov_base = buf.ptr,
.iov_len = buf.len,
});
file_size += buf.len;
}
},
.never_succeeded => continue,
if (!decl.has_tv) continue;
if (decl.val.castTag(.function)) |_| {
const buf = decl.link.c.code.items;
all_buffers.appendAssumeCapacity(.{
.iov_base = buf.ptr,
.iov_len = buf.len,
});
file_size += buf.len;
}
}

View File

@ -662,15 +662,17 @@ pub fn updateDecl(self: *Coff, module: *Module, decl: *Module.Decl) !void {
if (build_options.have_llvm)
if (self.llvm_object) |llvm_object| return try llvm_object.updateDecl(module, decl);
const typed_value = decl.typed_value.most_recent.typed_value;
if (typed_value.val.tag() == .extern_fn) {
if (decl.val.tag() == .extern_fn) {
return; // TODO Should we do more when front-end analyzed extern decl?
}
var code_buffer = std.ArrayList(u8).init(self.base.allocator);
defer code_buffer.deinit();
const res = try codegen.generateSymbol(&self.base, decl.srcLoc(), typed_value, &code_buffer, .none);
const res = try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl.val,
}, &code_buffer, .none);
const code = switch (res) {
.externally_managed => |x| x,
.appended => code_buffer.items,
@ -681,7 +683,7 @@ pub fn updateDecl(self: *Coff, module: *Module, decl: *Module.Decl) !void {
},
};
const required_alignment = typed_value.ty.abiAlignment(self.base.options.target);
const required_alignment = decl.ty.abiAlignment(self.base.options.target);
const curr_size = decl.link.coff.size;
if (curr_size != 0) {
const capacity = decl.link.coff.capacity();

View File

@ -2191,8 +2191,7 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
if (build_options.have_llvm)
if (self.llvm_object) |llvm_object| return try llvm_object.updateDecl(module, decl);
const typed_value = decl.typed_value.most_recent.typed_value;
if (typed_value.val.tag() == .extern_fn) {
if (decl.val.tag() == .extern_fn) {
return; // TODO Should we do more when front-end analyzed extern decl?
}
@ -2214,7 +2213,7 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
dbg_info_type_relocs.deinit(self.base.allocator);
}
const is_fn: bool = switch (typed_value.ty.zigTypeTag()) {
const is_fn: bool = switch (decl.ty.zigTypeTag()) {
.Fn => true,
else => false,
};
@ -2270,7 +2269,7 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
const decl_name_with_null = decl.name[0 .. mem.lenZ(decl.name) + 1];
try dbg_info_buffer.ensureCapacity(dbg_info_buffer.items.len + 25 + decl_name_with_null.len);
const fn_ret_type = typed_value.ty.fnReturnType();
const fn_ret_type = decl.ty.fnReturnType();
const fn_ret_has_bits = fn_ret_type.hasCodeGenBits();
if (fn_ret_has_bits) {
dbg_info_buffer.appendAssumeCapacity(abbrev_subprogram);
@ -2299,7 +2298,10 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
} else {
// TODO implement .debug_info for global variables
}
const res = try codegen.generateSymbol(&self.base, decl.srcLoc(), typed_value, &code_buffer, .{
const res = try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl.val,
}, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg_line_buffer,
.dbg_info = &dbg_info_buffer,
@ -2316,7 +2318,7 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
},
};
const required_alignment = typed_value.ty.abiAlignment(self.base.options.target);
const required_alignment = decl.ty.abiAlignment(self.base.options.target);
const stt_bits: u8 = if (is_fn) elf.STT_FUNC else elf.STT_OBJECT;
@ -2678,7 +2680,6 @@ pub fn updateDeclExports(
defer tracy.end();
try self.global_symbols.ensureCapacity(self.base.allocator, self.global_symbols.items.len + exports.len);
const typed_value = decl.typed_value.most_recent.typed_value;
if (decl.link.elf.local_sym_index == 0) return;
const decl_sym = self.local_symbols.items[decl.link.elf.local_sym_index];

View File

@ -1138,8 +1138,7 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
const tracy = trace(@src());
defer tracy.end();
const typed_value = decl.typed_value.most_recent.typed_value;
if (typed_value.val.tag() == .extern_fn) {
if (decl.val.tag() == .extern_fn) {
return; // TODO Should we do more when front-end analyzed extern decl?
}
@ -1160,7 +1159,10 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
}
const res = if (debug_buffers) |*dbg|
try codegen.generateSymbol(&self.base, decl.srcLoc(), typed_value, &code_buffer, .{
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl.val,
}, &code_buffer, .{
.dwarf = .{
.dbg_line = &dbg.dbg_line_buffer,
.dbg_info = &dbg.dbg_info_buffer,
@ -1168,7 +1170,10 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
},
})
else
try codegen.generateSymbol(&self.base, decl.srcLoc(), typed_value, &code_buffer, .none);
try codegen.generateSymbol(&self.base, decl.srcLoc(), .{
.ty = decl.ty,
.val = decl.val,
}, &code_buffer, .none);
const code = switch (res) {
.externally_managed => |x| x,
@ -1184,7 +1189,7 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
},
};
const required_alignment = typed_value.ty.abiAlignment(self.base.options.target);
const required_alignment = decl.ty.abiAlignment(self.base.options.target);
assert(decl.link.macho.local_sym_index != 0); // Caller forgot to call allocateDeclIndexes()
const symbol = &self.locals.items[decl.link.macho.local_sym_index];

View File

@ -946,8 +946,8 @@ pub fn initDeclDebugBuffers(
var dbg_info_buffer = std.ArrayList(u8).init(allocator);
var dbg_info_type_relocs: link.File.DbgInfoTypeRelocsTable = .{};
const typed_value = decl.typed_value.most_recent.typed_value;
switch (typed_value.ty.zigTypeTag()) {
assert(decl.has_tv);
switch (decl.ty.zigTypeTag()) {
.Fn => {
// For functions we need to add a prologue to the debug line program.
try dbg_line_buffer.ensureCapacity(26);
@ -999,7 +999,7 @@ pub fn initDeclDebugBuffers(
const decl_name_with_null = decl.name[0 .. mem.lenZ(decl.name) + 1];
try dbg_info_buffer.ensureCapacity(dbg_info_buffer.items.len + 27 + decl_name_with_null.len);
const fn_ret_type = typed_value.ty.fnReturnType();
const fn_ret_type = decl.ty.fnReturnType();
const fn_ret_has_bits = fn_ret_type.hasCodeGenBits();
if (fn_ret_has_bits) {
dbg_info_buffer.appendAssumeCapacity(abbrev_subprogram);
@ -1058,8 +1058,8 @@ pub fn commitDeclDebugInfo(
const symbol = self.base.locals.items[decl.link.macho.local_sym_index];
const text_block = &decl.link.macho;
// If the Decl is a function, we need to update the __debug_line program.
const typed_value = decl.typed_value.most_recent.typed_value;
switch (typed_value.ty.zigTypeTag()) {
assert(decl.has_tv);
switch (decl.ty.zigTypeTag()) {
.Fn => {
// Perform the relocations based on vaddr.
{

View File

@ -179,13 +179,9 @@ pub fn flushModule(self: *SpirV, comp: *Compilation) !void {
for (self.decl_table.items()) |entry| {
const decl = entry.key;
switch (decl.typed_value) {
.most_recent => |tvm| {
const fn_data = &decl.fn_link.spirv;
all_buffers.appendAssumeCapacity(wordsToIovConst(fn_data.code.items));
},
.never_succeeded => continue,
}
if (!decl.has_tv) continue;
const fn_data = &decl.fn_link.spirv;
all_buffers.appendAssumeCapacity(wordsToIovConst(fn_data.code.items));
}
var file_size: u64 = 0;

View File

@ -175,9 +175,8 @@ pub fn allocateDeclIndexes(self: *Wasm, decl: *Module.Decl) !void {
self.offset_table.items[block.offset_index] = 0;
const typed_value = decl.typed_value.most_recent.typed_value;
if (typed_value.ty.zigTypeTag() == .Fn) {
switch (typed_value.val.tag()) {
if (decl.ty.zigTypeTag() == .Fn) {
switch (decl.val.tag()) {
// dependent on function type, appends it to the correct list
.function => try self.funcs.append(self.base.allocator, decl),
.extern_fn => try self.ext_funcs.append(self.base.allocator, decl),
@ -191,7 +190,6 @@ pub fn allocateDeclIndexes(self: *Wasm, decl: *Module.Decl) !void {
pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
std.debug.assert(decl.link.wasm.init); // Must call allocateDeclIndexes()
const typed_value = decl.typed_value.most_recent.typed_value;
const fn_data = &decl.fn_link.wasm;
fn_data.functype.items.len = 0;
fn_data.code.items.len = 0;
@ -210,7 +208,7 @@ pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
defer context.deinit();
// generate the 'code' section for the function declaration
const result = context.gen(typed_value) catch |err| switch (err) {
const result = context.gen(.{ .ty = decl.ty, .val = decl.val }) catch |err| switch (err) {
error.CodegenFail => {
decl.analysis = .codegen_failure;
try module.failed_decls.put(module.gpa, decl, context.err_msg);
@ -228,7 +226,7 @@ pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
fn_data.functype = context.func_type_data.toUnmanaged();
const block = &decl.link.wasm;
if (typed_value.ty.zigTypeTag() == .Fn) {
if (decl.ty.zigTypeTag() == .Fn) {
// as locals are patched afterwards, the offsets of funcidx's are off,
// here we update them to correct them
for (fn_data.idx_refs.items) |*func| {
@ -262,7 +260,7 @@ pub fn updateDeclExports(
pub fn freeDecl(self: *Wasm, decl: *Module.Decl) void {
if (self.getFuncidx(decl)) |func_idx| {
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
switch (decl.val.tag()) {
.function => _ = self.funcs.swapRemove(func_idx),
.extern_fn => _ = self.ext_funcs.swapRemove(func_idx),
else => unreachable,
@ -429,7 +427,7 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
try leb.writeULEB128(writer, @intCast(u32, exprt.options.name.len));
try writer.writeAll(exprt.options.name);
switch (exprt.exported_decl.typed_value.most_recent.typed_value.ty.zigTypeTag()) {
switch (exprt.exported_decl.ty.zigTypeTag()) {
.Fn => {
// Type of the export
try writer.writeByte(wasm.externalKind(.function));
@ -802,7 +800,7 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
/// TODO: we could maintain a hash map to potentially make this simpler
fn getFuncidx(self: Wasm, decl: *Module.Decl) ?u32 {
var offset: u32 = 0;
const slice = switch (decl.typed_value.most_recent.typed_value.val.tag()) {
const slice = switch (decl.val.tag()) {
.function => blk: {
// when the target is a regular function, we have to calculate
// the offset of where the index starts