Merge remote-tracking branch 'origin/master' into llvm15

commit 0a4cfb81bc
Andrew Kelley, 2022-09-13 13:50:25 -07:00
28 changed files with 1255 additions and 1275 deletions

View File

@ -404,7 +404,6 @@ fn callFn(comptime f: anytype, args: anytype) switch (Impl) {
}
// pthreads don't support exit status, ignore value
_ = status;
return default_value;
},
.ErrorUnion => |info| {

View File

@ -582,8 +582,6 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
old_align: u29,
ret_addr: usize,
) void {
_ = old_align;
const entry = self.large_allocations.getEntry(@ptrToInt(old_mem.ptr)) orelse {
if (config.safety) {
@panic("Invalid free");

View File

@ -171,7 +171,6 @@ pub fn approxEqRel(comptime T: type, x: T, y: T, tolerance: T) bool {
}
pub fn approxEq(comptime T: type, x: T, y: T, tolerance: T) bool {
_ = T;
_ = x;
_ = y;
_ = tolerance;

View File

@ -2685,11 +2685,7 @@ fn genDefers(
}
}
fn checkUsed(
gz: *GenZir,
outer_scope: *Scope,
inner_scope: *Scope,
) InnerError!void {
fn checkUsed(gz: *GenZir, outer_scope: *Scope, inner_scope: *Scope) InnerError!void {
const astgen = gz.astgen;
var scope = inner_scope;
@ -2698,15 +2694,23 @@ fn checkUsed(
.gen_zir => scope = scope.cast(GenZir).?.parent,
.local_val => {
const s = scope.cast(Scope.LocalVal).?;
if (!s.used) {
if (s.used == 0 and s.discarded == 0) {
try astgen.appendErrorTok(s.token_src, "unused {s}", .{@tagName(s.id_cat)});
} else if (s.used != 0 and s.discarded != 0) {
try astgen.appendErrorTokNotes(s.discarded, "pointless discard of {s}", .{@tagName(s.id_cat)}, &[_]u32{
try gz.astgen.errNoteTok(s.used, "used here", .{}),
});
}
scope = s.parent;
},
.local_ptr => {
const s = scope.cast(Scope.LocalPtr).?;
if (!s.used) {
if (s.used == 0 and s.discarded == 0) {
try astgen.appendErrorTok(s.token_src, "unused {s}", .{@tagName(s.id_cat)});
} else if (s.used != 0 and s.discarded != 0) {
try astgen.appendErrorTokNotes(s.discarded, "pointless discard of {s}", .{@tagName(s.id_cat)}, &[_]u32{
try gz.astgen.errNoteTok(s.used, "used here", .{}),
});
}
scope = s.parent;
},
@ -6848,11 +6852,10 @@ fn localVarRef(
scope: *Scope,
rl: ResultLoc,
ident: Ast.Node.Index,
ident_token: Ast.Node.Index,
ident_token: Ast.TokenIndex,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const gpa = astgen.gpa;
const name_str_index = try astgen.identAsString(ident_token);
var s = scope;
var found_already: ?Ast.Node.Index = null; // we have found a decl with the same name already
@ -6865,7 +6868,11 @@ fn localVarRef(
if (local_val.name == name_str_index) {
// Locals cannot shadow anything, so we do not need to look for ambiguous
// references in this case.
local_val.used = true;
if (rl == .discard) {
local_val.discarded = ident_token;
} else {
local_val.used = ident_token;
}
const value_inst = try tunnelThroughClosure(
gz,
@ -6884,7 +6891,11 @@ fn localVarRef(
.local_ptr => {
const local_ptr = s.cast(Scope.LocalPtr).?;
if (local_ptr.name == name_str_index) {
local_ptr.used = true;
if (rl == .discard) {
local_ptr.discarded = ident_token;
} else {
local_ptr.used = ident_token;
}
// Can't close over a runtime variable
if (num_namespaces_out != 0 and !local_ptr.maybe_comptime) {
@ -7519,7 +7530,7 @@ fn builtinCall(
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
if (local_val.name == decl_name) {
local_val.used = true;
local_val.used = ident_token;
_ = try gz.addPlNode(.export_value, node, Zir.Inst.ExportValue{
.operand = local_val.inst,
.options = try comptimeExpr(gz, scope, .{ .coerced_ty = .export_options_type }, params[1]),
@ -7533,7 +7544,7 @@ fn builtinCall(
if (local_ptr.name == decl_name) {
if (!local_ptr.maybe_comptime)
return astgen.failNode(params[0], "unable to export runtime-known value", .{});
local_ptr.used = true;
local_ptr.used = ident_token;
const loaded = try gz.addUnNode(.load, local_ptr.ptr, node);
_ = try gz.addPlNode(.export_value, node, Zir.Inst.ExportValue{
.operand = loaded,
@ -10065,11 +10076,15 @@ const Scope = struct {
inst: Zir.Inst.Ref,
/// Source location of the corresponding variable declaration.
token_src: Ast.TokenIndex,
/// Track the first identifier where it is referenced.
/// 0 means never referenced.
used: Ast.TokenIndex = 0,
/// Track the identifier where it is discarded, like this `_ = foo;`.
/// 0 means never discarded.
discarded: Ast.TokenIndex = 0,
/// String table index.
name: u32,
id_cat: IdCat,
/// Track whether the name has been referenced.
used: bool = false,
};
/// This could be a `const` or `var` local. It has a pointer instead of a value.
@ -10084,14 +10099,18 @@ const Scope = struct {
ptr: Zir.Inst.Ref,
/// Source location of the corresponding variable declaration.
token_src: Ast.TokenIndex,
/// Track the first identifier where it is referenced.
/// 0 means never referenced.
used: Ast.TokenIndex = 0,
/// Track the identifier where it is discarded, like this `_ = foo;`.
/// 0 means never discarded.
discarded: Ast.TokenIndex = 0,
/// String table index.
name: u32,
id_cat: IdCat,
/// true means we find out during Sema whether the value is comptime.
/// false means it is already known at AstGen the value is runtime-known.
maybe_comptime: bool,
/// Track whether the name has been referenced.
used: bool = false,
};
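For context, here is a minimal sketch (not part of this diff; the function names are illustrative) of the two diagnostics the updated checkUsed can emit from the used/discarded tokens tracked above: a local that is neither referenced nor discarded is still reported as unused, while a local that is both referenced and then discarded now produces the new "pointless discard" error with a note pointing at the use site.

export fn unusedLocal() void {
    var a: i32 = 1; // error: unused local variable
}

export fn pointlessDiscard() void {
    var x: i32 = 1234;
    x += 1; // note: used here
    _ = x; // error: pointless discard of local variable
}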
const Defer = struct {

View File

@ -2506,7 +2506,6 @@ fn walkInstruction(
try self.srcLocInfo(file, sn, parent_src)
else
parent_src;
_ = src_info;
const decls_len = if (small.has_decls_len) blk: {
const decls_len = file.zir.extra[extra_index];
@ -2627,7 +2626,6 @@ fn walkInstruction(
extra_index += 1;
break :blk fields_len;
} else 0;
_ = fields_len;
const decls_len = if (small.has_decls_len) blk: {
const decls_len = file.zir.extra[extra_index];
@ -2759,7 +2757,6 @@ fn walkInstruction(
extra_index += 1;
break :blk fields_len;
} else 0;
_ = fields_len;
const decls_len = if (small.has_decls_len) blk: {
const decls_len = file.zir.extra[extra_index];
@ -2901,7 +2898,6 @@ fn walkInstruction(
extra_index += 1;
break :blk fields_len;
} else 0;
_ = fields_len;
const decls_len = if (small.has_decls_len) blk: {
const decls_len = file.zir.extra[extra_index];

View File

@ -17575,30 +17575,30 @@ fn zirIntToPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const operand_coerced = try sema.coerce(block, Type.usize, operand_res, operand_src);
const type_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
const type_res = try sema.resolveType(block, src, extra.lhs);
try sema.checkPtrType(block, type_src, type_res);
try sema.resolveTypeLayout(block, src, type_res.elemType2());
const ptr_align = type_res.ptrAlignment(sema.mod.getTarget());
const ptr_ty = try sema.resolveType(block, src, extra.lhs);
const elem_ty = ptr_ty.elemType2();
try sema.checkPtrType(block, type_src, ptr_ty);
const target = sema.mod.getTarget();
const ptr_align = try ptr_ty.ptrAlignmentAdvanced(target, sema.kit(block, src));
if (try sema.resolveDefinedValue(block, operand_src, operand_coerced)) |val| {
const addr = val.toUnsignedInt(target);
if (!type_res.isAllowzeroPtr() and addr == 0)
return sema.fail(block, operand_src, "pointer type '{}' does not allow address zero", .{type_res.fmt(sema.mod)});
if (!ptr_ty.isAllowzeroPtr() and addr == 0)
return sema.fail(block, operand_src, "pointer type '{}' does not allow address zero", .{ptr_ty.fmt(sema.mod)});
if (addr != 0 and addr % ptr_align != 0)
return sema.fail(block, operand_src, "pointer type '{}' requires aligned address", .{type_res.fmt(sema.mod)});
return sema.fail(block, operand_src, "pointer type '{}' requires aligned address", .{ptr_ty.fmt(sema.mod)});
const val_payload = try sema.arena.create(Value.Payload.U64);
val_payload.* = .{
.base = .{ .tag = .int_u64 },
.data = addr,
};
return sema.addConstant(type_res, Value.initPayload(&val_payload.base));
return sema.addConstant(ptr_ty, Value.initPayload(&val_payload.base));
}
try sema.requireRuntimeBlock(block, src, operand_src);
if (block.wantSafety() and try sema.typeHasRuntimeBits(block, sema.src, type_res.elemType2())) {
if (!type_res.isAllowzeroPtr()) {
if (block.wantSafety() and try sema.typeHasRuntimeBits(block, sema.src, elem_ty)) {
if (!ptr_ty.isAllowzeroPtr()) {
const is_non_zero = try block.addBinOp(.cmp_neq, operand_coerced, .zero_usize);
try sema.addSafetyCheck(block, is_non_zero, .cast_to_null);
}
@ -17618,7 +17618,7 @@ fn zirIntToPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
try sema.addSafetyCheck(block, is_aligned, .incorrect_alignment);
}
}
return block.addBitCast(type_res, operand_coerced);
return block.addBitCast(ptr_ty, operand_coerced);
}
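A rough illustration of the comptime checks above (a sketch, not from this change set; declaration names are illustrative). Each line, once analyzed, produces the error noted in its comment: @intToPtr rejects address zero unless the pointer type is allowzero, and rejects an address that violates the pointer's alignment, which is now resolved through ptrAlignmentAdvanced rather than an eager layout resolution.

const zero_ptr = @intToPtr(*u32, 0x0); // error: pointer type '*u32' does not allow address zero
const odd_ptr = @intToPtr(*u32, 0x1); // error: pointer type '*u32' requires aligned address
const ok_ptr = @intToPtr(*allowzero u32, 0x0); // accepted: allowzero pointers may hold address zero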
fn zirErrSetCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
@ -19670,8 +19670,6 @@ fn zirMemcpy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
if (try sema.resolveDefinedValue(block, src_src, src_ptr)) |src_ptr_val| {
if (!src_ptr_val.isComptimeMutablePtr()) break :rs src_src;
if (try sema.resolveDefinedValue(block, len_src, len)) |len_val| {
_ = dest_ptr_val;
_ = src_ptr_val;
_ = len_val;
return sema.fail(block, src, "TODO: Sema.zirMemcpy at comptime", .{});
} else break :rs len_src;
@ -19713,7 +19711,6 @@ fn zirMemset(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
if (!ptr_val.isComptimeMutablePtr()) break :rs dest_src;
if (try sema.resolveDefinedValue(block, len_src, len)) |len_val| {
if (try sema.resolveMaybeUndefVal(block, value_src, value)) |val| {
_ = ptr_val;
_ = len_val;
_ = val;
return sema.fail(block, src, "TODO: Sema.zirMemset at comptime", .{});
@ -19941,7 +19938,6 @@ fn zirFuncFancy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
if (val.tag() == .generic_poison) {
break :blk FuncLinkSection{ .generic = {} };
}
_ = val;
return sema.fail(block, section_src, "TODO implement linksection on functions", .{});
} else if (extra.data.bits.has_section_ref) blk: {
const section_ref = @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_index]);

View File

@ -2581,7 +2581,6 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
}
fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
_ = inst;
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
else
@ -3614,7 +3613,6 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
const ptr = try self.resolveInst(un_op);
const ptr_ty = self.air.typeOf(un_op);
const ret_ty = self.fn_type.fnReturnType();
_ = ret_ty;
switch (self.ret_mcv) {
.none => {},
@ -5099,7 +5097,6 @@ fn lowerDeclRef(self: *Self, tv: TypedValue, decl_index: Module.Decl.Index) Inne
} else {
return self.fail("TODO codegen non-ELF const Decl pointer", .{});
}
_ = tv;
}
fn lowerUnnamedConst(self: *Self, tv: TypedValue) InnerError!MCValue {

View File

@ -2111,7 +2111,6 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
}
fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
_ = inst;
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
else
@ -3353,7 +3352,6 @@ fn divFloat(
) InnerError!MCValue {
_ = lhs_bind;
_ = rhs_bind;
_ = lhs_ty;
_ = rhs_ty;
_ = maybe_inst;
@ -3420,7 +3418,6 @@ fn divExact(
) InnerError!MCValue {
_ = lhs_bind;
_ = rhs_bind;
_ = lhs_ty;
_ = rhs_ty;
_ = maybe_inst;
@ -3506,7 +3503,6 @@ fn modulo(
) InnerError!MCValue {
_ = lhs_bind;
_ = rhs_bind;
_ = lhs_ty;
_ = rhs_ty;
_ = maybe_inst;

View File

@ -1316,7 +1316,6 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
}
fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
_ = inst;
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
else
@ -1598,7 +1597,6 @@ fn airStructFieldPtrIndex(self: *Self, inst: Air.Inst.Index, index: u8) !void {
return self.structFieldPtr(ty_op.operand, ty_op.ty, index);
}
fn structFieldPtr(self: *Self, operand: Air.Inst.Ref, ty: Air.Inst.Ref, index: u32) !void {
_ = self;
_ = operand;
_ = ty;
_ = index;
@ -1615,7 +1613,6 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
}
fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
_ = self;
_ = inst;
return self.fail("TODO implement codegen airFieldParentPtr", .{});
}

View File

@ -2084,9 +2084,7 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
}
fn airSwitch(self: *Self, inst: Air.Inst.Index) !void {
_ = self;
_ = inst;
return self.fail("TODO implement switch for {}", .{self.target.cpu.arch});
}

View File

@ -1960,7 +1960,6 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
}
fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
_ = inst;
const result: MCValue = if (self.liveness.isUnused(inst))
.dead
else
@ -6590,7 +6589,6 @@ fn airFloatToInt(self: *Self, inst: Air.Inst.Index) !void {
fn airCmpxchg(self: *Self, inst: Air.Inst.Index) !void {
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.Block, ty_pl.payload);
_ = ty_pl;
_ = extra;
return self.fail("TODO implement x86 airCmpxchg", .{});
// return self.finishAir(inst, result, .{ extra.ptr, extra.expected_value, extra.new_value });

View File

@ -622,7 +622,6 @@ pub fn updateDeclExports(
) !void {
try self.seeDecl(decl_index);
// we do all the things in flush
_ = self;
_ = module;
_ = exports;
}

File diff suppressed because it is too large.

View File

@ -4,7 +4,6 @@ const std = @import("std");
const assert = std.debug.assert;
const fs = std.fs;
const log = std.log.scoped(.archive);
const macho = std.macho;
const mem = std.mem;
const Allocator = mem.Allocator;

View File

@ -55,37 +55,37 @@ pub const empty: Atom = .{
};
/// Frees all resources owned by this `Atom`.
pub fn deinit(self: *Atom, gpa: Allocator) void {
self.relocs.deinit(gpa);
self.code.deinit(gpa);
pub fn deinit(atom: *Atom, gpa: Allocator) void {
atom.relocs.deinit(gpa);
atom.code.deinit(gpa);
for (self.locals.items) |*local| {
for (atom.locals.items) |*local| {
local.deinit(gpa);
}
self.locals.deinit(gpa);
atom.locals.deinit(gpa);
}
/// Sets the length of relocations and code to '0',
/// effectively resetting them and allowing them to be re-populated.
pub fn clear(self: *Atom) void {
self.relocs.clearRetainingCapacity();
self.code.clearRetainingCapacity();
pub fn clear(atom: *Atom) void {
atom.relocs.clearRetainingCapacity();
atom.code.clearRetainingCapacity();
}
pub fn format(self: Atom, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
pub fn format(atom: Atom, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = fmt;
_ = options;
try writer.print("Atom{{ .sym_index = {d}, .alignment = {d}, .size = {d}, .offset = 0x{x:0>8} }}", .{
self.sym_index,
self.alignment,
self.size,
self.offset,
atom.sym_index,
atom.alignment,
atom.size,
atom.offset,
});
}
/// Returns the first `Atom` from a given atom
pub fn getFirst(self: *Atom) *Atom {
var tmp = self;
pub fn getFirst(atom: *Atom) *Atom {
var tmp = atom;
while (tmp.prev) |prev| tmp = prev;
return tmp;
}
@ -94,9 +94,9 @@ pub fn getFirst(self: *Atom) *Atom {
/// produced from Zig code, rather than an object file.
/// This is useful for debug sections where we want to extend
/// the bytes, and don't want to overwrite existing Atoms.
pub fn getFirstZigAtom(self: *Atom) *Atom {
if (self.file == null) return self;
var tmp = self;
pub fn getFirstZigAtom(atom: *Atom) *Atom {
if (atom.file == null) return atom;
var tmp = atom;
return while (tmp.prev) |prev| {
if (prev.file == null) break prev;
tmp = prev;
@ -104,24 +104,24 @@ pub fn getFirstZigAtom(self: *Atom) *Atom {
}
/// Returns the location of the symbol that represents this `Atom`
pub fn symbolLoc(self: Atom) Wasm.SymbolLoc {
return .{ .file = self.file, .index = self.sym_index };
pub fn symbolLoc(atom: Atom) Wasm.SymbolLoc {
return .{ .file = atom.file, .index = atom.sym_index };
}
/// Resolves the relocations within the atom, writing the new value
/// at the calculated offset.
pub fn resolveRelocs(self: *Atom, wasm_bin: *const Wasm) void {
if (self.relocs.items.len == 0) return;
const symbol_name = self.symbolLoc().getName(wasm_bin);
pub fn resolveRelocs(atom: *Atom, wasm_bin: *const Wasm) void {
if (atom.relocs.items.len == 0) return;
const symbol_name = atom.symbolLoc().getName(wasm_bin);
log.debug("Resolving relocs in atom '{s}' count({d})", .{
symbol_name,
self.relocs.items.len,
atom.relocs.items.len,
});
for (self.relocs.items) |reloc| {
const value = self.relocationValue(reloc, wasm_bin);
for (atom.relocs.items) |reloc| {
const value = atom.relocationValue(reloc, wasm_bin);
log.debug("Relocating '{s}' referenced in '{s}' offset=0x{x:0>8} value={d}", .{
(Wasm.SymbolLoc{ .file = self.file, .index = reloc.index }).getName(wasm_bin),
(Wasm.SymbolLoc{ .file = atom.file, .index = reloc.index }).getName(wasm_bin),
symbol_name,
reloc.offset,
value,
@ -133,10 +133,10 @@ pub fn resolveRelocs(self: *Atom, wasm_bin: *const Wasm) void {
.R_WASM_GLOBAL_INDEX_I32,
.R_WASM_MEMORY_ADDR_I32,
.R_WASM_SECTION_OFFSET_I32,
=> std.mem.writeIntLittle(u32, self.code.items[reloc.offset..][0..4], @intCast(u32, value)),
=> std.mem.writeIntLittle(u32, atom.code.items[reloc.offset..][0..4], @intCast(u32, value)),
.R_WASM_TABLE_INDEX_I64,
.R_WASM_MEMORY_ADDR_I64,
=> std.mem.writeIntLittle(u64, self.code.items[reloc.offset..][0..8], value),
=> std.mem.writeIntLittle(u64, atom.code.items[reloc.offset..][0..8], value),
.R_WASM_GLOBAL_INDEX_LEB,
.R_WASM_EVENT_INDEX_LEB,
.R_WASM_FUNCTION_INDEX_LEB,
@ -145,11 +145,11 @@ pub fn resolveRelocs(self: *Atom, wasm_bin: *const Wasm) void {
.R_WASM_TABLE_INDEX_SLEB,
.R_WASM_TABLE_NUMBER_LEB,
.R_WASM_TYPE_INDEX_LEB,
=> leb.writeUnsignedFixed(5, self.code.items[reloc.offset..][0..5], @intCast(u32, value)),
=> leb.writeUnsignedFixed(5, atom.code.items[reloc.offset..][0..5], @intCast(u32, value)),
.R_WASM_MEMORY_ADDR_LEB64,
.R_WASM_MEMORY_ADDR_SLEB64,
.R_WASM_TABLE_INDEX_SLEB64,
=> leb.writeUnsignedFixed(10, self.code.items[reloc.offset..][0..10], value),
=> leb.writeUnsignedFixed(10, atom.code.items[reloc.offset..][0..10], value),
}
}
}
@ -157,8 +157,8 @@ pub fn resolveRelocs(self: *Atom, wasm_bin: *const Wasm) void {
/// From a given `relocation` will return the new value to be written.
/// All values will be represented as a `u64` as all values can fit within it.
/// The final value must be casted to the correct size.
fn relocationValue(self: Atom, relocation: types.Relocation, wasm_bin: *const Wasm) u64 {
const target_loc = (Wasm.SymbolLoc{ .file = self.file, .index = relocation.index }).finalLoc(wasm_bin);
fn relocationValue(atom: Atom, relocation: types.Relocation, wasm_bin: *const Wasm) u64 {
const target_loc = (Wasm.SymbolLoc{ .file = atom.file, .index = relocation.index }).finalLoc(wasm_bin);
const symbol = target_loc.getSymbol(wasm_bin).*;
switch (relocation.relocation_type) {
.R_WASM_FUNCTION_INDEX_LEB => return symbol.index,
@ -203,7 +203,7 @@ fn relocationValue(self: Atom, relocation: types.Relocation, wasm_bin: *const Wa
},
.R_WASM_FUNCTION_OFFSET_I32 => {
const target_atom = wasm_bin.symbol_atom.get(target_loc).?;
var atom = target_atom.getFirst();
var current_atom = target_atom.getFirst();
var offset: u32 = 0;
// TODO: Calculate this during atom allocation, rather than
// this linear calculation. For now it's done here as atoms
@ -211,8 +211,8 @@ fn relocationValue(self: Atom, relocation: types.Relocation, wasm_bin: *const Wa
// merged until later.
while (true) {
offset += 5; // each atom uses 5 bytes to store its body's size
if (atom == target_atom) break;
atom = atom.next.?;
if (current_atom == target_atom) break;
current_atom = current_atom.next.?;
}
return target_atom.offset + offset + (relocation.addend orelse 0);
},

View File

@ -88,28 +88,28 @@ const RelocatableData = struct {
/// meta data of the given object file.
/// NOTE: Alignment is encoded as a power of 2, so we shift the symbol's
/// alignment to retrieve the natural alignment.
pub fn getAlignment(self: RelocatableData, object: *const Object) u32 {
if (self.type != .data) return 1;
const data_alignment = object.segment_info[self.index].alignment;
pub fn getAlignment(relocatable_data: RelocatableData, object: *const Object) u32 {
if (relocatable_data.type != .data) return 1;
const data_alignment = object.segment_info[relocatable_data.index].alignment;
if (data_alignment == 0) return 1;
// Decode from power of 2 to natural alignment
return @as(u32, 1) << @intCast(u5, data_alignment);
}
/// Returns the symbol kind that corresponds to the relocatable section
pub fn getSymbolKind(self: RelocatableData) Symbol.Tag {
return switch (self.type) {
pub fn getSymbolKind(relocatable_data: RelocatableData) Symbol.Tag {
return switch (relocatable_data.type) {
.data => .data,
.code => .function,
.debug => .section,
};
}
/// Returns the index within a section itself, or in case of a debug section,
/// Returns the index within a section itself, or in case of a debug section,
/// returns the section index within the object file.
pub fn getIndex(self: RelocatableData) u32 {
if (self.type == .debug) return self.section_index;
return self.index;
pub fn getIndex(relocatable_data: RelocatableData) u32 {
if (relocatable_data.type == .debug) return relocatable_data.section_index;
return relocatable_data.index;
}
};
@ -153,51 +153,51 @@ pub fn create(gpa: Allocator, file: std.fs.File, name: []const u8, maybe_max_siz
/// Frees all memory of `Object` at once. The given `Allocator` must be
/// the same allocator that was used when `init` was called.
pub fn deinit(self: *Object, gpa: Allocator) void {
if (self.file) |file| {
pub fn deinit(object: *Object, gpa: Allocator) void {
if (object.file) |file| {
file.close();
}
for (self.func_types) |func_ty| {
for (object.func_types) |func_ty| {
gpa.free(func_ty.params);
gpa.free(func_ty.returns);
}
gpa.free(self.func_types);
gpa.free(self.functions);
gpa.free(self.imports);
gpa.free(self.tables);
gpa.free(self.memories);
gpa.free(self.globals);
gpa.free(self.exports);
for (self.elements) |el| {
gpa.free(object.func_types);
gpa.free(object.functions);
gpa.free(object.imports);
gpa.free(object.tables);
gpa.free(object.memories);
gpa.free(object.globals);
gpa.free(object.exports);
for (object.elements) |el| {
gpa.free(el.func_indexes);
}
gpa.free(self.elements);
gpa.free(self.features);
for (self.relocations.values()) |val| {
gpa.free(object.elements);
gpa.free(object.features);
for (object.relocations.values()) |val| {
gpa.free(val);
}
self.relocations.deinit(gpa);
gpa.free(self.symtable);
gpa.free(self.comdat_info);
gpa.free(self.init_funcs);
for (self.segment_info) |info| {
object.relocations.deinit(gpa);
gpa.free(object.symtable);
gpa.free(object.comdat_info);
gpa.free(object.init_funcs);
for (object.segment_info) |info| {
gpa.free(info.name);
}
gpa.free(self.segment_info);
for (self.relocatable_data) |rel_data| {
gpa.free(object.segment_info);
for (object.relocatable_data) |rel_data| {
gpa.free(rel_data.data[0..rel_data.size]);
}
gpa.free(self.relocatable_data);
self.string_table.deinit(gpa);
gpa.free(self.name);
self.* = undefined;
gpa.free(object.relocatable_data);
object.string_table.deinit(gpa);
gpa.free(object.name);
object.* = undefined;
}
/// Finds the import within the list of imports from a given kind and index of that kind.
/// Asserts the import exists
pub fn findImport(self: *const Object, import_kind: std.wasm.ExternalKind, index: u32) types.Import {
pub fn findImport(object: *const Object, import_kind: std.wasm.ExternalKind, index: u32) types.Import {
var i: u32 = 0;
return for (self.imports) |import| {
return for (object.imports) |import| {
if (std.meta.activeTag(import.kind) == import_kind) {
if (i == index) return import;
i += 1;
@ -206,16 +206,16 @@ pub fn findImport(self: *const Object, import_kind: std.wasm.ExternalKind, index
}
/// Counts the entries of imported `kind` and returns the result
pub fn importedCountByKind(self: *const Object, kind: std.wasm.ExternalKind) u32 {
pub fn importedCountByKind(object: *const Object, kind: std.wasm.ExternalKind) u32 {
var i: u32 = 0;
return for (self.imports) |imp| {
return for (object.imports) |imp| {
if (@as(std.wasm.ExternalKind, imp.kind) == kind) i += 1;
} else i;
}
/// From a given `RelocatableData`, find the corresponding debug section name
pub fn getDebugName(self: *const Object, relocatable_data: RelocatableData) []const u8 {
return self.string_table.get(relocatable_data.index);
pub fn getDebugName(object: *const Object, relocatable_data: RelocatableData) []const u8 {
return object.string_table.get(relocatable_data.index);
}
/// Checks if the object file is an MVP version.
@ -224,13 +224,13 @@ pub fn getDebugName(self: *const Object, relocatable_data: RelocatableData) []co
/// we initialize a new table symbol that corresponds to that import and return that symbol.
///
/// When the object file is *NOT* MVP, we return `null`.
fn checkLegacyIndirectFunctionTable(self: *Object) !?Symbol {
fn checkLegacyIndirectFunctionTable(object: *Object) !?Symbol {
var table_count: usize = 0;
for (self.symtable) |sym| {
for (object.symtable) |sym| {
if (sym.tag == .table) table_count += 1;
}
const import_table_count = self.importedCountByKind(.table);
const import_table_count = object.importedCountByKind(.table);
// For each import table, we also have a symbol so this is not a legacy object file
if (import_table_count == table_count) return null;
@ -244,7 +244,7 @@ fn checkLegacyIndirectFunctionTable(self: *Object) !?Symbol {
}
// MVP object files cannot have any table definitions, only imports (for the indirect function table).
if (self.tables.len > 0) {
if (object.tables.len > 0) {
log.err("Unexpected table definition without representing table symbols.", .{});
return error.UnexpectedTable;
}
@ -254,14 +254,14 @@ fn checkLegacyIndirectFunctionTable(self: *Object) !?Symbol {
return error.MissingTableSymbols;
}
var table_import: types.Import = for (self.imports) |imp| {
var table_import: types.Import = for (object.imports) |imp| {
if (imp.kind == .table) {
break imp;
}
} else unreachable;
if (!std.mem.eql(u8, self.string_table.get(table_import.name), "__indirect_function_table")) {
log.err("Non-indirect function table import '{s}' is missing a corresponding symbol", .{self.string_table.get(table_import.name)});
if (!std.mem.eql(u8, object.string_table.get(table_import.name), "__indirect_function_table")) {
log.err("Non-indirect function table import '{s}' is missing a corresponding symbol", .{object.string_table.get(table_import.name)});
return error.MissingTableSymbols;
}
@ -313,41 +313,41 @@ pub const ParseError = error{
UnknownFeature,
};
fn parse(self: *Object, gpa: Allocator, reader: anytype, is_object_file: *bool) Parser(@TypeOf(reader)).Error!void {
var parser = Parser(@TypeOf(reader)).init(self, reader);
fn parse(object: *Object, gpa: Allocator, reader: anytype, is_object_file: *bool) Parser(@TypeOf(reader)).Error!void {
var parser = Parser(@TypeOf(reader)).init(object, reader);
return parser.parseObject(gpa, is_object_file);
}
fn Parser(comptime ReaderType: type) type {
return struct {
const Self = @This();
const ObjectParser = @This();
const Error = ReaderType.Error || ParseError;
reader: std.io.CountingReader(ReaderType),
/// Object file we're building
object: *Object,
fn init(object: *Object, reader: ReaderType) Self {
fn init(object: *Object, reader: ReaderType) ObjectParser {
return .{ .object = object, .reader = std.io.countingReader(reader) };
}
/// Verifies that the first 4 bytes contains \0Asm
fn verifyMagicBytes(self: *Self) Error!void {
fn verifyMagicBytes(parser: *ObjectParser) Error!void {
var magic_bytes: [4]u8 = undefined;
try self.reader.reader().readNoEof(&magic_bytes);
try parser.reader.reader().readNoEof(&magic_bytes);
if (!std.mem.eql(u8, &magic_bytes, &std.wasm.magic)) {
log.debug("Invalid magic bytes '{s}'", .{&magic_bytes});
return error.InvalidMagicByte;
}
}
fn parseObject(self: *Self, gpa: Allocator, is_object_file: *bool) Error!void {
errdefer self.object.deinit(gpa);
try self.verifyMagicBytes();
const version = try self.reader.reader().readIntLittle(u32);
fn parseObject(parser: *ObjectParser, gpa: Allocator, is_object_file: *bool) Error!void {
errdefer parser.object.deinit(gpa);
try parser.verifyMagicBytes();
const version = try parser.reader.reader().readIntLittle(u32);
self.object.version = version;
parser.object.version = version;
var relocatable_data = std.ArrayList(RelocatableData).init(gpa);
var debug_names = std.ArrayList(u8).init(gpa);
@ -360,9 +360,9 @@ fn Parser(comptime ReaderType: type) type {
}
var section_index: u32 = 0;
while (self.reader.reader().readByte()) |byte| : (section_index += 1) {
const len = try readLeb(u32, self.reader.reader());
var limited_reader = std.io.limitedReader(self.reader.reader(), len);
while (parser.reader.reader().readByte()) |byte| : (section_index += 1) {
const len = try readLeb(u32, parser.reader.reader());
var limited_reader = std.io.limitedReader(parser.reader.reader(), len);
const reader = limited_reader.reader();
switch (@intToEnum(std.wasm.Section, byte)) {
.custom => {
@ -373,12 +373,12 @@ fn Parser(comptime ReaderType: type) type {
if (std.mem.eql(u8, name, "linking")) {
is_object_file.* = true;
self.object.relocatable_data = relocatable_data.items; // at this point no new relocatable sections will appear so we're free to store them.
try self.parseMetadata(gpa, @intCast(usize, reader.context.bytes_left));
parser.object.relocatable_data = relocatable_data.items; // at this point no new relocatable sections will appear so we're free to store them.
try parser.parseMetadata(gpa, @intCast(usize, reader.context.bytes_left));
} else if (std.mem.startsWith(u8, name, "reloc")) {
try self.parseRelocations(gpa);
try parser.parseRelocations(gpa);
} else if (std.mem.eql(u8, name, "target_features")) {
try self.parseFeatures(gpa);
try parser.parseFeatures(gpa);
} else if (std.mem.startsWith(u8, name, ".debug")) {
const debug_size = @intCast(u32, reader.context.bytes_left);
const debug_content = try gpa.alloc(u8, debug_size);
@ -389,7 +389,7 @@ fn Parser(comptime ReaderType: type) type {
.type = .debug,
.data = debug_content.ptr,
.size = debug_size,
.index = try self.object.string_table.put(gpa, name),
.index = try parser.object.string_table.put(gpa, name),
.offset = 0, // debug sections only contain 1 entry, so no need to calculate offset
.section_index = section_index,
});
@ -398,7 +398,7 @@ fn Parser(comptime ReaderType: type) type {
}
},
.type => {
for (try readVec(&self.object.func_types, reader, gpa)) |*type_val| {
for (try readVec(&parser.object.func_types, reader, gpa)) |*type_val| {
if ((try reader.readByte()) != std.wasm.function_type) return error.ExpectedFuncType;
for (try readVec(&type_val.params, reader, gpa)) |*param| {
@ -412,7 +412,7 @@ fn Parser(comptime ReaderType: type) type {
try assertEnd(reader);
},
.import => {
for (try readVec(&self.object.imports, reader, gpa)) |*import| {
for (try readVec(&parser.object.imports, reader, gpa)) |*import| {
const module_len = try readLeb(u32, reader);
const module_name = try gpa.alloc(u8, module_len);
defer gpa.free(module_name);
@ -438,21 +438,21 @@ fn Parser(comptime ReaderType: type) type {
};
import.* = .{
.module_name = try self.object.string_table.put(gpa, module_name),
.name = try self.object.string_table.put(gpa, name),
.module_name = try parser.object.string_table.put(gpa, module_name),
.name = try parser.object.string_table.put(gpa, name),
.kind = kind_value,
};
}
try assertEnd(reader);
},
.function => {
for (try readVec(&self.object.functions, reader, gpa)) |*func| {
for (try readVec(&parser.object.functions, reader, gpa)) |*func| {
func.* = .{ .type_index = try readLeb(u32, reader) };
}
try assertEnd(reader);
},
.table => {
for (try readVec(&self.object.tables, reader, gpa)) |*table| {
for (try readVec(&parser.object.tables, reader, gpa)) |*table| {
table.* = .{
.reftype = try readEnum(std.wasm.RefType, reader),
.limits = try readLimits(reader),
@ -461,13 +461,13 @@ fn Parser(comptime ReaderType: type) type {
try assertEnd(reader);
},
.memory => {
for (try readVec(&self.object.memories, reader, gpa)) |*memory| {
for (try readVec(&parser.object.memories, reader, gpa)) |*memory| {
memory.* = .{ .limits = try readLimits(reader) };
}
try assertEnd(reader);
},
.global => {
for (try readVec(&self.object.globals, reader, gpa)) |*global| {
for (try readVec(&parser.object.globals, reader, gpa)) |*global| {
global.* = .{
.global_type = .{
.valtype = try readEnum(std.wasm.Valtype, reader),
@ -479,13 +479,13 @@ fn Parser(comptime ReaderType: type) type {
try assertEnd(reader);
},
.@"export" => {
for (try readVec(&self.object.exports, reader, gpa)) |*exp| {
for (try readVec(&parser.object.exports, reader, gpa)) |*exp| {
const name_len = try readLeb(u32, reader);
const name = try gpa.alloc(u8, name_len);
defer gpa.free(name);
try reader.readNoEof(name);
exp.* = .{
.name = try self.object.string_table.put(gpa, name),
.name = try parser.object.string_table.put(gpa, name),
.kind = try readEnum(std.wasm.ExternalKind, reader),
.index = try readLeb(u32, reader),
};
@ -493,11 +493,11 @@ fn Parser(comptime ReaderType: type) type {
try assertEnd(reader);
},
.start => {
self.object.start = try readLeb(u32, reader);
parser.object.start = try readLeb(u32, reader);
try assertEnd(reader);
},
.element => {
for (try readVec(&self.object.elements, reader, gpa)) |*elem| {
for (try readVec(&parser.object.elements, reader, gpa)) |*elem| {
elem.table_index = try readLeb(u32, reader);
elem.offset = try readInit(reader);
@ -521,7 +521,7 @@ fn Parser(comptime ReaderType: type) type {
.type = .code,
.data = data.ptr,
.size = code_len,
.index = self.object.importedCountByKind(.function) + index,
.index = parser.object.importedCountByKind(.function) + index,
.offset = offset,
.section_index = section_index,
});
@ -551,22 +551,22 @@ fn Parser(comptime ReaderType: type) type {
});
}
},
else => try self.reader.reader().skipBytes(len, .{}),
else => try parser.reader.reader().skipBytes(len, .{}),
}
} else |err| switch (err) {
error.EndOfStream => {}, // finished parsing the file
else => |e| return e,
}
self.object.relocatable_data = relocatable_data.toOwnedSlice();
parser.object.relocatable_data = relocatable_data.toOwnedSlice();
}
/// Based on the "features" custom section, parses it into a list of
/// features that tell the linker what features were enabled and may be mandatory
/// to be able to link.
/// Logs an info message when an undefined feature is detected.
fn parseFeatures(self: *Self, gpa: Allocator) !void {
const reader = self.reader.reader();
for (try readVec(&self.object.features, reader, gpa)) |*feature| {
fn parseFeatures(parser: *ObjectParser, gpa: Allocator) !void {
const reader = parser.reader.reader();
for (try readVec(&parser.object.features, reader, gpa)) |*feature| {
const prefix = try readEnum(types.Feature.Prefix, reader);
const name_len = try leb.readULEB128(u32, reader);
const name = try gpa.alloc(u8, name_len);
@ -587,8 +587,8 @@ fn Parser(comptime ReaderType: type) type {
/// Parses a "reloc" custom section into a list of relocations.
/// The relocations are mapped into `Object` where the key is the section
/// they apply to.
fn parseRelocations(self: *Self, gpa: Allocator) !void {
const reader = self.reader.reader();
fn parseRelocations(parser: *ObjectParser, gpa: Allocator) !void {
const reader = parser.reader.reader();
const section = try leb.readULEB128(u32, reader);
const count = try leb.readULEB128(u32, reader);
const relocations = try gpa.alloc(types.Relocation, count);
@ -616,15 +616,15 @@ fn Parser(comptime ReaderType: type) type {
});
}
try self.object.relocations.putNoClobber(gpa, section, relocations);
try parser.object.relocations.putNoClobber(gpa, section, relocations);
}
/// Parses the "linking" custom section. Versions that are not
/// supported will be an error. `payload_size` is required to be able
/// to calculate the subsections we need to parse, as that data is not
/// available within the section itself.
fn parseMetadata(self: *Self, gpa: Allocator, payload_size: usize) !void {
var limited = std.io.limitedReader(self.reader.reader(), payload_size);
/// available within the section itself.
fn parseMetadata(parser: *ObjectParser, gpa: Allocator, payload_size: usize) !void {
var limited = std.io.limitedReader(parser.reader.reader(), payload_size);
const limited_reader = limited.reader();
const version = try leb.readULEB128(u32, limited_reader);
@ -632,7 +632,7 @@ fn Parser(comptime ReaderType: type) type {
if (version != 2) return error.UnsupportedVersion;
while (limited.bytes_left > 0) {
try self.parseSubsection(gpa, limited_reader);
try parser.parseSubsection(gpa, limited_reader);
}
}
@ -640,9 +640,9 @@ fn Parser(comptime ReaderType: type) type {
/// The `reader` param for this is to provide a `LimitedReader`, which allows
/// us to only read until a max length.
///
/// `self` is used to provide access to other sections that may be needed,
/// `parser` is used to provide access to other sections that may be needed,
/// such as access to the `import` section to find the name of a symbol.
fn parseSubsection(self: *Self, gpa: Allocator, reader: anytype) !void {
fn parseSubsection(parser: *ObjectParser, gpa: Allocator, reader: anytype) !void {
const sub_type = try leb.readULEB128(u8, reader);
log.debug("Found subsection: {s}", .{@tagName(@intToEnum(types.SubsectionType, sub_type))});
const payload_len = try leb.readULEB128(u32, reader);
@ -674,7 +674,7 @@ fn Parser(comptime ReaderType: type) type {
segment.flags,
});
}
self.object.segment_info = segments;
parser.object.segment_info = segments;
},
.WASM_INIT_FUNCS => {
const funcs = try gpa.alloc(types.InitFunc, count);
@ -686,7 +686,7 @@ fn Parser(comptime ReaderType: type) type {
};
log.debug("Found function - prio: {d}, index: {d}", .{ func.priority, func.symbol_index });
}
self.object.init_funcs = funcs;
parser.object.init_funcs = funcs;
},
.WASM_COMDAT_INFO => {
const comdats = try gpa.alloc(types.Comdat, count);
@ -719,7 +719,7 @@ fn Parser(comptime ReaderType: type) type {
};
}
self.object.comdat_info = comdats;
parser.object.comdat_info = comdats;
},
.WASM_SYMBOL_TABLE => {
var symbols = try std.ArrayList(Symbol).initCapacity(gpa, count);
@ -727,22 +727,22 @@ fn Parser(comptime ReaderType: type) type {
var i: usize = 0;
while (i < count) : (i += 1) {
const symbol = symbols.addOneAssumeCapacity();
symbol.* = try self.parseSymbol(gpa, reader);
symbol.* = try parser.parseSymbol(gpa, reader);
log.debug("Found symbol: type({s}) name({s}) flags(0b{b:0>8})", .{
@tagName(symbol.tag),
self.object.string_table.get(symbol.name),
parser.object.string_table.get(symbol.name),
symbol.flags,
});
}
// we found all symbols, check for indirect function table
// in case of an MVP object file
if (try self.object.checkLegacyIndirectFunctionTable()) |symbol| {
if (try parser.object.checkLegacyIndirectFunctionTable()) |symbol| {
try symbols.append(symbol);
log.debug("Found legacy indirect function table. Created symbol", .{});
}
self.object.symtable = symbols.toOwnedSlice();
parser.object.symtable = symbols.toOwnedSlice();
},
}
}
@ -750,7 +750,7 @@ fn Parser(comptime ReaderType: type) type {
/// Parses the symbol information based on its kind,
/// requires access to `Object` to find the name of a symbol when it's
/// an import and flag `WASM_SYM_EXPLICIT_NAME` is not set.
fn parseSymbol(self: *Self, gpa: Allocator, reader: anytype) !Symbol {
fn parseSymbol(parser: *ObjectParser, gpa: Allocator, reader: anytype) !Symbol {
const tag = @intToEnum(Symbol.Tag, try leb.readULEB128(u8, reader));
const flags = try leb.readULEB128(u32, reader);
var symbol: Symbol = .{
@ -766,7 +766,7 @@ fn Parser(comptime ReaderType: type) type {
const name = try gpa.alloc(u8, name_len);
defer gpa.free(name);
try reader.readNoEof(name);
symbol.name = try self.object.string_table.put(gpa, name);
symbol.name = try parser.object.string_table.put(gpa, name);
// Data symbols only have the following fields if the symbol is defined
if (symbol.isDefined()) {
@ -778,7 +778,7 @@ fn Parser(comptime ReaderType: type) type {
},
.section => {
symbol.index = try leb.readULEB128(u32, reader);
for (self.object.relocatable_data) |data| {
for (parser.object.relocatable_data) |data| {
if (data.section_index == symbol.index) {
symbol.name = data.index;
break;
@ -791,7 +791,7 @@ fn Parser(comptime ReaderType: type) type {
const is_undefined = symbol.isUndefined();
if (is_undefined) {
maybe_import = self.object.findImport(symbol.tag.externalType(), symbol.index);
maybe_import = parser.object.findImport(symbol.tag.externalType(), symbol.index);
}
const explicit_name = symbol.hasFlag(.WASM_SYM_EXPLICIT_NAME);
if (!(is_undefined and !explicit_name)) {
@ -799,7 +799,7 @@ fn Parser(comptime ReaderType: type) type {
const name = try gpa.alloc(u8, name_len);
defer gpa.free(name);
try reader.readNoEof(name);
symbol.name = try self.object.string_table.put(gpa, name);
symbol.name = try parser.object.string_table.put(gpa, name);
} else {
symbol.name = maybe_import.?.name;
}
@ -872,7 +872,7 @@ fn assertEnd(reader: anytype) !void {
}
/// Parses an object file into atoms, for code and data sections
pub fn parseIntoAtoms(self: *Object, gpa: Allocator, object_index: u16, wasm_bin: *Wasm) !void {
pub fn parseIntoAtoms(object: *Object, gpa: Allocator, object_index: u16, wasm_bin: *Wasm) !void {
const Key = struct {
kind: Symbol.Tag,
index: u32,
@ -882,7 +882,7 @@ pub fn parseIntoAtoms(self: *Object, gpa: Allocator, object_index: u16, wasm_bin
list.deinit();
} else symbol_for_segment.deinit();
for (self.symtable) |symbol, symbol_index| {
for (object.symtable) |symbol, symbol_index| {
switch (symbol.tag) {
.function, .data, .section => if (!symbol.isUndefined()) {
const gop = try symbol_for_segment.getOrPut(.{ .kind = symbol.tag, .index = symbol.index });
@ -896,7 +896,7 @@ pub fn parseIntoAtoms(self: *Object, gpa: Allocator, object_index: u16, wasm_bin
}
}
for (self.relocatable_data) |relocatable_data, index| {
for (object.relocatable_data) |relocatable_data, index| {
const final_index = (try wasm_bin.getMatchingSegment(object_index, @intCast(u32, index))) orelse {
continue; // found unknown section, so skip parsing into atom as we do not know how to handle it.
};
@ -911,12 +911,12 @@ pub fn parseIntoAtoms(self: *Object, gpa: Allocator, object_index: u16, wasm_bin
try wasm_bin.managed_atoms.append(gpa, atom);
atom.file = object_index;
atom.size = relocatable_data.size;
atom.alignment = relocatable_data.getAlignment(self);
atom.alignment = relocatable_data.getAlignment(object);
const relocations: []types.Relocation = self.relocations.get(relocatable_data.section_index) orelse &.{};
const relocations: []types.Relocation = object.relocations.get(relocatable_data.section_index) orelse &.{};
for (relocations) |relocation| {
if (isInbetween(relocatable_data.offset, atom.size, relocation.offset)) {
// set the offset relative to the offset of the segment itself,
// set the offset relative to the offset of the segment itself,
// rather than within the entire section.
var reloc = relocation;
reloc.offset -= relocatable_data.offset;
@ -942,8 +942,8 @@ pub fn parseIntoAtoms(self: *Object, gpa: Allocator, object_index: u16, wasm_bin
// symbols referencing the same atom will be added as alias
// or as 'parent' when they are global.
while (symbols.popOrNull()) |idx| {
const alias_symbol = self.symtable[idx];
const symbol = self.symtable[atom.sym_index];
const alias_symbol = object.symtable[idx];
const symbol = object.symtable[atom.sym_index];
if (alias_symbol.isGlobal() and symbol.isLocal()) {
atom.sym_index = idx;
}
@ -957,7 +957,7 @@ pub fn parseIntoAtoms(self: *Object, gpa: Allocator, object_index: u16, wasm_bin
}
try wasm_bin.appendAtomAtIndex(final_index, atom);
log.debug("Parsed into atom: '{s}' at segment index {d}", .{ self.string_table.get(self.symtable[atom.sym_index].name), final_index });
log.debug("Parsed into atom: '{s}' at segment index {d}", .{ object.string_table.get(object.symtable[atom.sym_index].name), final_index });
}
}

View File

@ -34,8 +34,8 @@ pub const Tag = enum {
/// From a given symbol tag, returns the `ExternalType`
/// Asserts the given tag can be represented as an external type.
pub fn externalType(self: Tag) std.wasm.ExternalKind {
return switch (self) {
pub fn externalType(tag: Tag) std.wasm.ExternalKind {
return switch (tag) {
.function => .function,
.global => .global,
.data => .memory,
@ -78,85 +78,85 @@ pub const Flag = enum(u32) {
/// Verifies if the given symbol should be imported from the
/// host environment or not
pub fn requiresImport(self: Symbol) bool {
if (self.tag == .data) return false;
if (!self.isUndefined()) return false;
if (self.isWeak()) return false;
// if (self.isDefined() and self.isWeak()) return true; //TODO: Only when building shared lib
pub fn requiresImport(symbol: Symbol) bool {
if (symbol.tag == .data) return false;
if (!symbol.isUndefined()) return false;
if (symbol.isWeak()) return false;
// if (symbol.isDefined() and symbol.isWeak()) return true; //TODO: Only when building shared lib
return true;
}
pub fn hasFlag(self: Symbol, flag: Flag) bool {
return self.flags & @enumToInt(flag) != 0;
pub fn hasFlag(symbol: Symbol, flag: Flag) bool {
return symbol.flags & @enumToInt(flag) != 0;
}
pub fn setFlag(self: *Symbol, flag: Flag) void {
self.flags |= @enumToInt(flag);
pub fn setFlag(symbol: *Symbol, flag: Flag) void {
symbol.flags |= @enumToInt(flag);
}
pub fn isUndefined(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_UNDEFINED) != 0;
pub fn isUndefined(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_UNDEFINED) != 0;
}
pub fn setUndefined(self: *Symbol, is_undefined: bool) void {
pub fn setUndefined(symbol: *Symbol, is_undefined: bool) void {
if (is_undefined) {
self.setFlag(.WASM_SYM_UNDEFINED);
symbol.setFlag(.WASM_SYM_UNDEFINED);
} else {
self.flags &= ~@enumToInt(Flag.WASM_SYM_UNDEFINED);
symbol.flags &= ~@enumToInt(Flag.WASM_SYM_UNDEFINED);
}
}
pub fn setGlobal(self: *Symbol, is_global: bool) void {
pub fn setGlobal(symbol: *Symbol, is_global: bool) void {
if (is_global) {
self.flags &= ~@enumToInt(Flag.WASM_SYM_BINDING_LOCAL);
symbol.flags &= ~@enumToInt(Flag.WASM_SYM_BINDING_LOCAL);
} else {
self.setFlag(.WASM_SYM_BINDING_LOCAL);
symbol.setFlag(.WASM_SYM_BINDING_LOCAL);
}
}
pub fn isDefined(self: Symbol) bool {
return !self.isUndefined();
pub fn isDefined(symbol: Symbol) bool {
return !symbol.isUndefined();
}
pub fn isVisible(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_VISIBILITY_HIDDEN) == 0;
pub fn isVisible(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_VISIBILITY_HIDDEN) == 0;
}
pub fn isLocal(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_BINDING_LOCAL) != 0;
pub fn isLocal(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_BINDING_LOCAL) != 0;
}
pub fn isGlobal(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_BINDING_LOCAL) == 0;
pub fn isGlobal(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_BINDING_LOCAL) == 0;
}
pub fn isHidden(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_VISIBILITY_HIDDEN) != 0;
pub fn isHidden(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_VISIBILITY_HIDDEN) != 0;
}
pub fn isNoStrip(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_NO_STRIP) != 0;
pub fn isNoStrip(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_NO_STRIP) != 0;
}
pub fn isExported(self: Symbol) bool {
if (self.isUndefined() or self.isLocal()) return false;
if (self.isHidden()) return false;
if (self.hasFlag(.WASM_SYM_EXPORTED)) return true;
if (self.hasFlag(.WASM_SYM_BINDING_WEAK)) return false;
pub fn isExported(symbol: Symbol) bool {
if (symbol.isUndefined() or symbol.isLocal()) return false;
if (symbol.isHidden()) return false;
if (symbol.hasFlag(.WASM_SYM_EXPORTED)) return true;
if (symbol.hasFlag(.WASM_SYM_BINDING_WEAK)) return false;
return true;
}
pub fn isWeak(self: Symbol) bool {
return self.flags & @enumToInt(Flag.WASM_SYM_BINDING_WEAK) != 0;
pub fn isWeak(symbol: Symbol) bool {
return symbol.flags & @enumToInt(Flag.WASM_SYM_BINDING_WEAK) != 0;
}
/// Formats the symbol into human-readable text
pub fn format(self: Symbol, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
pub fn format(symbol: Symbol, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
_ = fmt;
_ = options;
const kind_fmt: u8 = switch (self.tag) {
const kind_fmt: u8 = switch (symbol.tag) {
.function => 'F',
.data => 'D',
.global => 'G',
@ -165,12 +165,12 @@ pub fn format(self: Symbol, comptime fmt: []const u8, options: std.fmt.FormatOpt
.table => 'T',
.dead => '-',
};
const visible: []const u8 = if (self.isVisible()) "yes" else "no";
const binding: []const u8 = if (self.isLocal()) "local" else "global";
const undef: []const u8 = if (self.isUndefined()) "undefined" else "";
const visible: []const u8 = if (symbol.isVisible()) "yes" else "no";
const binding: []const u8 = if (symbol.isLocal()) "local" else "global";
const undef: []const u8 = if (symbol.isUndefined()) "undefined" else "";
try writer.print(
"{c} binding={s} visible={s} id={d} name_offset={d} {s}",
.{ kind_fmt, binding, visible, self.index, self.name, undef },
.{ kind_fmt, binding, visible, symbol.index, symbol.name, undef },
);
}

View File

@ -202,22 +202,22 @@ pub const Feature = struct {
required = '=',
};
pub fn toString(self: Feature) []const u8 {
return switch (self.tag) {
pub fn toString(feature: Feature) []const u8 {
return switch (feature.tag) {
.bulk_memory => "bulk-memory",
.exception_handling => "exception-handling",
.mutable_globals => "mutable-globals",
.nontrapping_fptoint => "nontrapping-fptoint",
.sign_ext => "sign-ext",
.tail_call => "tail-call",
else => @tagName(self),
else => @tagName(feature),
};
}
pub fn format(self: Feature, comptime fmt: []const u8, opt: std.fmt.FormatOptions, writer: anytype) !void {
pub fn format(feature: Feature, comptime fmt: []const u8, opt: std.fmt.FormatOptions, writer: anytype) !void {
_ = opt;
_ = fmt;
try writer.print("{c} {s}", .{ self.prefix, self.toString() });
try writer.print("{c} {s}", .{ feature.prefix, feature.toString() });
}
};

View File

@ -324,7 +324,6 @@ const Writer = struct {
fn writeNoOp(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
_ = w;
_ = inst;
_ = s;
// no-op, no argument to write
}

View File

@ -1550,14 +1550,27 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
.main_token = try c.addToken(.identifier, "_"),
.data = undefined,
});
return c.addNode(.{
.tag = .assign,
.main_token = try c.addToken(.equal, "="),
.data = .{
.lhs = lhs,
.rhs = try renderNode(c, payload.value),
},
});
const main_token = try c.addToken(.equal, "=");
if (payload.value.tag() == .identifier) {
// Render as `_ = @TypeOf(foo);` to avoid tripping "pointless discard" error.
return c.addNode(.{
.tag = .assign,
.main_token = main_token,
.data = .{
.lhs = lhs,
.rhs = try renderBuiltinCall(c, "@TypeOf", &.{payload.value}),
},
});
} else {
return c.addNode(.{
.tag = .assign,
.main_token = main_token,
.data = .{
.lhs = lhs,
.rhs = try renderNode(c, payload.value),
},
});
}
},
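Roughly what this branch changes in translate-c output (a sketch distilled from the test expectations updated further down; the function name is illustrative): every translated local is still discarded, but discarding an identifier is now rendered through @TypeOf so the generated code does not trip the new "pointless discard" error when the variable is also used.

// For a C local such as `int a = 1;`, the generated Zig now looks like:
pub export fn foo() void {
    var a: c_int = 1;
    _ = @TypeOf(a); // previously rendered as `_ = a;`
}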
.@"while" => {
const payload = node.castTag(.@"while").?.data;

View File

@ -2715,8 +2715,12 @@ pub const Type = extern union {
}
/// Returns 0 if the pointer is naturally aligned and the element type is 0-bit.
pub fn ptrAlignment(self: Type, target: Target) u32 {
switch (self.tag()) {
pub fn ptrAlignment(ty: Type, target: Target) u32 {
return ptrAlignmentAdvanced(ty, target, null) catch unreachable;
}
pub fn ptrAlignmentAdvanced(ty: Type, target: Target, sema_kit: ?Module.WipAnalysis) !u32 {
switch (ty.tag()) {
.single_const_pointer,
.single_mut_pointer,
.many_const_pointer,
@ -2728,8 +2732,12 @@ pub const Type = extern union {
.optional_single_const_pointer,
.optional_single_mut_pointer,
=> {
const child_type = self.cast(Payload.ElemType).?.data;
return child_type.abiAlignment(target);
const child_type = ty.cast(Payload.ElemType).?.data;
if (sema_kit) |sk| {
const res = try child_type.abiAlignmentAdvanced(target, .{ .sema_kit = sk });
return res.scalar;
}
return (child_type.abiAlignmentAdvanced(target, .eager) catch unreachable).scalar;
},
.manyptr_u8,
@ -2740,14 +2748,17 @@ pub const Type = extern union {
=> return 1,
.pointer => {
const ptr_info = self.castTag(.pointer).?.data;
const ptr_info = ty.castTag(.pointer).?.data;
if (ptr_info.@"align" != 0) {
return ptr_info.@"align";
} else if (sema_kit) |sk| {
const res = try ptr_info.pointee_type.abiAlignmentAdvanced(target, .{ .sema_kit = sk });
return res.scalar;
} else {
return ptr_info.pointee_type.abiAlignment(target);
return (ptr_info.pointee_type.abiAlignmentAdvanced(target, .eager) catch unreachable).scalar;
}
},
.optional => return self.castTag(.optional).?.data.ptrAlignment(target),
.optional => return ty.castTag(.optional).?.data.ptrAlignmentAdvanced(target, sema_kit),
else => unreachable,
}

View File

@ -14,7 +14,6 @@ test "bytes" {
.a = undefined,
.c = "12345".*, // this caused problems
};
_ = s_1;
var u_2 = U{ .s = s_1 };
_ = u_2;
@ -35,7 +34,6 @@ test "aggregate" {
.a = undefined,
.c = c, // this caused problems
};
_ = s_1;
var u_2 = U{ .s = s_1 };
_ = u_2;

View File

@ -486,7 +486,6 @@ test "Type.Union from Type.Enum" {
.decls = &.{},
},
});
_ = T;
_ = @typeInfo(T).Union;
}
@ -505,7 +504,6 @@ test "Type.Union from regular enum" {
.decls = &.{},
},
});
_ = T;
_ = @typeInfo(T).Union;
}

View File

@ -425,12 +425,8 @@ fn generic2(comptime T: type, param: T, param2: u8) void {
_ = param;
_ = param2;
}
fn generic3(param: anytype) @TypeOf(param) {
_ = param;
}
fn generic4(comptime param: anytype) @TypeOf(param) {
_ = param;
}
fn generic3(param: anytype) @TypeOf(param) {}
fn generic4(comptime param: anytype) @TypeOf(param) {}
test "typeInfo with comptime parameter in struct fn def" {
const S = struct {

View File

@ -6,7 +6,6 @@ export fn entry() void {
}
fn makeLlamas(count: usize) [count]u8 {
_ = count;
}
// error

View File

@ -0,0 +1,12 @@
export fn foo() void {
var x: i32 = 1234;
x += 1;
_ = x;
}
// error
// backend=stage2
// target=native
//
// :4:9: error: pointless discard of local variable
// :3:5: note: used here

View File

@ -1,6 +1,5 @@
pub fn main() void {
var x: usize = 0;
_ = x;
const z = @TypeOf(x, @as(u128, 5));
assert(z == u128);
}

View File

@ -116,10 +116,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn foo() void {
\\ while (true) if (true) {
\\ var a: c_int = 1;
\\ _ = a;
\\ _ = @TypeOf(a);
\\ } else {
\\ var b: c_int = 2;
\\ _ = b;
\\ _ = @TypeOf(b);
\\ };
\\ if (true) if (true) {};
\\}
@ -192,7 +192,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ .B = 0,
\\ .C = 0,
\\ };
\\ _ = a;
\\ _ = @TypeOf(a);
\\ {
\\ const struct_Foo_1 = extern struct {
\\ A: c_int,
@ -204,7 +204,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ .B = 0,
\\ .C = 0,
\\ };
\\ _ = a_2;
\\ _ = @TypeOf(a_2);
\\ }
\\}
});
@ -233,24 +233,24 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ B: c_int,
\\ C: c_int,
\\ };
\\ _ = union_unnamed_1;
\\ _ = @TypeOf(union_unnamed_1);
\\ const Foo = union_unnamed_1;
\\ var a: Foo = Foo{
\\ .A = @as(c_int, 0),
\\ };
\\ _ = a;
\\ _ = @TypeOf(a);
\\ {
\\ const union_unnamed_2 = extern union {
\\ A: c_int,
\\ B: c_int,
\\ C: c_int,
\\ };
\\ _ = union_unnamed_2;
\\ _ = @TypeOf(union_unnamed_2);
\\ const Foo_1 = union_unnamed_2;
\\ var a_2: Foo_1 = Foo_1{
\\ .A = @as(c_int, 0),
\\ };
\\ _ = a_2;
\\ _ = @TypeOf(a_2);
\\ }
\\}
});
@ -318,7 +318,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ const bar_1 = struct {
\\ threadlocal var static: c_int = 2;
\\ };
\\ _ = bar_1;
\\ _ = @TypeOf(bar_1);
\\ return 0;
\\}
});
@ -337,7 +337,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
\\pub export fn bar() c_int {
\\ var a: c_int = 2;
\\ _ = a;
\\ _ = @TypeOf(a);
\\ return 0;
\\}
\\pub export fn baz() c_int {
@ -352,7 +352,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn main() void {
\\ var a: c_int = @bitCast(c_int, @truncate(c_uint, @alignOf(c_int)));
\\ _ = a;
\\ _ = @TypeOf(a);
\\}
});
@ -500,7 +500,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\#define bar(x) (&x, +3, 4 == 4, 5 * 6, baz(1, 2), 2 % 2, baz(1,2))
, &[_][]const u8{
\\pub const foo = blk: {
\\ _ = foo;
\\ _ = @TypeOf(foo);
\\ break :blk bar;
\\};
,
@ -724,7 +724,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn function(arg_opaque_1: ?*struct_opaque) void {
\\ var opaque_1 = arg_opaque_1;
\\ var cast: ?*struct_opaque_2 = @ptrCast(?*struct_opaque_2, opaque_1);
\\ _ = cast;
\\ _ = @TypeOf(cast);
\\}
});
@ -761,7 +761,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn my_fn() align(128) void {}
\\pub export fn other_fn() void {
\\ var ARR: [16]u8 align(16) = undefined;
\\ _ = ARR;
\\ _ = @TypeOf(ARR);
\\}
});
}
@ -798,17 +798,17 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var a: c_int = undefined;
\\ _ = a;
\\ _ = @TypeOf(a);
\\ var b: u8 = 123;
\\ _ = b;
\\ _ = @TypeOf(b);
\\ const c: c_int = undefined;
\\ _ = c;
\\ _ = @TypeOf(c);
\\ const d: c_uint = @bitCast(c_uint, @as(c_int, 440));
\\ _ = d;
\\ _ = @TypeOf(d);
\\ var e: c_int = 10;
\\ _ = e;
\\ _ = @TypeOf(e);
\\ var f: c_uint = 10;
\\ _ = f;
\\ _ = @TypeOf(f);
\\}
});
@ -867,7 +867,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ const v2 = struct {
\\ const static: [5:0]u8 = "2.2.2".*;
\\ };
\\ _ = v2;
\\ _ = @TypeOf(v2);
\\}
});
@ -911,7 +911,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn bar() void {
\\ var func_ptr: ?*anyopaque = @ptrCast(?*anyopaque, foo);
\\ var typed_func_ptr: ?*const fn () callconv(.C) void = @intToPtr(?*const fn () callconv(.C) void, @intCast(c_ulong, @ptrToInt(func_ptr)));
\\ _ = typed_func_ptr;
\\ _ = @TypeOf(typed_func_ptr);
\\}
});
}
@ -1353,7 +1353,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var a: c_int = undefined;
\\ _ = a;
\\ _ = @TypeOf(a);
\\}
});
@ -1524,23 +1524,23 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var p: ?*anyopaque = undefined;
\\ {
\\ var to_char: [*c]u8 = @ptrCast([*c]u8, @alignCast(@import("std").meta.alignment([*c]u8), p));
\\ _ = to_char;
\\ _ = @TypeOf(to_char);
\\ var to_short: [*c]c_short = @ptrCast([*c]c_short, @alignCast(@import("std").meta.alignment([*c]c_short), p));
\\ _ = to_short;
\\ _ = @TypeOf(to_short);
\\ var to_int: [*c]c_int = @ptrCast([*c]c_int, @alignCast(@import("std").meta.alignment([*c]c_int), p));
\\ _ = to_int;
\\ _ = @TypeOf(to_int);
\\ var to_longlong: [*c]c_longlong = @ptrCast([*c]c_longlong, @alignCast(@import("std").meta.alignment([*c]c_longlong), p));
\\ _ = to_longlong;
\\ _ = @TypeOf(to_longlong);
\\ }
\\ {
\\ var to_char: [*c]u8 = @ptrCast([*c]u8, @alignCast(@import("std").meta.alignment([*c]u8), p));
\\ _ = to_char;
\\ _ = @TypeOf(to_char);
\\ var to_short: [*c]c_short = @ptrCast([*c]c_short, @alignCast(@import("std").meta.alignment([*c]c_short), p));
\\ _ = to_short;
\\ _ = @TypeOf(to_short);
\\ var to_int: [*c]c_int = @ptrCast([*c]c_int, @alignCast(@import("std").meta.alignment([*c]c_int), p));
\\ _ = to_int;
\\ _ = @TypeOf(to_int);
\\ var to_longlong: [*c]c_longlong = @ptrCast([*c]c_longlong, @alignCast(@import("std").meta.alignment([*c]c_longlong), p));
\\ _ = to_longlong;
\\ _ = @TypeOf(to_longlong);
\\ }
\\}
});
@ -1786,11 +1786,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var arr: [10]u8 = [1]u8{
\\ 1,
\\ } ++ [1]u8{0} ** 9;
\\ _ = arr;
\\ _ = @TypeOf(arr);
\\ var arr1: [10][*c]u8 = [1][*c]u8{
\\ null,
\\ } ++ [1][*c]u8{null} ** 9;
\\ _ = arr1;
\\ _ = @TypeOf(arr1);
\\}
});
@ -2038,16 +2038,16 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub var c: c_int = 4;
\\pub export fn foo(arg_c_1: u8) void {
\\ var c_1 = arg_c_1;
\\ _ = c_1;
\\ _ = @TypeOf(c_1);
\\ var a_2: c_int = undefined;
\\ var b_3: u8 = 123;
\\ b_3 = @bitCast(u8, @truncate(i8, a_2));
\\ {
\\ var d: c_int = 5;
\\ _ = d;
\\ _ = @TypeOf(d);
\\ }
\\ var d: c_uint = @bitCast(c_uint, @as(c_int, 440));
\\ _ = d;
\\ _ = @TypeOf(d);
\\}
});
@ -2146,7 +2146,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ {
\\ var i: c_int = 2;
\\ var b: c_int = 4;
\\ _ = b;
\\ _ = @TypeOf(b);
\\ while ((i + @as(c_int, 2)) != 0) : (i = 2) {
\\ var a: c_int = 2;
\\ _ = blk: {
@ -2159,7 +2159,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ }
\\ }
\\ var i: u8 = 2;
\\ _ = i;
\\ _ = @TypeOf(i);
\\}
});
@ -2396,27 +2396,27 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn escapes() [*c]const u8 {
\\ var a: u8 = '\'';
\\ _ = a;
\\ _ = @TypeOf(a);
\\ var b: u8 = '\\';
\\ _ = b;
\\ _ = @TypeOf(b);
\\ var c: u8 = '\x07';
\\ _ = c;
\\ _ = @TypeOf(c);
\\ var d: u8 = '\x08';
\\ _ = d;
\\ _ = @TypeOf(d);
\\ var e: u8 = '\x0c';
\\ _ = e;
\\ _ = @TypeOf(e);
\\ var f: u8 = '\n';
\\ _ = f;
\\ _ = @TypeOf(f);
\\ var g: u8 = '\r';
\\ _ = g;
\\ _ = @TypeOf(g);
\\ var h: u8 = '\t';
\\ _ = h;
\\ _ = @TypeOf(h);
\\ var i: u8 = '\x0b';
\\ _ = i;
\\ _ = @TypeOf(i);
\\ var j: u8 = '\x00';
\\ _ = j;
\\ _ = @TypeOf(j);
\\ var k: u8 = '"';
\\ _ = k;
\\ _ = @TypeOf(k);
\\ return "'\\\x07\x08\x0c\n\r\t\x0b\x00\"";
\\}
});
@ -2612,7 +2612,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn foo() c_int {
\\ return blk: {
\\ var a: c_int = 1;
\\ _ = a;
\\ _ = @TypeOf(a);
\\ break :blk a;
\\ };
\\}
@ -2716,7 +2716,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\int bar(void) { return 0; }
, &[_][]const u8{
\\pub inline fn CALL(arg: anytype) @TypeOf(bar()) {
\\ _ = arg;
\\ _ = @TypeOf(arg);
\\ return bar();
\\}
});
@ -2775,14 +2775,14 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn foo() void {
\\ if (true) {
\\ var a: c_int = 2;
\\ _ = a;
\\ _ = @TypeOf(a);
\\ }
\\ if ((blk: {
\\ _ = @as(c_int, 2);
\\ break :blk @as(c_int, 5);
\\ }) != 0) {
\\ var a: c_int = 2;
\\ _ = a;
\\ _ = @TypeOf(a);
\\ }
\\}
});
@ -3285,7 +3285,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\#define a 2
, &[_][]const u8{
\\pub inline fn FOO(bar: anytype) @TypeOf(baz(@import("std").zig.c_translation.cast(?*anyopaque, baz))) {
\\ _ = bar;
\\ _ = @TypeOf(bar);
\\ return baz(@import("std").zig.c_translation.cast(?*anyopaque, baz));
\\}
,
@ -3425,7 +3425,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo(arg_a: [*c]c_int) void {
\\ var a = arg_a;
\\ _ = a;
\\ _ = @TypeOf(a);
\\}
\\pub export fn bar(arg_a: [*c]const c_int) void {
\\ var a = arg_a;
@ -3785,12 +3785,12 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn bar(arg_x: c_int, arg_y: c_int) c_int {
\\ var x = arg_x;
\\ var y = arg_y;
\\ _ = y;
\\ _ = @TypeOf(y);
\\ return x;
\\}
,
\\pub inline fn FOO(A: anytype, B: anytype) @TypeOf(A) {
\\ _ = B;
\\ _ = @TypeOf(B);
\\ return A;
\\}
});