self-hosted: working towards conditional branching test case

New features:
 * Functions can have parameters in semantic analysis. Codegen for
   parameters is not implemented yet.
 * Support for the i8, i16, i32, i64, u8, u16, u32, and u64 primitive
   type identifiers.
 * New ZIR instructions: arg, block, and breakvoid (see the sketch below).
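
A rough sketch of the kind of Zig source these features are aimed at (my
reading of the hand-written ZIR test case below; not an actual test from
this commit):

```
// Parameters lower to arg(0)/arg(1); the `if` lowers to a block whose
// condbr either hits unreachable or breaks out of the block via breakvoid.
export fn entry(a: i32, b: i32) void {
    if (a + b != 7) unreachable;
}
```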

Implementation details:

 * Move Module.Body to ir.Body
 * Scope.Block gains a parent field and an optional Label field
 * Fix a bug in integer type equality comparison (see the sketch below).
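
A minimal sketch of what that equality fix is meant to guarantee, using
Type.initTag and Type.eql from the type.zig diff below (the import path and
the helper function are assumptions, not code from this commit):

```
const assert = @import("std").debug.assert;
const Type = @import("type.zig").Type; // import path assumed

fn checkIntTypeEquality() void {
    // Named integer types only compare equal by tag, so u64 and usize stay
    // distinct even when usize is 64 bits wide on the target.
    assert(!Type.initTag(.u64).eql(Type.initTag(.usize)));
    // Identical fixed-width tags still compare equal.
    assert(Type.initTag(.i32).eql(Type.initTag(.i32)));
}
```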

Here's the test case I'm working towards:

```
@void = primitive(void)
@i32 = primitive(i32)
@fnty = fntype([@i32, @i32], @void)

@0 = str("entry")
@1 = export(@0, "entry")

@entry = fn(@fnty, {
  %0 = arg(0)
  %1 = arg(1)
  %2 = add(%0, %1)
  %3 = int(7)
  %4 = block("if", {
    %neq = cmp(%2, neq, %3)
    %5 = condbr(%neq, {
      %6 = unreachable()
    }, {
      %7 = breakvoid("if")
    })
  })
  %11 = returnvoid()
})
```

$ ./zig-cache/bin/zig build-obj test.zir
test.zir:9:12: error: TODO implement function parameters for Arch.x86_64

That's where I left off.
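
The error comes from the new genArg stub in the codegen diff below. As a
rough illustration of the kind of mapping it will eventually need, here is a
hypothetical sketch assuming System V integer-register parameter passing on
x86_64; the register list is standard ABI, but none of this is taken from
this commit:

```
// Hypothetical sketch only: map an `arg` index to a SysV integer argument
// register name for x86_64; arguments past the sixth would go on the stack.
const sysv_int_param_regs = [_][]const u8{ "rdi", "rsi", "rdx", "rcx", "r8", "r9" };

fn paramRegName(index: usize) ?[]const u8 {
    return if (index < sysv_int_param_regs.len) sysv_int_param_regs[index] else null;
}
```
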
Andrew Kelley 2020-06-26 02:25:09 -04:00
parent e820678ca1
commit 130c7fd23b
6 changed files with 844 additions and 99 deletions

View File

@ -15,6 +15,7 @@ const ir = @import("ir.zig");
const zir = @import("zir.zig");
const Module = @This();
const Inst = ir.Inst;
const Body = ir.Body;
const ast = std.zig.ast;
const trace = @import("tracy.zig").trace;
@ -649,11 +650,19 @@ pub const Scope = struct {
pub const Block = struct {
pub const base_tag: Tag = .block;
base: Scope = Scope{ .tag = base_tag },
parent: ?*Block,
func: ?*Fn,
decl: *Decl,
instructions: ArrayListUnmanaged(*Inst),
/// Points to the arena allocator of DeclAnalysis
arena: *Allocator,
label: ?Label = null,
pub const Label = struct {
name: []const u8,
results: ArrayListUnmanaged(*Inst),
block_inst: *Inst.Block,
};
};
/// This is a temporary structure, references to it are valid only
@ -676,10 +685,6 @@ pub const Scope = struct {
};
};
pub const Body = struct {
instructions: []*Inst,
};
pub const AllErrors = struct {
arena: std.heap.ArenaAllocator.State,
list: []const Message,
@ -1139,13 +1144,16 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const body_node = fn_proto.body_node orelse
return self.failTok(&fn_type_scope.base, fn_proto.fn_token, "TODO implement extern functions", .{});
if (fn_proto.params_len != 0) {
return self.failTok(
&fn_type_scope.base,
fn_proto.params()[0].name_token.?,
"TODO implement function parameters",
.{},
);
const param_decls = fn_proto.params();
const param_types = try fn_type_scope.arena.allocator.alloc(*zir.Inst, param_decls.len);
for (param_decls) |param_decl, i| {
const param_type_node = switch (param_decl.param_type) {
.var_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
.var_args => |tok| return self.failTok(&fn_type_scope.base, tok, "TODO implement var args", .{}),
.type_expr => |node| node,
};
param_types[i] = try self.astGenExpr(&fn_type_scope.base, param_type_node);
}
if (fn_proto.lib_name) |lib_name| {
return self.failNode(&fn_type_scope.base, lib_name, "TODO implement function library name", .{});
@ -1174,7 +1182,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const fn_src = tree.token_locs[fn_proto.fn_token].start;
const fn_type_inst = try self.addZIRInst(&fn_type_scope.base, fn_src, zir.Inst.FnType, .{
.return_type = return_type_inst,
.param_types = &[0]*zir.Inst{},
.param_types = param_types,
}, .{});
_ = try self.addZIRInst(&fn_type_scope.base, fn_src, zir.Inst.Return, .{ .operand = fn_type_inst }, .{});
@ -1184,6 +1192,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
var block_scope: Scope.Block = .{
.parent = null,
.func = null,
.decl = decl,
.instructions = .{},
@ -1302,10 +1311,25 @@ fn astGenExpr(self: *Module, scope: *Scope, ast_node: *ast.Node) InnerError!*zir
.Call => return self.astGenCall(scope, @fieldParentPtr(ast.Node.Call, "base", ast_node)),
.Unreachable => return self.astGenUnreachable(scope, @fieldParentPtr(ast.Node.Unreachable, "base", ast_node)),
.ControlFlowExpression => return self.astGenControlFlowExpression(scope, @fieldParentPtr(ast.Node.ControlFlowExpression, "base", ast_node)),
.If => return self.astGenIf(scope, @fieldParentPtr(ast.Node.If, "base", ast_node)),
else => return self.failNode(scope, ast_node, "TODO implement astGenExpr for {}", .{@tagName(ast_node.id)}),
}
}
fn astGenIf(self: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.Inst {
if (if_node.payload) |payload| {
return self.failNode(scope, payload, "TODO implement astGenIf for optionals", .{});
}
if (if_node.@"else") |else_node| {
if (else_node.payload) |payload| {
return self.failNode(scope, payload, "TODO implement astGenIf for error unions", .{});
}
}
const cond = try self.astGenExpr(scope, if_node.condition);
const body = try self.astGenExpr(scope, if_node.body);
return self.failNode(scope, if_node.condition, "TODO implement astGenIf", .{});
}
fn astGenControlFlowExpression(
self: *Module,
scope: *Scope,
@ -1351,7 +1375,18 @@ fn astGenIdent(self: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerE
),
error.InvalidCharacter => break :integer,
};
return self.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{});
const val = switch (bit_count) {
8 => if (is_signed) Value.initTag(.i8_type) else Value.initTag(.u8_type),
16 => if (is_signed) Value.initTag(.i16_type) else Value.initTag(.u16_type),
32 => if (is_signed) Value.initTag(.i32_type) else Value.initTag(.u32_type),
64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
else => return self.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{}),
};
const src = tree.token_locs[ident.token].start;
return self.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.type),
.val = val,
});
}
}
@ -1494,16 +1529,18 @@ fn astGenBuiltinCall(self: *Module, scope: *Scope, call: *ast.Node.BuiltinCall)
fn astGenCall(self: *Module, scope: *Scope, call: *ast.Node.Call) InnerError!*zir.Inst {
const tree = scope.tree();
if (call.params_len != 0) {
return self.failNode(scope, &call.base, "TODO implement fn calls with parameters", .{});
}
const lhs = try self.astGenExpr(scope, call.lhs);
const param_nodes = call.params();
const args = try scope.cast(Scope.GenZIR).?.arena.allocator.alloc(*zir.Inst, param_nodes.len);
for (param_nodes) |param_node, i| {
args[i] = try self.astGenExpr(scope, param_node);
}
const src = tree.token_locs[call.lhs.firstToken()].start;
return self.addZIRInst(scope, src, zir.Inst.Call, .{
.func = lhs,
.args = &[0]*zir.Inst{},
.args = args,
}, .{});
}
@ -1871,6 +1908,7 @@ fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
var arena = decl.typed_value.most_recent.arena.?.promote(self.allocator);
defer decl.typed_value.most_recent.arena.?.* = arena.state;
var inner_block: Scope.Block = .{
.parent = null,
.func = func,
.decl = decl,
.instructions = .{},
@ -2323,7 +2361,10 @@ fn analyzeInstConst(self: *Module, scope: *Scope, const_inst: *zir.Inst.Const) I
fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
switch (old_inst.tag) {
.arg => return self.analyzeInstArg(scope, old_inst.cast(zir.Inst.Arg).?),
.block => return self.analyzeInstBlock(scope, old_inst.cast(zir.Inst.Block).?),
.breakpoint => return self.analyzeInstBreakpoint(scope, old_inst.cast(zir.Inst.Breakpoint).?),
.breakvoid => return self.analyzeInstBreakVoid(scope, old_inst.cast(zir.Inst.BreakVoid).?),
.call => return self.analyzeInstCall(scope, old_inst.cast(zir.Inst.Call).?),
.compileerror => return self.analyzeInstCompileError(scope, old_inst.cast(zir.Inst.CompileError).?),
.@"const" => return self.analyzeInstConst(scope, old_inst.cast(zir.Inst.Const).?),
@ -2436,11 +2477,105 @@ fn analyzeInstCompileError(self: *Module, scope: *Scope, inst: *zir.Inst.Compile
return self.fail(scope, inst.base.src, "{}", .{inst.positionals.msg});
}
fn analyzeInstArg(self: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const param_count = fn_ty.fnParamLen();
if (inst.positionals.index >= param_count) {
return self.fail(scope, inst.base.src, "parameter index {} outside list of length {}", .{
inst.positionals.index,
param_count,
});
}
const param_type = fn_ty.fnParamType(inst.positionals.index);
return self.addNewInstArgs(b, inst.base.src, param_type, Inst.Arg, .{
.index = inst.positionals.index,
});
}
fn analyzeInstBlock(self: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerError!*Inst {
const parent_block = scope.cast(Scope.Block).?;
// Reserve space for a Block instruction so that generated Break instructions can
// point to it, even if it doesn't end up getting used because the code ends up being
// comptime evaluated.
const block_inst = try parent_block.arena.create(Inst.Block);
block_inst.* = .{
.base = .{
.tag = Inst.Block.base_tag,
.ty = undefined, // Set after analysis.
.src = inst.base.src,
},
.args = undefined,
};
var child_block: Scope.Block = .{
.parent = parent_block,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
// TODO @as here is working around a miscompilation compiler bug :(
.label = @as(?Scope.Block.Label, Scope.Block.Label{
.name = inst.positionals.label,
.results = .{},
.block_inst = block_inst,
}),
};
const label = &child_block.label.?;
defer child_block.instructions.deinit(self.allocator);
defer label.results.deinit(self.allocator);
try self.analyzeBody(&child_block.base, inst.positionals.body);
// Blocks must terminate with noreturn instruction.
assert(child_block.instructions.items.len != 0);
assert(child_block.instructions.items[child_block.instructions.items.len - 1].tag.isNoReturn());
if (label.results.items.len <= 1) {
// No need to add the Block instruction; we can add the instructions to the parent block directly.
// Blocks are terminated with a noreturn instruction which we do not want to include.
const instrs = child_block.instructions.items;
try parent_block.instructions.appendSlice(self.allocator, instrs[0 .. instrs.len - 1]);
if (label.results.items.len == 1) {
return label.results.items[0];
} else {
return self.constNoReturn(scope, inst.base.src);
}
}
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
try parent_block.instructions.append(self.allocator, &block_inst.base);
block_inst.base.ty = try self.resolvePeerTypes(scope, label.results.items);
block_inst.args.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
return &block_inst.base;
}
fn analyzeInstBreakpoint(self: *Module, scope: *Scope, inst: *zir.Inst.Breakpoint) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, inst.base.src);
return self.addNewInstArgs(b, inst.base.src, Type.initTag(.void), Inst.Breakpoint, {});
}
fn analyzeInstBreakVoid(self: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
const label_name = inst.positionals.label;
const void_inst = try self.constVoid(scope, inst.base.src);
var opt_block = scope.cast(Scope.Block);
while (opt_block) |block| {
if (block.label) |*label| {
if (mem.eql(u8, label.name, label_name)) {
try label.results.append(self.allocator, void_inst);
return self.constNoReturn(scope, inst.base.src);
}
}
opt_block = block.parent;
} else {
return self.fail(scope, inst.base.src, "use of undeclared label '{}'", .{label_name});
}
}
fn analyzeInstDeclRefStr(self: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
const decl_name = try self.resolveConstString(scope, inst.positionals.name);
return self.analyzeDeclRefByName(scope, inst.base.src, decl_name);
@ -2602,35 +2737,38 @@ fn analyzeInstFn(self: *Module, scope: *Scope, fn_inst: *zir.Inst.Fn) InnerError
fn analyzeInstFnType(self: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
const return_type = try self.resolveType(scope, fntype.positionals.return_type);
if (return_type.zigTypeTag() == .NoReturn and
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .Unspecified)
{
// Hot path for some common function types.
if (fntype.positionals.param_types.len == 0) {
if (return_type.zigTypeTag() == .NoReturn and fntype.kw_args.cc == .Unspecified) {
return self.constType(scope, fntype.base.src, Type.initTag(.fn_noreturn_no_args));
}
if (return_type.zigTypeTag() == .Void and
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .Unspecified)
{
if (return_type.zigTypeTag() == .Void and fntype.kw_args.cc == .Unspecified) {
return self.constType(scope, fntype.base.src, Type.initTag(.fn_void_no_args));
}
if (return_type.zigTypeTag() == .NoReturn and
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .Naked)
{
if (return_type.zigTypeTag() == .NoReturn and fntype.kw_args.cc == .Naked) {
return self.constType(scope, fntype.base.src, Type.initTag(.fn_naked_noreturn_no_args));
}
if (return_type.zigTypeTag() == .Void and
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .C)
{
if (return_type.zigTypeTag() == .Void and fntype.kw_args.cc == .C) {
return self.constType(scope, fntype.base.src, Type.initTag(.fn_ccc_void_no_args));
}
}
return self.fail(scope, fntype.base.src, "TODO implement fntype instruction more", .{});
const arena = scope.arena();
const param_types = try arena.alloc(Type, fntype.positionals.param_types.len);
for (fntype.positionals.param_types) |param_type, i| {
param_types[i] = try self.resolveType(scope, param_type);
}
const payload = try arena.create(Type.Payload.Function);
payload.* = .{
.cc = fntype.kw_args.cc,
.return_type = return_type,
.param_types = param_types,
};
return self.constType(scope, fntype.base.src, Type.initPayload(&payload.base));
}
fn analyzeInstPrimitive(self: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
@ -2757,10 +2895,17 @@ fn analyzeInstElemPtr(self: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inn
}
fn analyzeInstAdd(self: *Module, scope: *Scope, inst: *zir.Inst.Add) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const lhs = try self.resolveInst(scope, inst.positionals.lhs);
const rhs = try self.resolveInst(scope, inst.positionals.rhs);
if (lhs.ty.zigTypeTag() == .Int and rhs.ty.zigTypeTag() == .Int) {
if (!lhs.ty.eql(rhs.ty)) {
return self.fail(scope, inst.base.src, "TODO implement peer type resolution", .{});
}
if (lhs.value()) |lhs_val| {
if (rhs.value()) |rhs_val| {
// TODO is this a performance issue? maybe we should try the operation without
@ -2777,10 +2922,6 @@ fn analyzeInstAdd(self: *Module, scope: *Scope, inst: *zir.Inst.Add) InnerError!
result_bigint.add(lhs_bigint, rhs_bigint);
const result_limbs = result_bigint.limbs[0..result_bigint.len];
if (!lhs.ty.eql(rhs.ty)) {
return self.fail(scope, inst.base.src, "TODO implement peer type resolution", .{});
}
const val_payload = if (result_bigint.positive) blk: {
const val_payload = try scope.arena().create(Value.Payload.IntBigPositive);
val_payload.* = .{ .limbs = result_limbs };
@ -2797,6 +2938,12 @@ fn analyzeInstAdd(self: *Module, scope: *Scope, inst: *zir.Inst.Add) InnerError!
});
}
}
const b = try self.requireRuntimeBlock(scope, inst.base.src);
return self.addNewInstArgs(b, inst.base.src, lhs.ty, Inst.Add, .{
.lhs = lhs,
.rhs = rhs,
});
}
return self.fail(scope, inst.base.src, "TODO implement more analyze add", .{});
@ -2936,6 +3083,7 @@ fn analyzeInstCondBr(self: *Module, scope: *Scope, inst: *zir.Inst.CondBr) Inner
const parent_block = try self.requireRuntimeBlock(scope, inst.base.src);
var true_block: Scope.Block = .{
.parent = parent_block,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
@ -2945,6 +3093,7 @@ fn analyzeInstCondBr(self: *Module, scope: *Scope, inst: *zir.Inst.CondBr) Inner
try self.analyzeBody(&true_block.base, inst.positionals.true_body);
var false_block: Scope.Block = .{
.parent = parent_block,
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
@ -3178,7 +3327,7 @@ fn cmpNumeric(
const casted_lhs = try self.coerce(scope, dest_type, lhs);
const casted_rhs = try self.coerce(scope, dest_type, rhs);
return self.addNewInstArgs(b, src, dest_type, Inst.Cmp, .{
return self.addNewInstArgs(b, src, Type.initTag(.bool), Inst.Cmp, .{
.lhs = casted_lhs,
.rhs = casted_rhs,
.op = op,
@ -3197,6 +3346,12 @@ fn makeIntType(self: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
}
}
fn resolvePeerTypes(self: *Module, scope: *Scope, instructions: []*Inst) !Type {
if (instructions.len == 0)
return Type.initTag(.noreturn);
return self.fail(scope, instructions[0].src, "TODO peer type resolution", .{});
}
fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
// If the types are the same, we can return the operand.
if (dest_type.eql(inst.ty))
@ -3238,7 +3393,10 @@ fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst {
if (inst.value()) |val| {
return self.constInst(scope, inst.src, .{ .ty = dest_type, .val = val });
} else {
return self.fail(scope, inst.src, "TODO implement runtime integer widening", .{});
return self.fail(scope, inst.src, "TODO implement runtime integer widening ({} to {})", .{
inst.ty,
dest_type,
});
}
} else {
return self.fail(scope, inst.src, "TODO implement more int widening {} to {}", .{ inst.ty, dest_type });

View File

@ -174,6 +174,9 @@ const Function = struct {
fn genFuncInst(self: *Function, inst: *ir.Inst) !MCValue {
switch (inst.tag) {
.add => return self.genAdd(inst.cast(ir.Inst.Add).?),
.arg => return self.genArg(inst.src),
.block => return self.genBlock(inst.cast(ir.Inst.Block).?),
.breakpoint => return self.genBreakpoint(inst.src),
.call => return self.genCall(inst.cast(ir.Inst.Call).?),
.unreach => return MCValue{ .unreach = {} },
@ -190,6 +193,19 @@ const Function = struct {
}
}
fn genAdd(self: *Function, inst: *ir.Inst.Add) !MCValue {
switch (self.target.cpu.arch) {
else => return self.fail(inst.base.src, "TODO implement add for {}", .{self.target.cpu.arch}),
}
}
fn genArg(self: *Function, src: usize) !MCValue {
switch (self.target.cpu.arch) {
else => return self.fail(src, "TODO implement function parameters for {}", .{self.target.cpu.arch}),
}
return .none;
}
fn genBreakpoint(self: *Function, src: usize) !MCValue {
switch (self.target.cpu.arch) {
.i386, .x86_64 => {
@ -302,6 +318,12 @@ const Function = struct {
}
}
fn genBlock(self: *Function, inst: *ir.Inst.Block) !MCValue {
switch (self.target.cpu.arch) {
else => return self.fail(inst.base.src, "TODO implement codegen Block for {}", .{self.target.cpu.arch}),
}
}
fn genAsm(self: *Function, inst: *ir.Inst.Assembly) !MCValue {
// TODO convert to inline function
switch (self.target.cpu.arch) {

View File

@ -15,8 +15,11 @@ pub const Inst = struct {
src: usize,
pub const Tag = enum {
add,
arg,
assembly,
bitcast,
block,
breakpoint,
call,
cmp,
@ -28,6 +31,33 @@ pub const Inst = struct {
ret,
retvoid,
unreach,
/// Returns whether the instruction is one of the control flow "noreturn" types.
/// Function calls do not count. When ZIR is generated, the compiler automatically
/// emits an `Unreach` after a function call with the `noreturn` return type.
pub fn isNoReturn(tag: Tag) bool {
return switch (tag) {
.add,
.arg,
.assembly,
.bitcast,
.block,
.breakpoint,
.cmp,
.constant,
.isnonnull,
.isnull,
.ptrtoint,
.call,
=> false,
.condbr,
.ret,
.retvoid,
.unreach,
=> true,
};
}
};
pub fn cast(base: *Inst, comptime T: type) ?*T {
@ -50,6 +80,25 @@ pub const Inst = struct {
return inst.val;
}
pub const Add = struct {
pub const base_tag = Tag.add;
base: Inst,
args: struct {
lhs: *Inst,
rhs: *Inst,
},
};
pub const Arg = struct {
pub const base_tag = Tag.arg;
base: Inst,
args: struct {
index: usize,
},
};
pub const Assembly = struct {
pub const base_tag = Tag.assembly;
base: Inst,
@ -73,6 +122,14 @@ pub const Inst = struct {
},
};
pub const Block = struct {
pub const base_tag = Tag.block;
base: Inst,
args: struct {
body: Body,
},
};
pub const Breakpoint = struct {
pub const base_tag = Tag.breakpoint;
base: Inst,
@ -105,8 +162,8 @@ pub const Inst = struct {
base: Inst,
args: struct {
condition: *Inst,
true_body: Module.Body,
false_body: Module.Body,
true_body: Body,
false_body: Body,
},
};
@ -164,3 +221,7 @@ pub const Inst = struct {
args: void,
};
};
pub const Body = struct {
instructions: []*Inst,
};

View File

@ -21,8 +21,14 @@ pub const Type = extern union {
switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -57,6 +63,7 @@ pub const Type = extern union {
.fn_void_no_args => return .Fn,
.fn_naked_noreturn_no_args => return .Fn,
.fn_ccc_void_no_args => return .Fn,
.function => return .Fn,
.array, .array_u8_sentinel_0 => return .Array,
.single_const_pointer => return .Pointer,
@ -126,10 +133,14 @@ pub const Type = extern union {
@panic("TODO implement more pointer Type equality comparison");
},
.Int => {
if (a.tag() != b.tag()) {
// Detect that e.g. u64 != usize, even if the bits match on a particular target.
const a_is_named_int = a.isNamedInt();
const b_is_named_int = b.isNamedInt();
if (a_is_named_int != b_is_named_int)
return false;
}
if (a_is_named_int)
return a.tag() == b.tag();
// Remaining cases are arbitrary sized integers.
// The target will not be branched upon, because we handled target-dependent cases above.
const info_a = a.intInfo(@as(Target, undefined));
const info_b = b.intInfo(@as(Target, undefined));
@ -176,8 +187,14 @@ pub const Type = extern union {
} else switch (self.ptr_otherwise.tag) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -231,6 +248,21 @@ pub const Type = extern union {
},
.int_signed => return self.copyPayloadShallow(allocator, Payload.IntSigned),
.int_unsigned => return self.copyPayloadShallow(allocator, Payload.IntUnsigned),
.function => {
const payload = @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise);
const new_payload = try allocator.create(Payload.Function);
const param_types = try allocator.alloc(Type, payload.param_types.len);
for (payload.param_types) |param_type, i| {
param_types[i] = try param_type.copy(allocator);
}
new_payload.* = .{
.base = payload.base,
.return_type = try payload.return_type.copy(allocator),
.param_types = param_types,
.cc = payload.cc,
};
return Type{ .ptr_otherwise = &new_payload.base };
},
}
}
@ -246,7 +278,7 @@ pub const Type = extern union {
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: var,
) !void {
) @TypeOf(out_stream).Error!void {
comptime assert(fmt.len == 0);
var ty = self;
while (true) {
@ -254,8 +286,14 @@ pub const Type = extern union {
switch (t) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -288,6 +326,16 @@ pub const Type = extern union {
.fn_naked_noreturn_no_args => return out_stream.writeAll("fn() callconv(.Naked) noreturn"),
.fn_ccc_void_no_args => return out_stream.writeAll("fn() callconv(.C) void"),
.single_const_pointer_to_comptime_int => return out_stream.writeAll("*const comptime_int"),
.function => {
const payload = @fieldParentPtr(Payload.Function, "base", ty.ptr_otherwise);
try out_stream.writeAll("fn(");
for (payload.param_types) |param_type, i| {
if (i != 0) try out_stream.writeAll(", ");
try param_type.format("", .{}, out_stream);
}
try out_stream.writeAll(") ");
try payload.return_type.format("", .{}, out_stream);
},
.array_u8_sentinel_0 => {
const payload = @fieldParentPtr(Payload.Array_u8_Sentinel0, "base", ty.ptr_otherwise);
@ -322,8 +370,14 @@ pub const Type = extern union {
switch (self.tag()) {
.u8 => return Value.initTag(.u8_type),
.i8 => return Value.initTag(.i8_type),
.isize => return Value.initTag(.isize_type),
.u16 => return Value.initTag(.u16_type),
.i16 => return Value.initTag(.i16_type),
.u32 => return Value.initTag(.u32_type),
.i32 => return Value.initTag(.i32_type),
.u64 => return Value.initTag(.u64_type),
.i64 => return Value.initTag(.i64_type),
.usize => return Value.initTag(.usize_type),
.isize => return Value.initTag(.isize_type),
.c_short => return Value.initTag(.c_short_type),
.c_ushort => return Value.initTag(.c_ushort_type),
.c_int => return Value.initTag(.c_int_type),
@ -365,8 +419,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -386,6 +446,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.array_u8_sentinel_0,
@ -417,9 +478,14 @@ pub const Type = extern union {
.fn_void_no_args, // represents machine code; not a pointer
.fn_naked_noreturn_no_args, // represents machine code; not a pointer
.fn_ccc_void_no_args, // represents machine code; not a pointer
.function, // represents machine code; not a pointer
.array_u8_sentinel_0,
=> return 1,
.i16, .u16 => return 2,
.i32, .u32 => return 4,
.i64, .u64 => return 8,
.isize,
.usize,
.single_const_pointer_to_comptime_int,
@ -473,8 +539,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -505,6 +577,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.int_unsigned,
.int_signed,
=> false,
@ -519,8 +592,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -552,6 +631,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.int_unsigned,
.int_signed,
=> false,
@ -565,8 +645,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -596,6 +682,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.int_unsigned,
.int_signed,
=> unreachable,
@ -612,8 +699,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -641,6 +734,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.int_unsigned,
.int_signed,
=> unreachable,
@ -657,8 +751,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -686,6 +786,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
@ -703,8 +804,14 @@ pub const Type = extern union {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
@ -732,6 +839,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
@ -766,6 +874,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
@ -778,6 +887,9 @@ pub const Type = extern union {
.c_uint,
.c_ulong,
.c_ulonglong,
.u16,
.u32,
.u64,
=> false,
.int_signed,
@ -787,11 +899,14 @@ pub const Type = extern union {
.c_int,
.c_long,
.c_longlong,
.i16,
.i32,
.i64,
=> true,
};
}
/// Asserts the type is a fixed-width integer.
/// Asserts the type is an integer.
pub fn intInfo(self: Type, target: Target) struct { signed: bool, bits: u16 } {
return switch (self.tag()) {
.f16,
@ -813,6 +928,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
@ -824,6 +940,12 @@ pub const Type = extern union {
.int_signed => .{ .signed = true, .bits = self.cast(Payload.IntSigned).?.bits },
.u8 => .{ .signed = false, .bits = 8 },
.i8 => .{ .signed = true, .bits = 8 },
.u16 => .{ .signed = false, .bits = 16 },
.i16 => .{ .signed = true, .bits = 16 },
.u32 => .{ .signed = false, .bits = 32 },
.i32 => .{ .signed = true, .bits = 32 },
.u64 => .{ .signed = false, .bits = 64 },
.i64 => .{ .signed = true, .bits = 64 },
.usize => .{ .signed = false, .bits = target.cpu.arch.ptrBitWidth() },
.isize => .{ .signed = true, .bits = target.cpu.arch.ptrBitWidth() },
.c_short => .{ .signed = true, .bits = CType.short.sizeInBits(target) },
@ -837,6 +959,59 @@ pub const Type = extern union {
};
}
pub fn isNamedInt(self: Type) bool {
return switch (self.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.@"undefined",
.fn_noreturn_no_args,
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.int_unsigned,
.int_signed,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
=> false,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
=> true,
};
}
pub fn isFloat(self: Type) bool {
return switch (self.tag()) {
.f16,
@ -870,6 +1045,7 @@ pub const Type = extern union {
.fn_void_no_args => 0,
.fn_naked_noreturn_no_args => 0,
.fn_ccc_void_no_args => 0,
.function => @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise).param_types.len,
.f16,
.f32,
@ -893,6 +1069,12 @@ pub const Type = extern union {
.const_slice_u8,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -917,6 +1099,10 @@ pub const Type = extern union {
.fn_void_no_args => return,
.fn_naked_noreturn_no_args => return,
.fn_ccc_void_no_args => return,
.function => {
const payload = @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise);
std.mem.copy(Type, types, payload.param_types);
},
.f16,
.f32,
@ -940,6 +1126,68 @@ pub const Type = extern union {
.const_slice_u8,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> unreachable,
}
}
/// Asserts the type is a function.
pub fn fnParamType(self: Type, index: usize) Type {
switch (self.tag()) {
.function => {
const payload = @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise);
return payload.param_types[index];
},
.fn_noreturn_no_args,
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.@"undefined",
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -966,6 +1214,8 @@ pub const Type = extern union {
.fn_ccc_void_no_args,
=> Type.initTag(.void),
.function => @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise).return_type,
.f16,
.f32,
.f64,
@ -988,6 +1238,12 @@ pub const Type = extern union {
.const_slice_u8,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -1011,6 +1267,7 @@ pub const Type = extern union {
.fn_void_no_args => .Unspecified,
.fn_naked_noreturn_no_args => .Naked,
.fn_ccc_void_no_args => .C,
.function => @fieldParentPtr(Payload.Function, "base", self.ptr_otherwise).cc,
.f16,
.f32,
@ -1034,6 +1291,12 @@ pub const Type = extern union {
.const_slice_u8,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -1057,6 +1320,7 @@ pub const Type = extern union {
.fn_void_no_args => false,
.fn_naked_noreturn_no_args => false,
.fn_ccc_void_no_args => false,
.function => false,
.f16,
.f32,
@ -1080,6 +1344,12 @@ pub const Type = extern union {
.const_slice_u8,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -1107,6 +1377,12 @@ pub const Type = extern union {
.comptime_float,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -1133,6 +1409,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
@ -1154,6 +1431,12 @@ pub const Type = extern union {
.comptime_float,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -1171,6 +1454,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
@ -1211,6 +1495,12 @@ pub const Type = extern union {
.comptime_float,
.u8,
.i8,
.u16,
.i16,
.u32,
.i32,
.u64,
.i64,
.usize,
.isize,
.c_short,
@ -1228,6 +1518,7 @@ pub const Type = extern union {
.fn_void_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.function,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
@ -1254,8 +1545,14 @@ pub const Type = extern union {
// The first section of this enum are tags that require no payload.
u8,
i8,
isize,
u16,
i16,
u32,
i32,
u64,
i64,
usize,
isize,
c_short,
c_ushort,
c_int,
@ -1292,6 +1589,7 @@ pub const Type = extern union {
single_const_pointer,
int_signed,
int_unsigned,
function,
pub const last_no_payload_tag = Tag.const_slice_u8;
pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1;
@ -1330,6 +1628,14 @@ pub const Type = extern union {
bits: u16,
};
pub const Function = struct {
base: Payload = Payload{ .tag = .function },
param_types: []Type,
return_type: Type,
cc: std.builtin.CallingConvention,
};
};
};

View File

@ -23,8 +23,14 @@ pub const Value = extern union {
// The first section of this enum are tags that require no payload.
u8_type,
i8_type,
isize_type,
u16_type,
i16_type,
u32_type,
i32_type,
u64_type,
i64_type,
usize_type,
isize_type,
c_short_type,
c_ushort_type,
c_int_type,
@ -114,8 +120,14 @@ pub const Value = extern union {
} else switch (self.ptr_otherwise.tag) {
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -222,6 +234,12 @@ pub const Value = extern union {
while (true) switch (val.tag()) {
.u8_type => return out_stream.writeAll("u8"),
.i8_type => return out_stream.writeAll("i8"),
.u16_type => return out_stream.writeAll("u16"),
.i16_type => return out_stream.writeAll("i16"),
.u32_type => return out_stream.writeAll("u32"),
.i32_type => return out_stream.writeAll("i32"),
.u64_type => return out_stream.writeAll("u64"),
.i64_type => return out_stream.writeAll("i64"),
.isize_type => return out_stream.writeAll("isize"),
.usize_type => return out_stream.writeAll("usize"),
.c_short_type => return out_stream.writeAll("c_short"),
@ -308,8 +326,14 @@ pub const Value = extern union {
.u8_type => Type.initTag(.u8),
.i8_type => Type.initTag(.i8),
.isize_type => Type.initTag(.isize),
.u16_type => Type.initTag(.u16),
.i16_type => Type.initTag(.i16),
.u32_type => Type.initTag(.u32),
.i32_type => Type.initTag(.i32),
.u64_type => Type.initTag(.u64),
.i64_type => Type.initTag(.i64),
.usize_type => Type.initTag(.usize),
.isize_type => Type.initTag(.isize),
.c_short_type => Type.initTag(.c_short),
.c_ushort_type => Type.initTag(.c_ushort),
.c_int_type => Type.initTag(.c_int),
@ -366,8 +390,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -426,8 +456,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -487,8 +523,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -553,8 +595,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -648,8 +696,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -705,8 +759,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -807,8 +867,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -870,8 +936,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
@ -950,8 +1022,14 @@ pub const Value = extern union {
.ty,
.u8_type,
.i8_type,
.isize_type,
.u16_type,
.i16_type,
.u32_type,
.i32_type,
.u64_type,
.i64_type,
.usize_type,
.isize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,

View File

@ -34,7 +34,13 @@ pub const Inst = struct {
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
/// Function parameter value.
arg,
/// A labeled block of code, which can return a value.
block,
breakpoint,
/// Same as `break` but without an operand; the operand is assumed to be the void value.
breakvoid,
call,
compileerror,
/// Special case, has no textual representation.
@ -75,7 +81,10 @@ pub const Inst = struct {
pub fn TagToType(tag: Tag) type {
return switch (tag) {
.arg => Arg,
.block => Block,
.breakpoint => Breakpoint,
.breakvoid => BreakVoid,
.call => Call,
.declref => DeclRef,
.declref_str => DeclRefStr,
@ -115,6 +124,27 @@ pub const Inst = struct {
return @fieldParentPtr(T, "base", base);
}
pub const Arg = struct {
pub const base_tag = Tag.arg;
base: Inst,
positionals: struct {
index: usize,
},
kw_args: struct {},
};
pub const Block = struct {
pub const base_tag = Tag.block;
base: Inst,
positionals: struct {
label: []const u8,
body: Module.Body,
},
kw_args: struct {},
};
pub const Breakpoint = struct {
pub const base_tag = Tag.breakpoint;
base: Inst,
@ -123,6 +153,16 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const BreakVoid = struct {
pub const base_tag = Tag.breakvoid;
base: Inst,
positionals: struct {
label: []const u8,
},
kw_args: struct {},
};
pub const Call = struct {
pub const base_tag = Tag.call;
base: Inst,
@ -347,6 +387,14 @@ pub const Inst = struct {
kw_args: struct {},
pub const Builtin = enum {
i8,
u8,
i16,
u16,
i32,
u32,
i64,
u64,
isize,
usize,
c_short,
@ -378,6 +426,14 @@ pub const Inst = struct {
pub fn toTypedValue(self: Builtin) TypedValue {
return switch (self) {
.i8 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.i8_type) },
.u8 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.u8_type) },
.i16 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.i16_type) },
.u16 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.u16_type) },
.i32 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.i32_type) },
.u32 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.u32_type) },
.i64 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.i64_type) },
.u64 => .{ .ty = Type.initTag(.type), .val = Value.initTag(.u64_type) },
.isize => .{ .ty = Type.initTag(.type), .val = Value.initTag(.isize_type) },
.usize => .{ .ty = Type.initTag(.type), .val = Value.initTag(.usize_type) },
.c_short => .{ .ty = Type.initTag(.type), .val = Value.initTag(.c_short_type) },
@ -591,7 +647,10 @@ pub const Module = struct {
) @TypeOf(stream).Error!void {
// TODO I tried implementing this with an inline for loop and hit a compiler bug
switch (inst.tag) {
.arg => return self.writeInstToStreamGeneric(stream, .arg, inst, inst_table),
.block => return self.writeInstToStreamGeneric(stream, .block, inst, inst_table),
.breakpoint => return self.writeInstToStreamGeneric(stream, .breakpoint, inst, inst_table),
.breakvoid => return self.writeInstToStreamGeneric(stream, .breakvoid, inst, inst_table),
.call => return self.writeInstToStreamGeneric(stream, .call, inst, inst_table),
.declref => return self.writeInstToStreamGeneric(stream, .declref, inst, inst_table),
.declref_str => return self.writeInstToStreamGeneric(stream, .declref_str, inst, inst_table),
@ -691,7 +750,7 @@ pub const Module = struct {
},
bool => return stream.writeByte("01"[@boolToInt(param)]),
[]u8, []const u8 => return std.zig.renderStringLiteral(param, stream),
BigIntConst => return stream.print("{}", .{param}),
BigIntConst, usize => return stream.print("{}", .{param}),
TypedValue => unreachable, // this is a special case
*IrModule.Decl => unreachable, // this is a special case
else => |T| @compileError("unimplemented: rendering parameter of type " ++ @typeName(T)),
@ -718,7 +777,7 @@ pub const Module = struct {
};
pub fn parse(allocator: *Allocator, source: [:0]const u8) Allocator.Error!Module {
var global_name_map = std.StringHashMap(usize).init(allocator);
var global_name_map = std.StringHashMap(*Inst).init(allocator);
defer global_name_map.deinit();
var parser: Parser = .{
@ -752,22 +811,24 @@ const Parser = struct {
i: usize,
source: [:0]const u8,
decls: std.ArrayListUnmanaged(*Decl),
global_name_map: *std.StringHashMap(usize),
global_name_map: *std.StringHashMap(*Inst),
error_msg: ?ErrorMsg = null,
unnamed_index: usize,
const Body = struct {
instructions: std.ArrayList(*Inst),
name_map: std.StringHashMap(usize),
name_map: *std.StringHashMap(*Inst),
};
fn parseBody(self: *Parser) !Module.Body {
fn parseBody(self: *Parser, body_ctx: ?*Body) !Module.Body {
var name_map = std.StringHashMap(*Inst).init(self.allocator);
defer name_map.deinit();
var body_context = Body{
.instructions = std.ArrayList(*Inst).init(self.allocator),
.name_map = std.StringHashMap(usize).init(self.allocator),
.name_map = if (body_ctx) |bctx| bctx.name_map else &name_map,
};
defer body_context.instructions.deinit();
defer body_context.name_map.deinit();
try requireEatBytes(self, "{");
skipSpace(self);
@ -782,7 +843,7 @@ const Parser = struct {
skipSpace(self);
const decl = try parseInstruction(self, &body_context, ident);
const ident_index = body_context.instructions.items.len;
if (try body_context.name_map.put(ident, ident_index)) |_| {
if (try body_context.name_map.put(ident, decl.inst)) |_| {
return self.fail("redefinition of identifier '{}'", .{ident});
}
try body_context.instructions.append(decl.inst);
@ -866,12 +927,12 @@ const Parser = struct {
skipSpace(self);
try requireEatBytes(self, "=");
skipSpace(self);
const inst = try parseInstruction(self, null, ident);
const decl = try parseInstruction(self, null, ident);
const ident_index = self.decls.items.len;
if (try self.global_name_map.put(ident, ident_index)) |_| {
if (try self.global_name_map.put(ident, decl.inst)) |_| {
return self.fail("redefinition of identifier '{}'", .{ident});
}
try self.decls.append(self.allocator, inst);
try self.decls.append(self.allocator, decl);
},
' ', '\n' => self.i += 1,
0 => break,
@ -1032,7 +1093,7 @@ const Parser = struct {
};
}
switch (T) {
Module.Body => return parseBody(self),
Module.Body => return parseBody(self, body_ctx),
bool => {
const bool_value = switch (self.source[self.i]) {
'0' => false,
@ -1060,6 +1121,10 @@ const Parser = struct {
*Inst => return parseParameterInst(self, body_ctx),
[]u8, []const u8 => return self.parseStringLiteral(),
BigIntConst => return self.parseIntegerLiteral(),
usize => {
const big_int = try self.parseIntegerLiteral();
return big_int.to(usize) catch |err| return self.fail("integer literal: {}", .{@errorName(err)});
},
TypedValue => return self.fail("'const' is a special instruction; not legal in ZIR text", .{}),
*IrModule.Decl => return self.fail("'declval_in_module' is a special instruction; not legal in ZIR text", .{}),
else => @compileError("Unimplemented: ir parseParameterGeneric for type " ++ @typeName(T)),
@ -1075,7 +1140,7 @@ const Parser = struct {
};
const map = if (local_ref)
if (body_ctx) |bc|
&bc.name_map
bc.name_map
else
return self.fail("referencing a % instruction in global scope", .{})
else
@ -1107,11 +1172,7 @@ const Parser = struct {
return &declval.base;
}
};
if (local_ref) {
return body_ctx.?.instructions.items[kv.value];
} else {
return self.decls.items[kv.value].inst;
}
return kv.value;
}
fn generateName(self: *Parser) ![]u8 {
@ -1456,7 +1517,7 @@ const EmitZIR = struct {
fn emitBody(
self: *EmitZIR,
body: IrModule.Body,
body: ir.Body,
inst_table: *std.AutoHashMap(*ir.Inst, *Inst),
instructions: *std.ArrayList(*Inst),
) Allocator.Error!void {
@ -1466,6 +1527,57 @@ const EmitZIR = struct {
};
for (body.instructions) |inst| {
const new_inst = switch (inst.tag) {
.add => blk: {
const old_inst = inst.cast(ir.Inst.Add).?;
const new_inst = try self.arena.allocator.create(Inst.Add);
new_inst.* = .{
.base = .{
.src = inst.src,
.tag = Inst.Add.base_tag,
},
.positionals = .{
.lhs = try self.resolveInst(new_body, old_inst.args.lhs),
.rhs = try self.resolveInst(new_body, old_inst.args.rhs),
},
.kw_args = .{},
};
break :blk &new_inst.base;
},
.arg => blk: {
const old_inst = inst.cast(ir.Inst.Arg).?;
const new_inst = try self.arena.allocator.create(Inst.Arg);
new_inst.* = .{
.base = .{
.src = inst.src,
.tag = Inst.Arg.base_tag,
},
.positionals = .{ .index = old_inst.args.index },
.kw_args = .{},
};
break :blk &new_inst.base;
},
.block => blk: {
const old_inst = inst.cast(ir.Inst.Block).?;
const new_inst = try self.arena.allocator.create(Inst.Block);
var block_body = std.ArrayList(*Inst).init(self.allocator);
defer block_body.deinit();
try self.emitBody(old_inst.args.body, inst_table, &block_body);
new_inst.* = .{
.base = .{
.src = inst.src,
.tag = Inst.Block.base_tag,
},
.positionals = .{
.label = try self.autoName(),
.body = .{ .instructions = block_body.toOwnedSlice() },
},
.kw_args = .{},
};
break :blk &new_inst.base;
},
.breakpoint => try self.emitTrivial(inst.src, Inst.Breakpoint),
.call => blk: {
const old_inst = inst.cast(ir.Inst.Call).?;
@ -1660,6 +1772,14 @@ const EmitZIR = struct {
fn emitType(self: *EmitZIR, src: usize, ty: Type) Allocator.Error!*Decl {
switch (ty.tag()) {
.i8 => return self.emitPrimitive(src, .i8),
.u8 => return self.emitPrimitive(src, .u8),
.i16 => return self.emitPrimitive(src, .i16),
.u16 => return self.emitPrimitive(src, .u16),
.i32 => return self.emitPrimitive(src, .i32),
.u32 => return self.emitPrimitive(src, .u32),
.i64 => return self.emitPrimitive(src, .i64),
.u64 => return self.emitPrimitive(src, .u64),
.isize => return self.emitPrimitive(src, .isize),
.usize => return self.emitPrimitive(src, .usize),
.c_short => return self.emitPrimitive(src, .c_short),