stage2: if AST=>ZIR
parent be0546d877
commit 8e425c0c8d
@@ -959,6 +959,8 @@ pub const Node = struct {
     };
 
     /// The params are directly after the FnProto in memory.
+    /// TODO have a flags field for the optional nodes, and have them appended
+    /// before or after the parameters in memory.
     pub const FnProto = struct {
         base: Node = Node{ .id = .FnProto },
         doc_comments: ?*DocComment,
@@ -303,14 +303,14 @@ pub const Scope = struct {
        switch (self.tag) {
            .block => return self.cast(Block).?.arena,
            .decl => return &self.cast(DeclAnalysis).?.arena.allocator,
-            .gen_zir => return &self.cast(GenZIR).?.arena.allocator,
+            .gen_zir => return self.cast(GenZIR).?.arena,
            .zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator,
            .file => unreachable,
        }
    }
 
-    /// Asserts the scope has a parent which is a DeclAnalysis and
-    /// returns the Decl.
+    /// If the scope has a parent which is a `DeclAnalysis`,
+    /// returns the `Decl`, otherwise returns `null`.
    pub fn decl(self: *Scope) ?*Decl {
        return switch (self.tag) {
            .block => self.cast(Block).?.decl,
@@ -653,7 +653,7 @@ pub const Scope = struct {
        label: ?Label = null,
 
        pub const Label = struct {
-            name: []const u8,
+            zir_block: *zir.Inst.Block,
            results: ArrayListUnmanaged(*Inst),
            block_inst: *Inst.Block,
        };
@@ -674,8 +674,8 @@ pub const Scope = struct {
        pub const base_tag: Tag = .gen_zir;
        base: Scope = Scope{ .tag = base_tag },
        decl: *Decl,
-        arena: std.heap.ArenaAllocator,
-        instructions: std.ArrayList(*zir.Inst),
+        arena: *Allocator,
+        instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
    };
};
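
Note: after this hunk, GenZIR no longer owns an ArenaAllocator; it borrows a *Allocator from whoever created the scope, and its instruction list becomes an unmanaged ArrayList freed with the general-purpose allocator. A minimal sketch of the resulting ownership pattern (the names gpa, decl, and scope here are illustrative, not from the commit):

    // The caller owns the arena; the scope only borrows its allocator.
    var arena = std.heap.ArenaAllocator.init(gpa);
    defer arena.deinit(); // one deinit frees everything the scope allocated
    var scope: Scope.GenZIR = .{
        .decl = decl,
        .arena = &arena.allocator, // borrowed, not owned
    };
    defer scope.instructions.deinit(gpa); // unmanaged list is freed with gpa

This is what lets astGenIf below create child then/else scopes that share the parent scope's arena instead of each owning one.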
@@ -1115,19 +1115,19 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
    // This arena allocator's memory is discarded at the end of this function. It is used
    // to determine the type of the function, and hence the type of the decl, which is needed
    // to complete the Decl analysis.
+    var fn_type_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
+    defer fn_type_scope_arena.deinit();
    var fn_type_scope: Scope.GenZIR = .{
        .decl = decl,
-        .arena = std.heap.ArenaAllocator.init(self.gpa),
-        .instructions = std.ArrayList(*zir.Inst).init(self.gpa),
+        .arena = &fn_type_scope_arena.allocator,
    };
-    defer fn_type_scope.arena.deinit();
-    defer fn_type_scope.instructions.deinit();
+    defer fn_type_scope.instructions.deinit(self.gpa);
 
    const body_node = fn_proto.body_node orelse
        return self.failTok(&fn_type_scope.base, fn_proto.fn_token, "TODO implement extern functions", .{});
 
    const param_decls = fn_proto.params();
-    const param_types = try fn_type_scope.arena.allocator.alloc(*zir.Inst, param_decls.len);
+    const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_decls.len);
    for (param_decls) |param_decl, i| {
        const param_type_node = switch (param_decl.param_type) {
            .var_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
@@ -1190,24 +1190,24 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
    const fn_zir = blk: {
        // This scope's arena memory is discarded after the ZIR generation
        // pass completes, and semantic analysis of it completes.
+        var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
+        errdefer gen_scope_arena.deinit();
        var gen_scope: Scope.GenZIR = .{
            .decl = decl,
-            .arena = std.heap.ArenaAllocator.init(self.gpa),
-            .instructions = std.ArrayList(*zir.Inst).init(self.gpa),
+            .arena = &gen_scope_arena.allocator,
        };
-        errdefer gen_scope.arena.deinit();
-        defer gen_scope.instructions.deinit();
+        defer gen_scope.instructions.deinit(self.gpa);
 
        const body_block = body_node.cast(ast.Node.Block).?;
 
        try self.astGenBlock(&gen_scope.base, body_block);
 
-        const fn_zir = try gen_scope.arena.allocator.create(Fn.ZIR);
+        const fn_zir = try gen_scope_arena.allocator.create(Fn.ZIR);
        fn_zir.* = .{
            .body = .{
-                .instructions = try gen_scope.arena.allocator.dupe(*zir.Inst, gen_scope.instructions.items),
+                .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
            },
-            .arena = gen_scope.arena.state,
+            .arena = gen_scope_arena.state,
        };
        break :blk fn_zir;
    };
@@ -1351,9 +1351,70 @@ fn astGenIf(self: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir
            return self.failNode(scope, payload, "TODO implement astGenIf for error unions", .{});
        }
    }
-    const cond = try self.astGenExpr(scope, if_node.condition);
-    const body = try self.astGenExpr(scope, if_node.condition);
-    return self.failNode(scope, if_node.condition, "TODO implement astGenIf", .{});
+    var block_scope: Scope.GenZIR = .{
+        .decl = scope.decl().?,
+        .arena = scope.arena(),
+        .instructions = .{},
+    };
+    defer block_scope.instructions.deinit(self.gpa);
+
+    const cond = try self.astGenExpr(&block_scope.base, if_node.condition);
+
+    const tree = scope.tree();
+    const if_src = tree.token_locs[if_node.if_token].start;
+    const condbr = try self.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
+        .condition = cond,
+        .true_body = undefined, // populated below
+        .false_body = undefined, // populated below
+    }, .{});
+
+    const block = try self.addZIRInstBlock(scope, if_src, .{
+        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+    });
+    var then_scope: Scope.GenZIR = .{
+        .decl = block_scope.decl,
+        .arena = block_scope.arena,
+        .instructions = .{},
+    };
+    defer then_scope.instructions.deinit(self.gpa);
+
+    const then_result = try self.astGenExpr(&then_scope.base, if_node.body);
+    const then_src = tree.token_locs[if_node.body.lastToken()].start;
+    _ = try self.addZIRInst(&then_scope.base, then_src, zir.Inst.Break, .{
+        .block = block,
+        .operand = then_result,
+    }, .{});
+    condbr.positionals.true_body = .{
+        .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
+    };
+
+    var else_scope: Scope.GenZIR = .{
+        .decl = block_scope.decl,
+        .arena = block_scope.arena,
+        .instructions = .{},
+    };
+    defer else_scope.instructions.deinit(self.gpa);
+
+    if (if_node.@"else") |else_node| {
+        const else_result = try self.astGenExpr(&else_scope.base, else_node.body);
+        const else_src = tree.token_locs[else_node.body.lastToken()].start;
+        _ = try self.addZIRInst(&else_scope.base, else_src, zir.Inst.Break, .{
+            .block = block,
+            .operand = else_result,
+        }, .{});
+    } else {
+        // TODO Optimization opportunity: we can avoid an allocation and a memcpy here
+        // by directly allocating the body for this one instruction.
+        const else_src = tree.token_locs[if_node.lastToken()].start;
+        _ = try self.addZIRInst(&else_scope.base, else_src, zir.Inst.BreakVoid, .{
+            .block = block,
+        }, .{});
+    }
+    condbr.positionals.false_body = .{
+        .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+    };
+
+    return &block.base;
 }
 
 fn astGenControlFlowExpression(
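
Note: the lowering above is a block wrapping a condbr, with each branch ending in a break back to the block. Schematically (a sketch of the structure as Zig comments, not exact ZIR text syntax):

    // Source:
    //     const x = if (a) b else c;
    // Lowering, roughly:
    //     %x = block({
    //         %cond = <lower a>
    //         condbr(%cond, {
    //             <lower b>; break(%x_block, %b)
    //         }, {
    //             <lower c>; break(%x_block, %c)
    //         })
    //     })
    // An `if` with no `else` ends the false body with breakvoid instead,
    // so the block's result is void.

Because both branches break to the same block, the block is the one place where the possible results meet; during analysis each break appends its operand to the enclosing Scope.Block's Label.results list.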
@@ -1379,12 +1440,12 @@ fn astGenControlFlowExpression(
 fn astGenIdent(self: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerError!*zir.Inst {
    const tree = scope.tree();
    const ident_name = tree.tokenSlice(ident.token);
+    const src = tree.token_locs[ident.token].start;
    if (mem.eql(u8, ident_name, "_")) {
        return self.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
    }
 
    if (getSimplePrimitiveValue(ident_name)) |typed_value| {
-        const src = tree.token_locs[ident.token].start;
        return self.addZIRInstConst(scope, src, typed_value);
    }
 
@@ -1408,7 +1469,6 @@ fn astGenIdent(self: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerE
            64 => if (is_signed) Value.initTag(.i64_type) else Value.initTag(.u64_type),
            else => return self.failNode(scope, &ident.base, "TODO implement arbitrary integer bitwidth types", .{}),
        };
-        const src = tree.token_locs[ident.token].start;
        return self.addZIRInstConst(scope, src, .{
            .ty = Type.initTag(.type),
            .val = val,
@@ -1417,10 +1477,21 @@ fn astGenIdent(self: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerE
    }
 
    if (self.lookupDeclName(scope, ident_name)) |decl| {
-        const src = tree.token_locs[ident.token].start;
        return try self.addZIRInst(scope, src, zir.Inst.DeclValInModule, .{ .decl = decl }, .{});
    }
 
+    // Function parameter
+    if (scope.decl()) |decl| {
+        if (tree.root_node.decls()[decl.src_index].cast(ast.Node.FnProto)) |fn_proto| {
+            for (fn_proto.params()) |param, i| {
+                const param_name = tree.tokenSlice(param.name_token.?);
+                if (mem.eql(u8, param_name, ident_name)) {
+                    return try self.addZIRInst(scope, src, zir.Inst.Arg, .{ .index = i }, .{});
+                }
+            }
+        }
+    }
+
    return self.failNode(scope, &ident.base, "TODO implement local variable identifier lookup", .{});
 }
 
@@ -1563,7 +1634,7 @@ fn astGenCall(self: *Module, scope: *Scope, call: *ast.Node.Call) InnerError!*zi
    const lhs = try self.astGenExpr(scope, call.lhs);
 
    const param_nodes = call.params();
-    const args = try scope.cast(Scope.GenZIR).?.arena.allocator.alloc(*zir.Inst, param_nodes.len);
+    const args = try scope.cast(Scope.GenZIR).?.arena.alloc(*zir.Inst, param_nodes.len);
    for (param_nodes) |param_node, i| {
        args[i] = try self.astGenExpr(scope, param_node);
    }
@@ -2239,7 +2310,7 @@ fn newZIRInst(
    comptime T: type,
    positionals: std.meta.fieldInfo(T, "positionals").field_type,
    kw_args: std.meta.fieldInfo(T, "kw_args").field_type,
-) !*zir.Inst {
+) !*T {
    const inst = try gpa.create(T);
    inst.* = .{
        .base = .{
@@ -2249,7 +2320,22 @@ fn newZIRInst(
        .positionals = positionals,
        .kw_args = kw_args,
    };
-    return &inst.base;
+    return inst;
 }
 
+fn addZIRInstSpecial(
+    self: *Module,
+    scope: *Scope,
+    src: usize,
+    comptime T: type,
+    positionals: std.meta.fieldInfo(T, "positionals").field_type,
+    kw_args: std.meta.fieldInfo(T, "kw_args").field_type,
+) !*T {
+    const gen_zir = scope.cast(Scope.GenZIR).?;
+    try gen_zir.instructions.ensureCapacity(self.gpa, gen_zir.instructions.items.len + 1);
+    const inst = try newZIRInst(gen_zir.arena, src, T, positionals, kw_args);
+    gen_zir.instructions.appendAssumeCapacity(&inst.base);
+    return inst;
+}
+
 fn addZIRInst(
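
Note: newZIRInst now returns !*T rather than the type-erased !*zir.Inst, and addZIRInstSpecial preserves that concrete type. That is what lets astGenIf above write through condbr.positionals.true_body with no cast; addZIRInst keeps the old type-erased interface by returning &inst_special.base. A condensed usage sketch, mirroring the astGenIf hunk (then_insts stands in for the duplicated instruction slice):

    // addZIRInstSpecial returns *zir.Inst.CondBr, so the instruction's own
    // fields stay reachable after it has been appended to the scope:
    const condbr = try self.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
        .condition = cond,
        .true_body = undefined, // patched once the branch body is generated
        .false_body = undefined,
    }, .{});
    condbr.positionals.true_body = .{ .instructions = then_insts }; // no cast needed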
@@ -2260,11 +2346,8 @@ fn addZIRInst(
    positionals: std.meta.fieldInfo(T, "positionals").field_type,
    kw_args: std.meta.fieldInfo(T, "kw_args").field_type,
 ) !*zir.Inst {
-    const gen_zir = scope.cast(Scope.GenZIR).?;
-    try gen_zir.instructions.ensureCapacity(gen_zir.instructions.items.len + 1);
-    const inst = try newZIRInst(&gen_zir.arena.allocator, src, T, positionals, kw_args);
-    gen_zir.instructions.appendAssumeCapacity(inst);
-    return inst;
+    const inst_special = try self.addZIRInstSpecial(scope, src, T, positionals, kw_args);
+    return &inst_special.base;
 }
 
 /// TODO The existence of this function is a workaround for a bug in stage1.
@@ -2273,6 +2356,12 @@ fn addZIRInstConst(self: *Module, scope: *Scope, src: usize, typed_value: TypedV
    return self.addZIRInst(scope, src, zir.Inst.Const, P{ .typed_value = typed_value }, .{});
 }
 
+/// TODO The existence of this function is a workaround for a bug in stage1.
+fn addZIRInstBlock(self: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block {
+    const P = std.meta.fieldInfo(zir.Inst.Block, "positionals").field_type;
+    return self.addZIRInstSpecial(scope, src, zir.Inst.Block, P{ .body = body }, .{});
+}
+
 fn addNewInst(self: *Module, block: *Scope.Block, src: usize, ty: Type, comptime T: type) !*T {
    const inst = try block.arena.create(T);
    inst.* = .{
@@ -2403,6 +2492,7 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
    switch (old_inst.tag) {
        .arg => return self.analyzeInstArg(scope, old_inst.cast(zir.Inst.Arg).?),
        .block => return self.analyzeInstBlock(scope, old_inst.cast(zir.Inst.Block).?),
+        .@"break" => return self.analyzeInstBreak(scope, old_inst.cast(zir.Inst.Break).?),
        .breakpoint => return self.analyzeInstBreakpoint(scope, old_inst.cast(zir.Inst.Breakpoint).?),
        .breakvoid => return self.analyzeInstBreakVoid(scope, old_inst.cast(zir.Inst.BreakVoid).?),
        .call => return self.analyzeInstCall(scope, old_inst.cast(zir.Inst.Call).?),
@@ -2559,7 +2649,7 @@ fn analyzeInstBlock(self: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerEr
        .arena = parent_block.arena,
        // TODO @as here is working around a miscompilation compiler bug :(
        .label = @as(?Scope.Block.Label, Scope.Block.Label{
-            .name = inst.positionals.label,
+            .zir_block = inst,
            .results = .{},
            .block_inst = block_inst,
        }),
@@ -2588,25 +2678,39 @@ fn analyzeInstBreakpoint(self: *Module, scope: *Scope, inst: *zir.Inst.Breakpoin
    return self.addNewInstArgs(b, inst.base.src, Type.initTag(.void), Inst.Breakpoint, {});
 }
 
-fn analyzeInstBreakVoid(self: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
-    const label_name = inst.positionals.label;
-    const void_inst = try self.constVoid(scope, inst.base.src);
+fn analyzeInstBreak(self: *Module, scope: *Scope, inst: *zir.Inst.Break) InnerError!*Inst {
+    const operand = try self.resolveInst(scope, inst.positionals.operand);
+    const block = inst.positionals.block;
+    return self.analyzeBreak(scope, inst.base.src, block, operand);
+}
+
+fn analyzeInstBreakVoid(self: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
+    const block = inst.positionals.block;
+    const void_inst = try self.constVoid(scope, inst.base.src);
+    return self.analyzeBreak(scope, inst.base.src, block, void_inst);
+}
+
+fn analyzeBreak(
+    self: *Module,
+    scope: *Scope,
+    src: usize,
+    zir_block: *zir.Inst.Block,
+    operand: *Inst,
+) InnerError!*Inst {
    var opt_block = scope.cast(Scope.Block);
    while (opt_block) |block| {
        if (block.label) |*label| {
-            if (mem.eql(u8, label.name, label_name)) {
-                try label.results.append(self.gpa, void_inst);
-                const b = try self.requireRuntimeBlock(scope, inst.base.src);
-                return self.addNewInstArgs(b, inst.base.src, Type.initTag(.noreturn), Inst.BreakVoid, .{
+            if (label.zir_block == zir_block) {
+                try label.results.append(self.gpa, operand);
+                const b = try self.requireRuntimeBlock(scope, src);
+                return self.addNewInstArgs(b, src, Type.initTag(.noreturn), Inst.Br, .{
                    .block = label.block_inst,
+                    .operand = operand,
                });
            }
        }
        opt_block = block.parent;
-    } else {
-        return self.fail(scope, inst.base.src, "use of undeclared label '{}'", .{label_name});
-    }
+    } else unreachable;
 }
 
 fn analyzeInstDeclRefStr(self: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
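
Note: breaks are now resolved by pointer identity on the target ZIR block instead of by comparing label strings, so shadowed or duplicate labels can no longer bind to the wrong block, and the old "use of undeclared label" error path disappears: a dangling break is unrepresentable after AstGen, which the `while ... else unreachable` encodes. A condensed sketch of the walk (hypothetical helper name; the loop body follows the diff):

    // Walk enclosing runtime blocks; match on block identity, not on a name.
    fn findLabel(start: ?*Scope.Block, zir_block: *zir.Inst.Block) *Scope.Block.Label {
        var opt_block = start;
        while (opt_block) |block| {
            if (block.label) |*label| {
                if (label.zir_block == zir_block) return label;
            }
            opt_block = block.parent;
        } else unreachable; // AstGen only emits breaks to enclosing blocks
    }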
@@ -418,8 +418,9 @@ const Function = struct {
                .assembly => return self.genAsm(inst.cast(ir.Inst.Assembly).?, arch),
                .bitcast => return self.genBitCast(inst.cast(ir.Inst.BitCast).?),
                .block => return self.genBlock(inst.cast(ir.Inst.Block).?, arch),
+                .br => return self.genBr(inst.cast(ir.Inst.Br).?, arch),
                .breakpoint => return self.genBreakpoint(inst.src, arch),
-                .breakvoid => return self.genBreakVoid(inst.cast(ir.Inst.BreakVoid).?, arch),
+                .brvoid => return self.genBrVoid(inst.cast(ir.Inst.BrVoid).?, arch),
                .call => return self.genCall(inst.cast(ir.Inst.Call).?, arch),
                .cmp => return self.genCmp(inst.cast(ir.Inst.Cmp).?, arch),
                .condbr => return self.genCondBr(inst.cast(ir.Inst.CondBr).?, arch),
@@ -767,7 +768,13 @@ const Function = struct {
        }
    }
 
-    fn genBreakVoid(self: *Function, inst: *ir.Inst.BreakVoid, comptime arch: std.Target.Cpu.Arch) !MCValue {
+    fn genBr(self: *Function, inst: *ir.Inst.Br, comptime arch: std.Target.Cpu.Arch) !MCValue {
+        switch (arch) {
+            else => return self.fail(inst.base.src, "TODO implement br for {}", .{self.target.cpu.arch}),
+        }
+    }
+
+    fn genBrVoid(self: *Function, inst: *ir.Inst.BrVoid, comptime arch: std.Target.Cpu.Arch) !MCValue {
        // Emit a jump with a relocation. It will be patched up after the block ends.
        try inst.args.block.codegen.relocs.ensureCapacity(self.gpa, inst.args.block.codegen.relocs.items.len + 1);
 
@@ -780,7 +787,7 @@ const Function = struct {
                // Leave the jump offset undefined
                inst.args.block.codegen.relocs.appendAssumeCapacity(.{ .rel32 = self.code.items.len - 4 });
            },
-            else => return self.fail(inst.base.src, "TODO implement breakvoid for {}", .{self.target.cpu.arch}),
+            else => return self.fail(inst.base.src, "TODO implement brvoid for {}", .{self.target.cpu.arch}),
        }
        return .none;
    }
@@ -46,8 +46,9 @@ pub const Inst = struct {
        assembly,
        bitcast,
        block,
+        br,
        breakpoint,
-        breakvoid,
+        brvoid,
        call,
        cmp,
        condbr,
@@ -80,7 +81,8 @@ pub const Inst = struct {
            .sub,
            => false,
 
-            .breakvoid,
+            .br,
+            .brvoid,
            .condbr,
            .ret,
            .retvoid,
@@ -162,14 +164,23 @@ pub const Inst = struct {
        codegen: codegen.BlockData = .{},
    };
 
+    pub const Br = struct {
+        pub const base_tag = Tag.br;
+        base: Inst,
+        args: struct {
+            block: *Block,
+            operand: *Inst,
+        },
+    };
+
    pub const Breakpoint = struct {
        pub const base_tag = Tag.breakpoint;
        base: Inst,
        args: void,
    };
 
-    pub const BreakVoid = struct {
-        pub const base_tag = Tag.breakvoid;
+    pub const BrVoid = struct {
+        pub const base_tag = Tag.brvoid;
        base: Inst,
        args: struct {
            block: *Block,
@@ -38,6 +38,8 @@ pub const Inst = struct {
        arg,
        /// A labeled block of code, which can return a value.
        block,
+        /// Return a value from a `Block`.
+        @"break",
        breakpoint,
        /// Same as `break` but without an operand; the operand is assumed to be the void value.
        breakvoid,
@@ -85,6 +87,7 @@ pub const Inst = struct {
        return switch (tag) {
            .arg => Arg,
            .block => Block,
+            .@"break" => Break,
            .breakpoint => Breakpoint,
            .breakvoid => BreakVoid,
            .call => Call,
@@ -143,12 +146,22 @@ pub const Inst = struct {
        base: Inst,
 
        positionals: struct {
-            label: []const u8,
            body: Module.Body,
        },
        kw_args: struct {},
    };
 
+    pub const Break = struct {
+        pub const base_tag = Tag.@"break";
+        base: Inst,
+
+        positionals: struct {
+            block: *Block,
+            operand: *Inst,
+        },
+        kw_args: struct {},
+    };
+
    pub const Breakpoint = struct {
        pub const base_tag = Tag.breakpoint;
        base: Inst,
@@ -162,7 +175,7 @@ pub const Inst = struct {
        base: Inst,
 
        positionals: struct {
-            label: []const u8,
+            block: *Block,
        },
        kw_args: struct {},
    };
@@ -610,8 +623,6 @@ pub const Module = struct {
        self.writeToStream(std.heap.page_allocator, std.io.getStdErr().outStream()) catch {};
    }
 
-    const InstPtrTable = std.AutoHashMap(*Inst, struct { inst: *Inst, index: ?usize, name: []const u8 });
-
    const DeclAndIndex = struct {
        decl: *Decl,
        index: usize,
@@ -645,84 +656,100 @@ pub const Module = struct {
    /// The allocator is used for temporary storage, but this function always returns
    /// with no resources allocated.
    pub fn writeToStream(self: Module, allocator: *Allocator, stream: var) !void {
-        // First, build a map of *Inst to @ or % indexes
-        var inst_table = InstPtrTable.init(allocator);
-        defer inst_table.deinit();
+        var write = Writer{
+            .module = &self,
+            .inst_table = InstPtrTable.init(allocator),
+            .block_table = std.AutoHashMap(*Inst.Block, []const u8).init(allocator),
+            .arena = std.heap.ArenaAllocator.init(allocator),
+            .indent = 2,
+        };
+        defer write.arena.deinit();
+        defer write.inst_table.deinit();
+        defer write.block_table.deinit();
 
-        try inst_table.ensureCapacity(self.decls.len);
+        // First, build a map of *Inst to @ or % indexes
+        try write.inst_table.ensureCapacity(self.decls.len);
 
        for (self.decls) |decl, decl_i| {
-            try inst_table.putNoClobber(decl.inst, .{ .inst = decl.inst, .index = null, .name = decl.name });
+            try write.inst_table.putNoClobber(decl.inst, .{ .inst = decl.inst, .index = null, .name = decl.name });
 
            if (decl.inst.cast(Inst.Fn)) |fn_inst| {
                for (fn_inst.positionals.body.instructions) |inst, inst_i| {
-                    try inst_table.putNoClobber(inst, .{ .inst = inst, .index = inst_i, .name = undefined });
+                    try write.inst_table.putNoClobber(inst, .{ .inst = inst, .index = inst_i, .name = undefined });
                }
            }
        }
 
        for (self.decls) |decl, i| {
            try stream.print("@{} ", .{decl.name});
-            try self.writeInstToStream(stream, decl.inst, &inst_table, 2);
+            try write.writeInstToStream(stream, decl.inst);
            try stream.writeByte('\n');
        }
    }
+};
 
+const InstPtrTable = std.AutoHashMap(*Inst, struct { inst: *Inst, index: ?usize, name: []const u8 });
+
+const Writer = struct {
+    module: *const Module,
+    inst_table: InstPtrTable,
+    block_table: std.AutoHashMap(*Inst.Block, []const u8),
+    arena: std.heap.ArenaAllocator,
+    indent: usize,
+
    fn writeInstToStream(
-        self: Module,
+        self: *Writer,
        stream: var,
        inst: *Inst,
-        inst_table: *const InstPtrTable,
-        indent: usize,
-    ) @TypeOf(stream).Error!void {
+    ) (@TypeOf(stream).Error || error{OutOfMemory})!void {
        // TODO I tried implementing this with an inline for loop and hit a compiler bug
        switch (inst.tag) {
-            .arg => return self.writeInstToStreamGeneric(stream, .arg, inst, inst_table, indent),
-            .block => return self.writeInstToStreamGeneric(stream, .block, inst, inst_table, indent),
-            .breakpoint => return self.writeInstToStreamGeneric(stream, .breakpoint, inst, inst_table, indent),
-            .breakvoid => return self.writeInstToStreamGeneric(stream, .breakvoid, inst, inst_table, indent),
-            .call => return self.writeInstToStreamGeneric(stream, .call, inst, inst_table, indent),
-            .declref => return self.writeInstToStreamGeneric(stream, .declref, inst, inst_table, indent),
-            .declref_str => return self.writeInstToStreamGeneric(stream, .declref_str, inst, inst_table, indent),
-            .declval => return self.writeInstToStreamGeneric(stream, .declval, inst, inst_table, indent),
-            .declval_in_module => return self.writeInstToStreamGeneric(stream, .declval_in_module, inst, inst_table, indent),
-            .compileerror => return self.writeInstToStreamGeneric(stream, .compileerror, inst, inst_table, indent),
-            .@"const" => return self.writeInstToStreamGeneric(stream, .@"const", inst, inst_table, indent),
-            .str => return self.writeInstToStreamGeneric(stream, .str, inst, inst_table, indent),
-            .int => return self.writeInstToStreamGeneric(stream, .int, inst, inst_table, indent),
-            .inttype => return self.writeInstToStreamGeneric(stream, .inttype, inst, inst_table, indent),
-            .ptrtoint => return self.writeInstToStreamGeneric(stream, .ptrtoint, inst, inst_table, indent),
-            .fieldptr => return self.writeInstToStreamGeneric(stream, .fieldptr, inst, inst_table, indent),
-            .deref => return self.writeInstToStreamGeneric(stream, .deref, inst, inst_table, indent),
-            .as => return self.writeInstToStreamGeneric(stream, .as, inst, inst_table, indent),
-            .@"asm" => return self.writeInstToStreamGeneric(stream, .@"asm", inst, inst_table, indent),
-            .@"unreachable" => return self.writeInstToStreamGeneric(stream, .@"unreachable", inst, inst_table, indent),
-            .@"return" => return self.writeInstToStreamGeneric(stream, .@"return", inst, inst_table, indent),
-            .returnvoid => return self.writeInstToStreamGeneric(stream, .returnvoid, inst, inst_table, indent),
-            .@"fn" => return self.writeInstToStreamGeneric(stream, .@"fn", inst, inst_table, indent),
-            .@"export" => return self.writeInstToStreamGeneric(stream, .@"export", inst, inst_table, indent),
-            .primitive => return self.writeInstToStreamGeneric(stream, .primitive, inst, inst_table, indent),
-            .fntype => return self.writeInstToStreamGeneric(stream, .fntype, inst, inst_table, indent),
-            .intcast => return self.writeInstToStreamGeneric(stream, .intcast, inst, inst_table, indent),
-            .bitcast => return self.writeInstToStreamGeneric(stream, .bitcast, inst, inst_table, indent),
-            .elemptr => return self.writeInstToStreamGeneric(stream, .elemptr, inst, inst_table, indent),
-            .add => return self.writeInstToStreamGeneric(stream, .add, inst, inst_table, indent),
-            .sub => return self.writeInstToStreamGeneric(stream, .sub, inst, inst_table, indent),
-            .cmp => return self.writeInstToStreamGeneric(stream, .cmp, inst, inst_table, indent),
-            .condbr => return self.writeInstToStreamGeneric(stream, .condbr, inst, inst_table, indent),
-            .isnull => return self.writeInstToStreamGeneric(stream, .isnull, inst, inst_table, indent),
-            .isnonnull => return self.writeInstToStreamGeneric(stream, .isnonnull, inst, inst_table, indent),
+            .arg => return self.writeInstToStreamGeneric(stream, .arg, inst),
+            .block => return self.writeInstToStreamGeneric(stream, .block, inst),
+            .@"break" => return self.writeInstToStreamGeneric(stream, .@"break", inst),
+            .breakpoint => return self.writeInstToStreamGeneric(stream, .breakpoint, inst),
+            .breakvoid => return self.writeInstToStreamGeneric(stream, .breakvoid, inst),
+            .call => return self.writeInstToStreamGeneric(stream, .call, inst),
+            .declref => return self.writeInstToStreamGeneric(stream, .declref, inst),
+            .declref_str => return self.writeInstToStreamGeneric(stream, .declref_str, inst),
+            .declval => return self.writeInstToStreamGeneric(stream, .declval, inst),
+            .declval_in_module => return self.writeInstToStreamGeneric(stream, .declval_in_module, inst),
+            .compileerror => return self.writeInstToStreamGeneric(stream, .compileerror, inst),
+            .@"const" => return self.writeInstToStreamGeneric(stream, .@"const", inst),
+            .str => return self.writeInstToStreamGeneric(stream, .str, inst),
+            .int => return self.writeInstToStreamGeneric(stream, .int, inst),
+            .inttype => return self.writeInstToStreamGeneric(stream, .inttype, inst),
+            .ptrtoint => return self.writeInstToStreamGeneric(stream, .ptrtoint, inst),
+            .fieldptr => return self.writeInstToStreamGeneric(stream, .fieldptr, inst),
+            .deref => return self.writeInstToStreamGeneric(stream, .deref, inst),
+            .as => return self.writeInstToStreamGeneric(stream, .as, inst),
+            .@"asm" => return self.writeInstToStreamGeneric(stream, .@"asm", inst),
+            .@"unreachable" => return self.writeInstToStreamGeneric(stream, .@"unreachable", inst),
+            .@"return" => return self.writeInstToStreamGeneric(stream, .@"return", inst),
+            .returnvoid => return self.writeInstToStreamGeneric(stream, .returnvoid, inst),
+            .@"fn" => return self.writeInstToStreamGeneric(stream, .@"fn", inst),
+            .@"export" => return self.writeInstToStreamGeneric(stream, .@"export", inst),
+            .primitive => return self.writeInstToStreamGeneric(stream, .primitive, inst),
+            .fntype => return self.writeInstToStreamGeneric(stream, .fntype, inst),
+            .intcast => return self.writeInstToStreamGeneric(stream, .intcast, inst),
+            .bitcast => return self.writeInstToStreamGeneric(stream, .bitcast, inst),
+            .elemptr => return self.writeInstToStreamGeneric(stream, .elemptr, inst),
+            .add => return self.writeInstToStreamGeneric(stream, .add, inst),
+            .sub => return self.writeInstToStreamGeneric(stream, .sub, inst),
+            .cmp => return self.writeInstToStreamGeneric(stream, .cmp, inst),
+            .condbr => return self.writeInstToStreamGeneric(stream, .condbr, inst),
+            .isnull => return self.writeInstToStreamGeneric(stream, .isnull, inst),
+            .isnonnull => return self.writeInstToStreamGeneric(stream, .isnonnull, inst),
        }
    }
 
    fn writeInstToStreamGeneric(
-        self: Module,
+        self: *Writer,
        stream: var,
        comptime inst_tag: Inst.Tag,
        base: *Inst,
-        inst_table: *const InstPtrTable,
-        indent: usize,
-    ) @TypeOf(stream).Error!void {
+    ) (@TypeOf(stream).Error || error{OutOfMemory})!void {
        const SpecificInst = Inst.TagToType(inst_tag);
        const inst = @fieldParentPtr(SpecificInst, "base", base);
        const Positionals = @TypeOf(inst.positionals);
@@ -732,7 +759,7 @@ pub const Module = struct {
            if (i != 0) {
                try stream.writeAll(", ");
            }
-            try self.writeParamToStream(stream, @field(inst.positionals, arg_field.name), inst_table, indent);
+            try self.writeParamToStream(stream, @field(inst.positionals, arg_field.name));
        }
 
        comptime var need_comma = pos_fields.len != 0;
@@ -742,13 +769,13 @@ pub const Module = struct {
                if (@field(inst.kw_args, arg_field.name)) |non_optional| {
                    if (need_comma) try stream.writeAll(", ");
                    try stream.print("{}=", .{arg_field.name});
-                    try self.writeParamToStream(stream, non_optional, inst_table, indent);
+                    try self.writeParamToStream(stream, non_optional);
                    need_comma = true;
                }
            } else {
                if (need_comma) try stream.writeAll(", ");
                try stream.print("{}=", .{arg_field.name});
-                try self.writeParamToStream(stream, @field(inst.kw_args, arg_field.name), inst_table, indent);
+                try self.writeParamToStream(stream, @field(inst.kw_args, arg_field.name));
                need_comma = true;
            }
        }
@@ -756,31 +783,37 @@ pub const Module = struct {
        try stream.writeByte(')');
    }
 
-    fn writeParamToStream(self: Module, stream: var, param: var, inst_table: *const InstPtrTable, indent: usize) !void {
+    fn writeParamToStream(self: *Writer, stream: var, param: var) !void {
        if (@typeInfo(@TypeOf(param)) == .Enum) {
            return stream.writeAll(@tagName(param));
        }
        switch (@TypeOf(param)) {
-            *Inst => return self.writeInstParamToStream(stream, param, inst_table),
+            *Inst => return self.writeInstParamToStream(stream, param),
            []*Inst => {
                try stream.writeByte('[');
                for (param) |inst, i| {
                    if (i != 0) {
                        try stream.writeAll(", ");
                    }
-                    try self.writeInstParamToStream(stream, inst, inst_table);
+                    try self.writeInstParamToStream(stream, inst);
                }
                try stream.writeByte(']');
            },
            Module.Body => {
                try stream.writeAll("{\n");
                for (param.instructions) |inst, i| {
-                    try stream.writeByteNTimes(' ', indent);
+                    try stream.writeByteNTimes(' ', self.indent);
                    try stream.print("%{} ", .{i});
-                    try self.writeInstToStream(stream, inst, inst_table, indent + 2);
+                    if (inst.cast(Inst.Block)) |block| {
+                        const name = try std.fmt.allocPrint(&self.arena.allocator, "label_{}", .{i});
+                        try self.block_table.put(block, name);
+                    }
+                    self.indent += 2;
+                    try self.writeInstToStream(stream, inst);
+                    self.indent -= 2;
                    try stream.writeByte('\n');
                }
-                try stream.writeByteNTimes(' ', indent - 2);
+                try stream.writeByteNTimes(' ', self.indent - 2);
                try stream.writeByte('}');
            },
            bool => return stream.writeByte("01"[@boolToInt(param)]),
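
Note: with labels gone from the ZIR data structures, the printer invents names only at render time: while a body is printed, each Block instruction is registered in block_table under a generated name, and any later *Inst.Block parameter is rendered as that name in string-literal form (see the *Inst.Block case in the next hunk). Condensed from this and the following hunk:

    // While printing a body: name the block by its index within the body.
    const name = try std.fmt.allocPrint(&self.arena.allocator, "label_{}", .{i});
    try self.block_table.put(block, name);
    // While rendering a parameter that refers to that block: emit the name.
    const block_name = self.block_table.get(param).?;
    try std.zig.renderStringLiteral(block_name, stream);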
@@ -788,12 +821,16 @@ pub const Module = struct {
            BigIntConst, usize => return stream.print("{}", .{param}),
            TypedValue => unreachable, // this is a special case
            *IrModule.Decl => unreachable, // this is a special case
+            *Inst.Block => {
+                const name = self.block_table.get(param).?;
+                return std.zig.renderStringLiteral(name, stream);
+            },
            else => |T| @compileError("unimplemented: rendering parameter of type " ++ @typeName(T)),
        }
    }
 
-    fn writeInstParamToStream(self: Module, stream: var, inst: *Inst, inst_table: *const InstPtrTable) !void {
-        if (inst_table.get(inst)) |info| {
+    fn writeInstParamToStream(self: *Writer, stream: var, inst: *Inst) !void {
+        if (self.inst_table.get(inst)) |info| {
            if (info.index) |i| {
                try stream.print("%{}", .{info.index});
            } else {
@@ -823,7 +860,9 @@ pub fn parse(allocator: *Allocator, source: [:0]const u8) Allocator.Error!Module
        .global_name_map = &global_name_map,
        .decls = .{},
        .unnamed_index = 0,
+        .block_table = std.StringHashMap(*Inst.Block).init(allocator),
    };
+    defer parser.block_table.deinit();
    errdefer parser.arena.deinit();
 
    parser.parseRoot() catch |err| switch (err) {
@@ -849,6 +888,7 @@ const Parser = struct {
    global_name_map: *std.StringHashMap(*Inst),
    error_msg: ?ErrorMsg = null,
    unnamed_index: usize,
+    block_table: std.StringHashMap(*Inst.Block),
 
    const Body = struct {
        instructions: std.ArrayList(*Inst),
@@ -1057,6 +1097,10 @@ const Parser = struct {
            .tag = InstType.base_tag,
        };
 
+        if (InstType == Inst.Block) {
+            try self.block_table.put(inst_name, inst_specific);
+        }
+
        if (@hasField(InstType, "ty")) {
            inst_specific.ty = opt_type orelse {
                return self.fail("instruction '" ++ fn_name ++ "' requires type", .{});
@@ -1162,6 +1206,10 @@ const Parser = struct {
            },
            TypedValue => return self.fail("'const' is a special instruction; not legal in ZIR text", .{}),
            *IrModule.Decl => return self.fail("'declval_in_module' is a special instruction; not legal in ZIR text", .{}),
+            *Inst.Block => {
+                const name = try self.parseStringLiteral();
+                return self.block_table.get(name).?;
+            },
            else => @compileError("Unimplemented: ir parseParameterGeneric for type " ++ @typeName(T)),
        }
        return self.fail("TODO parse parameter {}", .{@typeName(T)});
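
Note: this is the inverse of the printer's block_table above: the parser records every parsed block instruction under its name, and a *Inst.Block parameter written as a string literal resolves back to the instruction pointer, so ZIR text containing block references round-trips through print and parse. Condensed from this and the earlier parser hunk:

    // When a block instruction is parsed, remember it by name:
    if (InstType == Inst.Block) {
        try self.block_table.put(inst_name, inst_specific);
    }
    // When a *Inst.Block parameter is parsed, look the name back up:
    const name = try self.parseStringLiteral();
    return self.block_table.get(name).?;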
@@ -1226,7 +1274,9 @@ pub fn emit(allocator: *Allocator, old_module: IrModule) !Module {
        .names = std.StringHashMap(void).init(allocator),
        .primitive_table = std.AutoHashMap(Inst.Primitive.Builtin, *Decl).init(allocator),
        .indent = 0,
+        .block_table = std.AutoHashMap(*ir.Inst.Block, *Inst.Block).init(allocator),
    };
+    defer ctx.block_table.deinit();
    defer ctx.decls.deinit(allocator);
    defer ctx.names.deinit();
    defer ctx.primitive_table.deinit();
@@ -1249,6 +1299,7 @@ const EmitZIR = struct {
    next_auto_name: usize,
    primitive_table: std.AutoHashMap(Inst.Primitive.Builtin, *Decl),
    indent: usize,
+    block_table: std.AutoHashMap(*ir.Inst.Block, *Inst.Block),
 
    fn emit(self: *EmitZIR) !void {
        // Put all the Decls in a list and sort them by name to avoid nondeterminism introduced
@@ -1611,33 +1662,47 @@ const EmitZIR = struct {
                const old_inst = inst.cast(ir.Inst.Block).?;
                const new_inst = try self.arena.allocator.create(Inst.Block);
+
+                // We do this now so that the break instructions within the block
+                // can find it.
+                try inst_table.put(&old_inst.base, &new_inst.base);
+                try self.block_table.put(old_inst, new_inst);
+
+                var block_body = std.ArrayList(*Inst).init(self.allocator);
+                defer block_body.deinit();
+
+                try self.emitBody(old_inst.args.body, inst_table, &block_body);
+
                new_inst.* = .{
                    .base = .{
                        .src = inst.src,
                        .tag = Inst.Block.base_tag,
                    },
                    .positionals = .{
-                        .label = try self.autoName(),
-                        .body = undefined,
+                        .body = .{ .instructions = block_body.toOwnedSlice() },
                    },
                    .kw_args = .{},
                };
 
-                var block_body = std.ArrayList(*Inst).init(self.allocator);
-                defer block_body.deinit();
-
-                try self.emitBody(old_inst.args.body, inst_table, &block_body);
-                new_inst.positionals.body = .{ .instructions = block_body.toOwnedSlice() };
-
                break :blk &new_inst.base;
            },
+            .br => blk: {
+                const old_inst = inst.cast(ir.Inst.Br).?;
+                const new_block = self.block_table.get(old_inst.args.block).?;
+                const new_inst = try self.arena.allocator.create(Inst.Break);
+                new_inst.* = .{
+                    .base = .{
+                        .src = inst.src,
+                        .tag = Inst.Break.base_tag,
+                    },
+                    .positionals = .{
+                        .block = new_block,
+                        .operand = try self.resolveInst(new_body, old_inst.args.operand),
+                    },
+                    .kw_args = .{},
+                };
+                break :blk &new_inst.base;
+            },
            .breakpoint => try self.emitTrivial(inst.src, Inst.Breakpoint),
-            .breakvoid => blk: {
-                const old_inst = inst.cast(ir.Inst.BreakVoid).?;
-                const new_block = inst_table.get(&old_inst.args.block.base).?;
+            .brvoid => blk: {
+                const old_inst = inst.cast(ir.Inst.BrVoid).?;
+                const new_block = self.block_table.get(old_inst.args.block).?;
                const new_inst = try self.arena.allocator.create(Inst.BreakVoid);
                new_inst.* = .{
                    .base = .{
@@ -1645,7 +1710,7 @@ const EmitZIR = struct {
                        .tag = Inst.BreakVoid.base_tag,
                    },
                    .positionals = .{
-                        .label = new_block.cast(Inst.Block).?.positionals.label,
+                        .block = new_block,
                    },
                    .kw_args = .{},
                };