stage2: progress towards Block and CondBr codegen

Andrew Kelley 2020-07-07 08:01:54 +00:00
parent 4d01385e14
commit b55d0193e4
4 changed files with 55 additions and 15 deletions
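For context, the Block, BreakVoid, and CondBr instructions worked on below correspond to labeled blocks, labeled breaks, and if branches in Zig source. A minimal, purely illustrative example of the kind of program this commit moves toward compiling (not taken from the commit or its tests):

export fn entry(c: bool) void {
    blk: {
        // the `if` lowers to a CondBr; the labeled break to a BreakVoid targeting the block
        if (c) break :blk;
    }
}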

View File

@ -2545,18 +2545,6 @@ fn analyzeInstBlock(self: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerEr
assert(child_block.instructions.items.len != 0);
assert(child_block.instructions.items[child_block.instructions.items.len - 1].tag.isNoReturn());
if (label.results.items.len <= 1) {
// No need to add the Block instruction; we can add the instructions to the parent block directly.
// Blocks are terminated with a noreturn instruction which we do not want to include.
const instrs = child_block.instructions.items;
try parent_block.instructions.appendSlice(self.gpa, instrs[0 .. instrs.len - 1]);
if (label.results.items.len == 1) {
return label.results.items[0];
} else {
return self.constNoReturn(scope, inst.base.src);
}
}
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
try parent_block.instructions.append(self.gpa, &block_inst.base);
@ -2579,7 +2567,10 @@ fn analyzeInstBreakVoid(self: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid)
if (block.label) |*label| {
if (mem.eql(u8, label.name, label_name)) {
try label.results.append(self.gpa, void_inst);
return self.constNoReturn(scope, inst.base.src);
const b = try self.requireRuntimeBlock(scope, inst.base.src);
return self.addNewInstArgs(b, inst.base.src, Type.initTag(.noreturn), Inst.BreakVoid, .{
.block = label.block_inst,
});
}
}
opt_block = block.parent;
@ -3366,6 +3357,8 @@ fn makeIntType(self: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
fn resolvePeerTypes(self: *Module, scope: *Scope, instructions: []*Inst) !Type {
if (instructions.len == 0)
return Type.initTag(.noreturn);
if (instructions.len == 1)
return instructions[0].ty;
return self.fail(scope, instructions[0].src, "TODO peer type resolution", .{});
}
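resolvePeerTypes reduces a list of instructions to one common type: an empty list yields noreturn, a single instruction supplies its type, and reconciling several differing types is still a TODO. For a labeled block, the peers are its break results, so a hypothetical case that would reach the TODO branch (an illustration assuming ordinary Zig peer type resolution, not code from the commit) is:

fn pick(c: bool) u16 {
    return blk: {
        if (c) break :blk @as(u8, 1); // one break result has type u8
        break :blk @as(u16, 2); // the other has type u16
    }; // peer type resolution would widen the block's result type to u16
}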

View File

@ -218,6 +218,11 @@ pub fn generateSymbol(
}
}
const InnerError = error {
OutOfMemory,
CodegenFail,
};
const Function = struct {
gpa: *Allocator,
bin_file: *link.ElfFile,
@ -379,8 +384,12 @@ const Function = struct {
}
fn genArch(self: *Function, comptime arch: std.Target.Cpu.Arch) !void {
return self.genBody(self.mod_fn.analysis.success, arch);
}
fn genBody(self: *Function, body: ir.Body, comptime arch: std.Target.Cpu.Arch) InnerError!void {
const inst_table = &self.branch_stack.items[0].inst_table;
for (self.mod_fn.analysis.success.instructions) |inst| {
for (body.instructions) |inst| {
const new_inst = try self.genFuncInst(inst, arch);
try inst_table.putNoClobber(self.gpa, inst, new_inst);
}
@ -394,6 +403,7 @@ const Function = struct {
.bitcast => return self.genBitCast(inst.cast(ir.Inst.BitCast).?),
.block => return self.genBlock(inst.cast(ir.Inst.Block).?, arch),
.breakpoint => return self.genBreakpoint(inst.src, arch),
.breakvoid => return self.genBreakVoid(inst.cast(ir.Inst.BreakVoid).?, arch),
.call => return self.genCall(inst.cast(ir.Inst.Call).?, arch),
.cmp => return self.genCmp(inst.cast(ir.Inst.Cmp).?, arch),
.condbr => return self.genCondBr(inst.cast(ir.Inst.CondBr).?, arch),
@ -686,9 +696,16 @@ const Function = struct {
}
fn genBlock(self: *Function, inst: *ir.Inst.Block, comptime arch: std.Target.Cpu.Arch) !MCValue {
// A block is nothing but a setup to be able to jump to the end.
try self.genBody(inst.args.body, arch);
return self.fail(inst.base.src, "TODO process jump relocs after block end", .{});
}
fn genBreakVoid(self: *Function, inst: *ir.Inst.BreakVoid, comptime arch: std.Target.Cpu.Arch) !MCValue {
switch (arch) {
else => return self.fail(inst.base.src, "TODO implement codegen Block for {}", .{self.target.cpu.arch}),
else => return self.fail(inst.base.src, "TODO implement breakvoid for {}", .{self.target.cpu.arch}),
}
return .none;
}
fn genAsm(self: *Function, inst: *ir.Inst.Assembly, comptime arch: Target.Cpu.Arch) !MCValue {

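genBlock now generates the block's body and leaves a TODO for resolving jumps to the position just past the block's end. One plausible shape for that machinery, sketched here purely as an assumption (BlockData, performReloc, the code buffer, and the rel32 encoding are all invented for illustration, not taken from the commit): genBreakVoid would emit a jump with a placeholder displacement and record its offset on the enclosing block, and genBlock would patch each recorded offset once the end of the body is known.

const std = @import("std");

// Hypothetical sketch: each in-progress block remembers where the displacements of
// jumps emitted for its breaks live in the output buffer.
const BlockData = struct {
    relocs: std.ArrayListUnmanaged(u32) = .{},
};

// Patch one recorded rel32 displacement so the jump lands at block_end.
fn performReloc(code: []u8, reloc_offset: u32, block_end: u32) void {
    // rel32 jumps are relative to the byte after the 4-byte displacement field.
    const disp = @intCast(i32, @intCast(i64, block_end) - (@intCast(i64, reloc_offset) + 4));
    std.mem.writeIntLittle(i32, code[reloc_offset..][0..4], disp);
}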
View File

@ -35,6 +35,10 @@ pub const Inst = struct {
return @truncate(u1, self.deaths >> index) != 0;
}
pub fn specialOperandDeaths(self: Inst) bool {
return (self.deaths & 0b1000_0000) != 0;
}
pub const Tag = enum {
add,
arg,
@ -42,6 +46,7 @@ pub const Inst = struct {
bitcast,
block,
breakpoint,
breakvoid,
call,
cmp,
condbr,
@ -74,6 +79,7 @@ pub const Inst = struct {
.sub,
=> false,
.breakvoid,
.condbr,
.ret,
.retvoid,
@ -159,6 +165,14 @@ pub const Inst = struct {
args: void,
};
pub const BreakVoid = struct {
pub const base_tag = Tag.breakvoid;
base: Inst,
args: struct {
block: *Block,
},
};
pub const Call = struct {
pub const base_tag = Tag.call;
base: Inst,

View File

@ -1608,6 +1608,22 @@ const EmitZIR = struct {
break :blk &new_inst.base;
},
.breakpoint => try self.emitTrivial(inst.src, Inst.Breakpoint),
.breakvoid => blk: {
const old_inst = inst.cast(ir.Inst.BreakVoid).?;
const new_block = inst_table.get(&old_inst.args.block.base).?;
const new_inst = try self.arena.allocator.create(Inst.BreakVoid);
new_inst.* = .{
.base = .{
.src = inst.src,
.tag = Inst.BreakVoid.base_tag,
},
.positionals = .{
.label = new_block.cast(Inst.Block).?.positionals.label,
},
.kw_args = .{},
};
break :blk &new_inst.base;
},
.call => blk: {
const old_inst = inst.cast(ir.Inst.Call).?;
const new_inst = try self.arena.allocator.create(Inst.Call);