Merge pull request #7847 from ziglang/astgen-rl-rework

stage2: rework astgen result locations
This commit is contained in:
Andrew Kelley 2021-01-31 20:15:08 -08:00 committed by GitHub
commit bf76501b5d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 1281 additions and 1399 deletions

View File

@ -375,6 +375,10 @@ pub const Scope = struct {
}
}
pub fn isComptime(self: *Scope) bool {
return self.getGenZIR().force_comptime;
}
pub fn ownerDecl(self: *Scope) ?*Decl {
return switch (self.tag) {
.block => self.cast(Block).?.owner_decl,
@ -671,14 +675,36 @@ pub const Scope = struct {
};
pub const Merges = struct {
results: ArrayListUnmanaged(*Inst),
block_inst: *Inst.Block,
/// Separate array list from break_inst_list so that it can be passed directly
/// to resolvePeerTypes.
results: ArrayListUnmanaged(*Inst),
/// Keeps track of the break instructions so that the operand can be replaced
/// if we need to add type coercion at the end of block analysis.
/// Same indexes, capacity, length as `results`.
br_list: ArrayListUnmanaged(*Inst.Br),
};
/// For debugging purposes.
pub fn dump(self: *Block, mod: Module) void {
zir.dumpBlock(mod, self);
}
pub fn makeSubBlock(parent: *Block) Block {
return .{
.parent = parent,
.inst_table = parent.inst_table,
.func = parent.func,
.owner_decl = parent.owner_decl,
.src_decl = parent.src_decl,
.instructions = .{},
.arena = parent.arena,
.label = null,
.inlining = parent.inlining,
.is_comptime = parent.is_comptime,
.branch_quota = parent.branch_quota,
};
}
};
/// This is a temporary structure, references to it are valid only
@ -690,13 +716,32 @@ pub const Scope = struct {
parent: *Scope,
decl: *Decl,
arena: *Allocator,
force_comptime: bool,
/// The first N instructions in a function body ZIR are arg instructions.
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
label: ?Label = null,
break_block: ?*zir.Inst.Block = null,
continue_block: ?*zir.Inst.Block = null,
/// only valid if label != null or (continue_block and break_block) != null
/// Only valid when setBlockResultLoc is called.
break_result_loc: astgen.ResultLoc = undefined,
/// When a block has a pointer result location, here it is.
rl_ptr: ?*zir.Inst = null,
/// Keeps track of how many branches of a block did not actually
/// consume the result location. astgen uses this to figure out
/// whether to rely on break instructions or writing to the result
/// pointer for the result instruction.
rvalue_rl_count: usize = 0,
/// Keeps track of how many break instructions there are. When astgen is finished
/// with a block, it can check this against rvalue_rl_count to find out whether
/// the break instructions should be downgraded to break_void.
break_count: usize = 0,
/// Tracks `break :foo bar` instructions so they can possibly be elided later if
/// the labeled block ends up not needing a result location pointer.
labeled_breaks: std.ArrayListUnmanaged(*zir.Inst.Break) = .{},
/// Tracks `store_to_block_ptr` instructions that correspond to break instructions
/// so they can possibly be elided later if the labeled block ends up not needing
/// a result location pointer.
labeled_store_to_block_ptr_list: std.ArrayListUnmanaged(*zir.Inst.BinOp) = .{},
pub const Label = struct {
token: ast.TokenIndex,
@ -968,6 +1013,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &fn_type_scope_arena.allocator,
.parent = &decl.container.base,
.force_comptime = true,
};
defer fn_type_scope.instructions.deinit(self.gpa);
@ -1131,6 +1177,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &decl_arena.allocator,
.parent = &decl.container.base,
.force_comptime = false,
};
defer gen_scope.instructions.deinit(self.gpa);
@ -1171,7 +1218,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
!gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
{
const src = tree.token_locs[body_block.rbrace].start;
_ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .returnvoid);
_ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .return_void);
}
if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
@ -1329,6 +1376,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &gen_scope_arena.allocator,
.parent = &decl.container.base,
.force_comptime = false,
};
defer gen_scope.instructions.deinit(self.gpa);
@ -1388,6 +1436,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.decl = decl,
.arena = &type_scope_arena.allocator,
.parent = &decl.container.base,
.force_comptime = true,
};
defer type_scope.instructions.deinit(self.gpa);
@ -1457,13 +1506,15 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
decl.analysis = .in_progress;
// A comptime decl does not store any value so we can just deinit this arena after analysis is done.
// A comptime decl does not store any value so we can just deinit
// this arena after analysis is done.
var analysis_arena = std.heap.ArenaAllocator.init(self.gpa);
defer analysis_arena.deinit();
var gen_scope: Scope.GenZIR = .{
.decl = decl,
.arena = &analysis_arena.allocator,
.parent = &decl.container.base,
.force_comptime = true,
};
defer gen_scope.instructions.deinit(self.gpa);
@ -2100,7 +2151,7 @@ pub fn addBr(
src: usize,
target_block: *Inst.Block,
operand: *Inst,
) !*Inst {
) !*Inst.Br {
const inst = try scope_block.arena.create(Inst.Br);
inst.* = .{
.base = .{
@ -2112,7 +2163,7 @@ pub fn addBr(
.block = target_block,
};
try scope_block.instructions.append(self.gpa, &inst.base);
return &inst.base;
return inst;
}
pub fn addCondBr(
@ -3466,18 +3517,18 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
};
const ok_body: ir.Body = .{
.instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the brvoid.
.instructions = try parent_block.arena.alloc(*Inst, 1), // Only need space for the br_void.
};
const brvoid = try parent_block.arena.create(Inst.BrVoid);
brvoid.* = .{
const br_void = try parent_block.arena.create(Inst.BrVoid);
br_void.* = .{
.base = .{
.tag = .brvoid,
.tag = .br_void,
.ty = Type.initTag(.noreturn),
.src = ok.src,
},
.block = block_inst,
};
ok_body.instructions[0] = &brvoid.base;
ok_body.instructions[0] = &br_void.base;
var fail_block: Scope.Block = .{
.parent = parent_block,

File diff suppressed because it is too large Load Diff

View File

@ -840,14 +840,15 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
.arg => return self.genArg(inst.castTag(.arg).?),
.assembly => return self.genAsm(inst.castTag(.assembly).?),
.bitcast => return self.genBitCast(inst.castTag(.bitcast).?),
.bitand => return self.genBitAnd(inst.castTag(.bitand).?),
.bitor => return self.genBitOr(inst.castTag(.bitor).?),
.bit_and => return self.genBitAnd(inst.castTag(.bit_and).?),
.bit_or => return self.genBitOr(inst.castTag(.bit_or).?),
.block => return self.genBlock(inst.castTag(.block).?),
.br => return self.genBr(inst.castTag(.br).?),
.br_block_flat => return self.genBrBlockFlat(inst.castTag(.br_block_flat).?),
.breakpoint => return self.genBreakpoint(inst.src),
.brvoid => return self.genBrVoid(inst.castTag(.brvoid).?),
.booland => return self.genBoolOp(inst.castTag(.booland).?),
.boolor => return self.genBoolOp(inst.castTag(.boolor).?),
.br_void => return self.genBrVoid(inst.castTag(.br_void).?),
.bool_and => return self.genBoolOp(inst.castTag(.bool_and).?),
.bool_or => return self.genBoolOp(inst.castTag(.bool_or).?),
.call => return self.genCall(inst.castTag(.call).?),
.cmp_lt => return self.genCmp(inst.castTag(.cmp_lt).?, .lt),
.cmp_lte => return self.genCmp(inst.castTag(.cmp_lte).?, .lte),
@ -1097,7 +1098,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
.arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bitand),
.arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bit_and),
else => return self.fail(inst.base.src, "TODO implement bitwise and for {}", .{self.target.cpu.arch}),
}
}
@ -1107,7 +1108,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (inst.base.isUnused())
return MCValue.dead;
switch (arch) {
.arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bitor),
.arm, .armeb => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bit_or),
else => return self.fail(inst.base.src, "TODO implement bitwise or for {}", .{self.target.cpu.arch}),
}
}
@ -1371,10 +1372,10 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.rsb(.al, dst_reg, dst_reg, operand).toU32());
}
},
.booland, .bitand => {
.bool_and, .bit_and => {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.@"and"(.al, dst_reg, dst_reg, operand).toU32());
},
.boolor, .bitor => {
.bool_or, .bit_or => {
writeInt(u32, try self.code.addManyAsArray(4), Instruction.orr(.al, dst_reg, dst_reg, operand).toU32());
},
.not, .xor => {
@ -2441,17 +2442,14 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
}
}
fn genBrBlockFlat(self: *Self, inst: *ir.Inst.BrBlockFlat) !MCValue {
try self.genBody(inst.body);
const last = inst.body.instructions[inst.body.instructions.len - 1];
return self.br(inst.base.src, inst.block, last);
}
fn genBr(self: *Self, inst: *ir.Inst.Br) !MCValue {
if (inst.operand.ty.hasCodeGenBits()) {
const operand = try self.resolveInst(inst.operand);
const block_mcv = @bitCast(MCValue, inst.block.codegen.mcv);
if (block_mcv == .none) {
inst.block.codegen.mcv = @bitCast(AnyMCValue, operand);
} else {
try self.setRegOrMem(inst.base.src, inst.block.base.ty, block_mcv, operand);
}
}
return self.brVoid(inst.base.src, inst.block);
return self.br(inst.base.src, inst.block, inst.operand);
}
fn genBrVoid(self: *Self, inst: *ir.Inst.BrVoid) !MCValue {
@ -2464,20 +2462,33 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
switch (arch) {
.x86_64 => switch (inst.base.tag) {
// lhs AND rhs
.booland => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20),
.bool_and => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 4, 0x20),
// lhs OR rhs
.boolor => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08),
.bool_or => return try self.genX8664BinMath(&inst.base, inst.lhs, inst.rhs, 1, 0x08),
else => unreachable, // Not a boolean operation
},
.arm, .armeb => switch (inst.base.tag) {
.booland => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .booland),
.boolor => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .boolor),
.bool_and => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bool_and),
.bool_or => return try self.genArmBinOp(&inst.base, inst.lhs, inst.rhs, .bool_or),
else => unreachable, // Not a boolean operation
},
else => return self.fail(inst.base.src, "TODO implement boolean operations for {}", .{self.target.cpu.arch}),
}
}
fn br(self: *Self, src: usize, block: *ir.Inst.Block, operand: *ir.Inst) !MCValue {
if (operand.ty.hasCodeGenBits()) {
const operand_mcv = try self.resolveInst(operand);
const block_mcv = @bitCast(MCValue, block.codegen.mcv);
if (block_mcv == .none) {
block.codegen.mcv = @bitCast(AnyMCValue, operand_mcv);
} else {
try self.setRegOrMem(src, block.base.ty, block_mcv, operand_mcv);
}
}
return self.brVoid(src, block);
}
fn brVoid(self: *Self, src: usize, block: *ir.Inst.Block) !MCValue {
// Emit a jump with a relocation. It will be patched up after the block ends.
try block.codegen.relocs.ensureCapacity(self.gpa, block.codegen.relocs.items.len + 1);

View File

@ -56,13 +56,20 @@ pub const Inst = struct {
alloc,
arg,
assembly,
bitand,
bit_and,
bitcast,
bitor,
bit_or,
block,
br,
/// Same as `br` except the operand is a list of instructions to be treated as
/// a flat block; that is, there is only 1 break instruction from the block, and
/// it is implied to be after the last instruction, and the last instruction is
/// the break operand.
/// This instruction exists for late-stage semantic analysis patch-ups, to
/// replace one br operand with multiple instructions, without moving anything else around.
br_block_flat,
breakpoint,
brvoid,
br_void,
call,
cmp_lt,
cmp_lte,
@ -85,8 +92,8 @@ pub const Inst = struct {
is_err,
// *E!T => bool
is_err_ptr,
booland,
boolor,
bool_and,
bool_or,
/// Read a value from a pointer.
load,
loop,
@ -147,10 +154,10 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.store,
.booland,
.boolor,
.bitand,
.bitor,
.bool_and,
.bool_or,
.bit_and,
.bit_or,
.xor,
=> BinOp,
@ -158,7 +165,8 @@ pub const Inst = struct {
.assembly => Assembly,
.block => Block,
.br => Br,
.brvoid => BrVoid,
.br_block_flat => BrBlockFlat,
.br_void => BrVoid,
.call => Call,
.condbr => CondBr,
.constant => Constant,
@ -251,7 +259,8 @@ pub const Inst = struct {
pub fn breakBlock(base: *Inst) ?*Block {
return switch (base.tag) {
.br => base.castTag(.br).?.block,
.brvoid => base.castTag(.brvoid).?.block,
.br_void => base.castTag(.br_void).?.block,
.br_block_flat => base.castTag(.br_block_flat).?.block,
else => null,
};
}
@ -355,6 +364,27 @@ pub const Inst = struct {
}
};
pub const convertable_br_size = std.math.max(@sizeOf(BrBlockFlat), @sizeOf(Br));
pub const convertable_br_align = std.math.max(@alignOf(BrBlockFlat), @alignOf(Br));
comptime {
assert(@byteOffsetOf(BrBlockFlat, "base") == @byteOffsetOf(Br, "base"));
}
pub const BrBlockFlat = struct {
pub const base_tag = Tag.br_block_flat;
base: Inst,
block: *Block,
body: Body,
pub fn operandCount(self: *const BrBlockFlat) usize {
return 0;
}
pub fn getOperand(self: *const BrBlockFlat, index: usize) ?*Inst {
return null;
}
};
pub const Br = struct {
pub const base_tag = Tag.br;
@ -363,7 +393,7 @@ pub const Inst = struct {
operand: *Inst,
pub fn operandCount(self: *const Br) usize {
return 0;
return 1;
}
pub fn getOperand(self: *const Br, index: usize) ?*Inst {
if (index == 0)
@ -373,7 +403,7 @@ pub const Inst = struct {
};
pub const BrVoid = struct {
pub const base_tag = Tag.brvoid;
pub const base_tag = Tag.br_void;
base: Inst,
block: *Block,

View File

@ -59,7 +59,7 @@ pub const Inst = struct {
/// Inline assembly.
@"asm",
/// Bitwise AND. `&`
bitand,
bit_and,
/// TODO delete this instruction, it has no purpose.
bitcast,
/// An arbitrary typed pointer is pointer-casted to a new Pointer.
@ -71,9 +71,9 @@ pub const Inst = struct {
/// The new result location pointer has an inferred type.
bitcast_result_ptr,
/// Bitwise NOT. `~`
bitnot,
bit_not,
/// Bitwise OR. `|`
bitor,
bit_or,
/// A labeled block of code, which can return a value.
block,
/// A block of code, which can return a value. There are no instructions that break out of
@ -83,17 +83,17 @@ pub const Inst = struct {
block_comptime,
/// Same as `block_flat` but additionally makes the inner instructions execute at comptime.
block_comptime_flat,
/// Boolean AND. See also `bitand`.
booland,
/// Boolean NOT. See also `bitnot`.
boolnot,
/// Boolean OR. See also `bitor`.
boolor,
/// Boolean AND. See also `bit_and`.
bool_and,
/// Boolean NOT. See also `bit_not`.
bool_not,
/// Boolean OR. See also `bit_or`.
bool_or,
/// Return a value from a `Block`.
@"break",
breakpoint,
/// Same as `break` but without an operand; the operand is assumed to be the void value.
breakvoid,
break_void,
/// Function call.
call,
/// `<`
@ -112,16 +112,10 @@ pub const Inst = struct {
/// as type coercion from the new element type to the old element type.
/// LHS is destination element type, RHS is result pointer.
coerce_result_ptr,
/// This instruction does a `coerce_result_ptr` operation on a `Block`'s
/// result location pointer, whose type is inferred by peer type resolution on the
/// `Block`'s corresponding `break` instructions.
coerce_result_block_ptr,
/// Equivalent to `as(ptr_child_type(typeof(ptr)), value)`.
coerce_to_ptr_elem,
/// Emit an error message and fail compilation.
compileerror,
compile_error,
/// Log compile time variables and emit an error message.
compilelog,
compile_log,
/// Conditional branch. Splits control flow based on a boolean condition value.
condbr,
/// Special case, has no textual representation.
@ -135,11 +129,11 @@ pub const Inst = struct {
/// Declares the beginning of a statement. Used for debug info.
dbg_stmt,
/// Represents a pointer to a global decl.
declref,
decl_ref,
/// Represents a pointer to a global decl by string name.
declref_str,
/// Equivalent to a declref followed by deref.
declval,
decl_ref_str,
/// Equivalent to a decl_ref followed by deref.
decl_val,
/// Load the value from a pointer.
deref,
/// Arithmetic division. Asserts no integer overflow.
@ -185,7 +179,7 @@ pub const Inst = struct {
/// can hold the same mathematical value.
intcast,
/// Make an integer type out of signedness and bit count.
inttype,
int_type,
/// Return a boolean false if an optional is null. `x != null`
is_non_null,
/// Return a boolean true if an optional is null. `x == null`
@ -232,7 +226,7 @@ pub const Inst = struct {
/// Sends control flow back to the function's callee. Takes an operand as the return value.
@"return",
/// Same as `return` but there is no operand; the operand is implicitly the void value.
returnvoid,
return_void,
/// Changes the maximum number of backwards branches that compile-time
/// code execution can use before giving up and making a compile error.
set_eval_branch_quota,
@ -270,6 +264,9 @@ pub const Inst = struct {
/// Write a value to a pointer. For loading, see `deref`.
store,
/// Same as `store` but the type of the value being stored will be used to infer
/// the block type. The LHS is the pointer to store to.
store_to_block_ptr,
/// Same as `store` but the type of the value being stored will be used to infer
/// the pointer type.
store_to_inferred_ptr,
/// String Literal. Makes an anonymous Decl and then takes a pointer to it.
@ -286,11 +283,11 @@ pub const Inst = struct {
typeof_peer,
/// Asserts control-flow will not reach this instruction. Not safety checked - the compiler
/// will assume the correctness of this instruction.
unreach_nocheck,
unreachable_unsafe,
/// Asserts control-flow will not reach this instruction. In safety-checked modes,
/// this will generate a call to the panic function unless it can be proven unreachable
/// by the compiler.
@"unreachable",
unreachable_safe,
/// Bitwise XOR. `^`
xor,
/// Create an optional type '?T'
@ -339,12 +336,8 @@ pub const Inst = struct {
enum_literal,
/// Create an enum type.
enum_type,
/// A switch expression.
switchbr,
/// A range in a switch case, `lhs...rhs`.
/// Only checks that `lhs >= rhs` if they are ints, everything else is
/// validated by the .switch instruction.
switch_range,
/// Does nothing; returns a void value.
void_value,
pub fn Type(tag: Tag) type {
return switch (tag) {
@ -352,17 +345,18 @@ pub const Inst = struct {
.alloc_inferred_mut,
.breakpoint,
.dbg_stmt,
.returnvoid,
.return_void,
.ret_ptr,
.ret_type,
.unreach_nocheck,
.@"unreachable",
.unreachable_unsafe,
.unreachable_safe,
.void_value,
=> NoOp,
.alloc,
.alloc_mut,
.boolnot,
.compileerror,
.bool_not,
.compile_error,
.deref,
.@"return",
.is_null,
@ -400,7 +394,7 @@ pub const Inst = struct {
.err_union_code_ptr,
.ensure_err_payload_void,
.anyframe_type,
.bitnot,
.bit_not,
.import,
.set_eval_branch_quota,
.indexable_ptr_len,
@ -411,10 +405,10 @@ pub const Inst = struct {
.array_cat,
.array_mul,
.array_type,
.bitand,
.bitor,
.booland,
.boolor,
.bit_and,
.bit_or,
.bool_and,
.bool_or,
.div,
.mod_rem,
.mul,
@ -422,6 +416,7 @@ pub const Inst = struct {
.shl,
.shr,
.store,
.store_to_block_ptr,
.store_to_inferred_ptr,
.sub,
.subwrap,
@ -440,7 +435,6 @@ pub const Inst = struct {
.error_union_type,
.merge_error_sets,
.slice_start,
.switch_range,
=> BinOp,
.block,
@ -452,19 +446,17 @@ pub const Inst = struct {
.arg => Arg,
.array_type_sentinel => ArrayTypeSentinel,
.@"break" => Break,
.breakvoid => BreakVoid,
.break_void => BreakVoid,
.call => Call,
.coerce_to_ptr_elem => CoerceToPtrElem,
.declref => DeclRef,
.declref_str => DeclRefStr,
.declval => DeclVal,
.coerce_result_block_ptr => CoerceResultBlockPtr,
.compilelog => CompileLog,
.decl_ref => DeclRef,
.decl_ref_str => DeclRefStr,
.decl_val => DeclVal,
.compile_log => CompileLog,
.loop => Loop,
.@"const" => Const,
.str => Str,
.int => Int,
.inttype => IntType,
.int_type => IntType,
.field_ptr, .field_val => Field,
.field_ptr_named, .field_val_named => FieldNamed,
.@"asm" => Asm,
@ -479,7 +471,6 @@ pub const Inst = struct {
.enum_literal => EnumLiteral,
.error_set => ErrorSet,
.slice => Slice,
.switchbr => SwitchBr,
.typeof_peer => TypeOfPeer,
.container_field_named => ContainerFieldNamed,
.container_field_typed => ContainerFieldTyped,
@ -508,18 +499,18 @@ pub const Inst = struct {
.arg,
.as,
.@"asm",
.bitand,
.bit_and,
.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
.bitor,
.bit_or,
.block,
.block_flat,
.block_comptime,
.block_comptime_flat,
.boolnot,
.booland,
.boolor,
.bool_not,
.bool_and,
.bool_or,
.breakpoint,
.call,
.cmp_lt,
@ -529,13 +520,11 @@ pub const Inst = struct {
.cmp_gt,
.cmp_neq,
.coerce_result_ptr,
.coerce_result_block_ptr,
.coerce_to_ptr_elem,
.@"const",
.dbg_stmt,
.declref,
.declref_str,
.declval,
.decl_ref,
.decl_ref_str,
.decl_val,
.deref,
.div,
.elem_ptr,
@ -552,7 +541,7 @@ pub const Inst = struct {
.fntype,
.int,
.intcast,
.inttype,
.int_type,
.is_non_null,
.is_null,
.is_non_null_ptr,
@ -579,6 +568,7 @@ pub const Inst = struct {
.mut_slice_type,
.const_slice_type,
.store,
.store_to_block_ptr,
.store_to_inferred_ptr,
.str,
.sub,
@ -602,31 +592,30 @@ pub const Inst = struct {
.merge_error_sets,
.anyframe_type,
.error_union_type,
.bitnot,
.bit_not,
.error_set,
.slice,
.slice_start,
.import,
.switch_range,
.typeof_peer,
.resolve_inferred_alloc,
.set_eval_branch_quota,
.compilelog,
.compile_log,
.enum_type,
.union_type,
.struct_type,
.void_value,
=> false,
.@"break",
.breakvoid,
.break_void,
.condbr,
.compileerror,
.compile_error,
.@"return",
.returnvoid,
.unreach_nocheck,
.@"unreachable",
.return_void,
.unreachable_unsafe,
.unreachable_safe,
.loop,
.switchbr,
.container_field_named,
.container_field_typed,
.container_field,
@ -717,7 +706,7 @@ pub const Inst = struct {
};
pub const BreakVoid = struct {
pub const base_tag = Tag.breakvoid;
pub const base_tag = Tag.break_void;
base: Inst,
positionals: struct {
@ -739,19 +728,8 @@ pub const Inst = struct {
},
};
pub const CoerceToPtrElem = struct {
pub const base_tag = Tag.coerce_to_ptr_elem;
base: Inst,
positionals: struct {
ptr: *Inst,
value: *Inst,
},
kw_args: struct {},
};
pub const DeclRef = struct {
pub const base_tag = Tag.declref;
pub const base_tag = Tag.decl_ref;
base: Inst,
positionals: struct {
@ -761,7 +739,7 @@ pub const Inst = struct {
};
pub const DeclRefStr = struct {
pub const base_tag = Tag.declref_str;
pub const base_tag = Tag.decl_ref_str;
base: Inst,
positionals: struct {
@ -771,7 +749,7 @@ pub const Inst = struct {
};
pub const DeclVal = struct {
pub const base_tag = Tag.declval;
pub const base_tag = Tag.decl_val;
base: Inst,
positionals: struct {
@ -780,19 +758,8 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const CoerceResultBlockPtr = struct {
pub const base_tag = Tag.coerce_result_block_ptr;
base: Inst,
positionals: struct {
dest_type: *Inst,
block: *Block,
},
kw_args: struct {},
};
pub const CompileLog = struct {
pub const base_tag = Tag.compilelog;
pub const base_tag = Tag.compile_log;
base: Inst,
positionals: struct {
@ -905,7 +872,7 @@ pub const Inst = struct {
};
pub const IntType = struct {
pub const base_tag = Tag.inttype;
pub const base_tag = Tag.int_type;
base: Inst,
positionals: struct {
@ -1114,32 +1081,6 @@ pub const Inst = struct {
},
};
pub const SwitchBr = struct {
pub const base_tag = Tag.switchbr;
base: Inst,
positionals: struct {
target_ptr: *Inst,
/// List of all individual items and ranges
items: []*Inst,
cases: []Case,
else_body: Body,
},
kw_args: struct {
/// Pointer to first range if such exists.
range: ?*Inst = null,
special_prong: enum {
none,
@"else",
underscore,
} = .none,
},
pub const Case = struct {
item: *Inst,
body: Body,
};
};
pub const TypeOfPeer = struct {
pub const base_tag = .typeof_peer;
base: Inst,
@ -1473,7 +1414,7 @@ const Writer = struct {
TypedValue => return stream.print("TypedValue{{ .ty = {}, .val = {}}}", .{ param.ty, param.val }),
*IrModule.Decl => return stream.print("Decl({s})", .{param.name}),
*Inst.Block => {
const name = self.block_table.get(param).?;
const name = self.block_table.get(param) orelse "!BADREF!";
return stream.print("\"{}\"", .{std.zig.fmtEscapes(name)});
},
*Inst.Loop => {
@ -1490,26 +1431,6 @@ const Writer = struct {
}
try stream.writeByte(']');
},
[]Inst.SwitchBr.Case => {
if (param.len == 0) {
return stream.writeAll("{}");
}
try stream.writeAll("{\n");
for (param) |*case, i| {
if (i != 0) {
try stream.writeAll(",\n");
}
try stream.writeByteNTimes(' ', self.indent);
self.indent += 2;
try self.writeParamToStream(stream, &case.item);
try stream.writeAll(" => ");
try self.writeParamToStream(stream, &case.body);
self.indent -= 2;
}
try stream.writeByte('\n');
try stream.writeByteNTimes(' ', self.indent - 2);
try stream.writeByte('}');
},
else => |T| @compileError("unimplemented: rendering parameter of type " ++ @typeName(T)),
}
}
@ -1641,10 +1562,10 @@ const DumpTzir = struct {
.cmp_gt,
.cmp_neq,
.store,
.booland,
.boolor,
.bitand,
.bitor,
.bool_and,
.bool_or,
.bit_and,
.bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
@ -1660,9 +1581,15 @@ const DumpTzir = struct {
try dtz.findConst(br.operand);
},
.brvoid => {
const brvoid = inst.castTag(.brvoid).?;
try dtz.findConst(&brvoid.block.base);
.br_block_flat => {
const br_block_flat = inst.castTag(.br_block_flat).?;
try dtz.findConst(&br_block_flat.block.base);
try dtz.fetchInstsAndResolveConsts(br_block_flat.body);
},
.br_void => {
const br_void = inst.castTag(.br_void).?;
try dtz.findConst(&br_void.block.base);
},
.block => {
@ -1753,10 +1680,10 @@ const DumpTzir = struct {
.cmp_gt,
.cmp_neq,
.store,
.booland,
.boolor,
.bitand,
.bitor,
.bool_and,
.bool_or,
.bit_and,
.bit_or,
.xor,
=> {
const bin_op = inst.cast(ir.Inst.BinOp).?;
@ -1805,9 +1732,27 @@ const DumpTzir = struct {
}
},
.brvoid => {
const brvoid = inst.castTag(.brvoid).?;
const kinky = try dtz.writeInst(writer, &brvoid.block.base);
.br_block_flat => {
const br_block_flat = inst.castTag(.br_block_flat).?;
const block_kinky = try dtz.writeInst(writer, &br_block_flat.block.base);
if (block_kinky != null) {
try writer.writeAll(", { // Instruction does not dominate all uses!\n");
} else {
try writer.writeAll(", {\n");
}
const old_indent = dtz.indent;
dtz.indent += 2;
try dtz.dumpBody(br_block_flat.body, writer);
dtz.indent = old_indent;
try writer.writeByteNTimes(' ', dtz.indent);
try writer.writeAll("})\n");
},
.br_void => {
const br_void = inst.castTag(.br_void).?;
const kinky = try dtz.writeInst(writer, &br_void.block.base);
if (kinky) |_| {
try writer.writeAll(") // Instruction does not dominate all uses!\n");
} else {
@ -1818,7 +1763,7 @@ const DumpTzir = struct {
.block => {
const block = inst.castTag(.block).?;
try writer.writeAll("\n");
try writer.writeAll("{\n");
const old_indent = dtz.indent;
dtz.indent += 2;
@ -1826,7 +1771,7 @@ const DumpTzir = struct {
dtz.indent = old_indent;
try writer.writeByteNTimes(' ', dtz.indent);
try writer.writeAll(")\n");
try writer.writeAll("})\n");
},
.condbr => {
@ -1856,7 +1801,7 @@ const DumpTzir = struct {
.loop => {
const loop = inst.castTag(.loop).?;
try writer.writeAll("\n");
try writer.writeAll("{\n");
const old_indent = dtz.indent;
dtz.indent += 2;
@ -1864,7 +1809,7 @@ const DumpTzir = struct {
dtz.indent = old_indent;
try writer.writeByteNTimes(' ', dtz.indent);
try writer.writeAll(")\n");
try writer.writeAll("})\n");
},
.call => {

File diff suppressed because it is too large Load Diff

View File

@ -962,43 +962,6 @@ pub fn addCases(ctx: *TestContext) !void {
,
"hello\nhello\nhello\nhello\nhello\n",
);
// comptime switch
// Basic for loop
case.addCompareOutput(
\\pub export fn _start() noreturn {
\\ assert(foo() == 1);
\\ exit();
\\}
\\
\\fn foo() u32 {
\\ const a: comptime_int = 1;
\\ var b: u32 = 0;
\\ switch (a) {
\\ 1 => b = 1,
\\ 2 => b = 2,
\\ else => unreachable,
\\ }
\\ return b;
\\}
\\
\\pub fn assert(ok: bool) void {
\\ if (!ok) unreachable; // assertion failure
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
}
{