mirror of https://github.com/ziglang/zig.git
synced 2026-02-13 21:08:36 +00:00
commit 5fed721290
@@ -2570,7 +2570,18 @@ pub fn analyzeIsNull(
     operand: *Inst,
     invert_logic: bool,
 ) InnerError!*Inst {
-    return self.fail(scope, src, "TODO implement analysis of isnull and isnotnull", .{});
+    if (operand.value()) |opt_val| {
+        const is_null = opt_val.isNull();
+        const bool_value = if (invert_logic) !is_null else is_null;
+        return self.constBool(scope, src, bool_value);
+    }
+    const b = try self.requireRuntimeBlock(scope, src);
+    const inst_tag: Inst.Tag = if (invert_logic) .isnonnull else .isnull;
+    return self.addUnOp(b, src, Type.initTag(.bool), inst_tag, operand);
 }
 
+pub fn analyzeIsErr(self: *Module, scope: *Scope, src: usize, operand: *Inst) InnerError!*Inst {
+    return self.fail(scope, src, "TODO implement analysis of iserr", .{});
+}
+
 /// Asserts that lhs and rhs types are both numeric.
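
With this change, `analyzeIsNull` folds the result at compile time when the operand's value is known, and otherwise emits an `isnull`/`isnonnull` instruction into a runtime block. A minimal sketch of the user-level code this analyzes (illustrative only; `demo` is not part of the commit):

    fn demo(x: ?u32) bool {
        // Comptime-known operand: folded to a constant bool.
        // Runtime operand: lowered to an `isnull` instruction.
        return x == null;
    }
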
@@ -273,22 +273,22 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
         .AnyFrameType => return rlWrap(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)),
         .ErrorSetDecl => return errorSetDecl(mod, scope, rl, node.castTag(.ErrorSetDecl).?),
         .ErrorType => return rlWrap(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)),
+        .For => return forExpr(mod, scope, rl, node.castTag(.For).?),
+        .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?),
+        .Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?),
 
         .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}),
-        .Catch => return mod.failNode(scope, node, "TODO implement astgen.expr for .Catch", .{}),
         .Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}),
         .OrElse => return mod.failNode(scope, node, "TODO implement astgen.expr for .OrElse", .{}),
         .Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}),
         .Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}),
         .Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}),
         .Slice => return mod.failNode(scope, node, "TODO implement astgen.expr for .Slice", .{}),
-        .ArrayAccess => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayAccess", .{}),
         .ArrayInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializer", .{}),
         .ArrayInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializerDot", .{}),
         .StructInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializer", .{}),
         .StructInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializerDot", .{}),
         .Switch => return mod.failNode(scope, node, "TODO implement astgen.expr for .Switch", .{}),
-        .For => return mod.failNode(scope, node, "TODO implement astgen.expr for .For", .{}),
         .Suspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Suspend", .{}),
         .Continue => return mod.failNode(scope, node, "TODO implement astgen.expr for .Continue", .{}),
         .AnyType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyType", .{}),
@@ -497,7 +497,7 @@ fn varDecl(
     const var_data: struct { result_loc: ResultLoc, alloc: *zir.Inst } = if (node.getTrailer("type_node")) |type_node| a: {
         const type_inst = try typeExpr(mod, scope, type_node);
         const alloc = try addZIRUnOp(mod, scope, name_src, .alloc, type_inst);
-        break :a .{ .alloc = try addZIRUnOp(mod, scope, name_src, .alloc, type_inst), .result_loc = .{ .ptr = alloc } };
+        break :a .{ .alloc = alloc, .result_loc = .{ .ptr = alloc } };
     } else a: {
         const alloc = try addZIRNoOp(mod, scope, name_src, .alloc_inferred);
         break :a .{ .alloc = alloc, .result_loc = .{ .inferred_ptr = alloc.castTag(.alloc_inferred).? } };

@@ -624,7 +624,7 @@ fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo,
         .One => if (mutable) T.single_mut_ptr_type else T.single_const_ptr_type,
         .Many => if (mutable) T.many_mut_ptr_type else T.many_const_ptr_type,
         .C => if (mutable) T.c_mut_ptr_type else T.c_const_ptr_type,
-        .Slice => if (mutable) T.mut_slice_type else T.mut_slice_type,
+        .Slice => if (mutable) T.mut_slice_type else T.const_slice_type,
     }, child_type);
 }
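
The one-line fix above corrects the `.Slice` arm, which previously returned the mutable slice type for both branches. Illustrative Zig showing the two types that must be distinguished (example code, not from the commit):

    var buf = [_]u8{ 1, 2, 3 };
    const mutable: []u8 = &buf; // T.mut_slice_type
    const constant: []const u8 = &buf; // T.const_slice_type
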
@@ -750,6 +750,93 @@ fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*
     });
 }
 
+fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) InnerError!*zir.Inst {
+    const tree = scope.tree();
+    const src = tree.token_locs[node.op_token].start;
+
+    const err_union_ptr = try expr(mod, scope, .ref, node.lhs);
+    // TODO we could avoid an unnecessary copy if .iserr took a pointer
+    const err_union = try addZIRUnOp(mod, scope, src, .deref, err_union_ptr);
+    const cond = try addZIRUnOp(mod, scope, src, .iserr, err_union);
+
+    var block_scope: Scope.GenZIR = .{
+        .parent = scope,
+        .decl = scope.decl().?,
+        .arena = scope.arena(),
+        .instructions = .{},
+    };
+    defer block_scope.instructions.deinit(mod.gpa);
+
+    const condbr = try addZIRInstSpecial(mod, &block_scope.base, src, zir.Inst.CondBr, .{
+        .condition = cond,
+        .then_body = undefined, // populated below
+        .else_body = undefined, // populated below
+    }, .{});
+
+    const block = try addZIRInstBlock(mod, scope, src, .{
+        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+    });
+
+    // Most result location types can be forwarded directly; however
+    // if we need to write to a pointer which has an inferred type,
+    // proper type inference requires peer type resolution on the if's
+    // branches.
+    const branch_rl: ResultLoc = switch (rl) {
+        .discard, .none, .ty, .ptr, .ref => rl,
+        .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
+    };
+
+    var err_scope: Scope.GenZIR = .{
+        .parent = scope,
+        .decl = block_scope.decl,
+        .arena = block_scope.arena,
+        .instructions = .{},
+    };
+    defer err_scope.instructions.deinit(mod.gpa);
+
+    var err_val_scope: Scope.LocalVal = undefined;
+    const err_sub_scope = blk: {
+        const payload = node.payload orelse
+            break :blk &err_scope.base;
+
+        const err_name = tree.tokenSlice(payload.castTag(.Payload).?.error_symbol.firstToken());
+        if (mem.eql(u8, err_name, "_"))
+            break :blk &err_scope.base;
+
+        const unwrapped_err_ptr = try addZIRUnOp(mod, &err_scope.base, src, .unwrap_err_code, err_union_ptr);
+        err_val_scope = .{
+            .parent = &err_scope.base,
+            .gen_zir = &err_scope,
+            .name = err_name,
+            .inst = try addZIRUnOp(mod, &err_scope.base, src, .deref, unwrapped_err_ptr),
+        };
+        break :blk &err_val_scope.base;
+    };
+
+    _ = try addZIRInst(mod, &err_scope.base, src, zir.Inst.Break, .{
+        .block = block,
+        .operand = try expr(mod, err_sub_scope, branch_rl, node.rhs),
+    }, .{});
+
+    var not_err_scope: Scope.GenZIR = .{
+        .parent = scope,
+        .decl = block_scope.decl,
+        .arena = block_scope.arena,
+        .instructions = .{},
+    };
+    defer not_err_scope.instructions.deinit(mod.gpa);
+
+    const unwrapped_payload = try addZIRUnOp(mod, &not_err_scope.base, src, .unwrap_err_unsafe, err_union_ptr);
+    _ = try addZIRInst(mod, &not_err_scope.base, src, zir.Inst.Break, .{
+        .block = block,
+        .operand = unwrapped_payload,
+    }, .{});
+
+    condbr.positionals.then_body = .{ .instructions = try err_scope.arena.dupe(*zir.Inst, err_scope.instructions.items) };
+    condbr.positionals.else_body = .{ .instructions = try not_err_scope.arena.dupe(*zir.Inst, not_err_scope.instructions.items) };
+    return rlWrap(mod, scope, rl, &block.base);
+}
+
 /// Return whether the identifier names of two tokens are equal. Resolves @"" tokens without allocating.
 /// OK in theory it could do it without allocating. This implementation allocates when the @"" form is used.
 fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: ast.TokenIndex) !bool {
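
`catchExpr` lowers a `catch` expression into an `iserr` check plus a conditional branch: the error branch evaluates the right-hand side (optionally binding the error via `unwrap_err_code`), and the other branch unwraps the payload with `unwrap_err_unsafe`. A sketch of the syntax it handles (illustrative, not part of the diff):

    fn demo(r: anyerror!u32) u32 {
        return r catch |err| blk: {
            _ = err; // payload binding; a `_` payload skips the binding
            break :blk 0;
        };
    }
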
@@ -794,9 +881,17 @@ fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfix
     const lhs = try expr(mod, scope, .ref, node.lhs);
     const field_name = try identifierStringInst(mod, scope, node.rhs.castTag(.Identifier).?);
 
-    const pointer = try addZIRInst(mod, scope, src, zir.Inst.FieldPtr, .{ .object_ptr = lhs, .field_name = field_name }, .{});
-    if (rl == .ref) return pointer;
-    return rlWrap(mod, scope, rl, try addZIRUnOp(mod, scope, src, .deref, pointer));
+    return rlWrapPtr(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.FieldPtr, .{ .object_ptr = lhs, .field_name = field_name }, .{}));
 }
 
+fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.ArrayAccess) InnerError!*zir.Inst {
+    const tree = scope.tree();
+    const src = tree.token_locs[node.rtoken].start;
+
+    const array_ptr = try expr(mod, scope, .ref, node.lhs);
+    const index = try expr(mod, scope, .none, node.index_expr);
+
+    return rlWrapPtr(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.ElemPtr, .{ .array_ptr = array_ptr, .index = index }, .{}));
+}
+
 fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst {
@@ -1080,6 +1175,12 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
         }
     }
 
+    if (while_node.label) |tok|
+        return mod.failTok(scope, tok, "TODO labeled while", .{});
+
+    if (while_node.inline_token) |tok|
+        return mod.failTok(scope, tok, "TODO inline while", .{});
+
     var expr_scope: Scope.GenZIR = .{
         .parent = scope,
         .decl = scope.decl().?,
@@ -1198,6 +1299,181 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
     return &while_block.base;
 }
 
+fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For) InnerError!*zir.Inst {
+    if (for_node.label) |tok|
+        return mod.failTok(scope, tok, "TODO labeled for", .{});
+
+    if (for_node.inline_token) |tok|
+        return mod.failTok(scope, tok, "TODO inline for", .{});
+
+    var for_scope: Scope.GenZIR = .{
+        .parent = scope,
+        .decl = scope.decl().?,
+        .arena = scope.arena(),
+        .instructions = .{},
+    };
+    defer for_scope.instructions.deinit(mod.gpa);
+
+    // setup variables and constants
+    const tree = scope.tree();
+    const for_src = tree.token_locs[for_node.for_token].start;
+    const index_ptr = blk: {
+        const usize_type = try addZIRInstConst(mod, &for_scope.base, for_src, .{
+            .ty = Type.initTag(.type),
+            .val = Value.initTag(.usize_type),
+        });
+        const index_ptr = try addZIRUnOp(mod, &for_scope.base, for_src, .alloc, usize_type);
+        // initialize to zero
+        const zero = try addZIRInstConst(mod, &for_scope.base, for_src, .{
+            .ty = Type.initTag(.usize),
+            .val = Value.initTag(.zero),
+        });
+        _ = try addZIRBinOp(mod, &for_scope.base, for_src, .store, index_ptr, zero);
+        break :blk index_ptr;
+    };
+    const array_ptr = try expr(mod, &for_scope.base, .ref, for_node.array_expr);
+    _ = try addZIRUnOp(mod, &for_scope.base, for_node.array_expr.firstToken(), .ensure_indexable, array_ptr);
+    const cond_src = tree.token_locs[for_node.array_expr.firstToken()].start;
+    const len_ptr = try addZIRInst(mod, &for_scope.base, cond_src, zir.Inst.FieldPtr, .{
+        .object_ptr = array_ptr,
+        .field_name = try addZIRInst(mod, &for_scope.base, cond_src, zir.Inst.Str, .{ .bytes = "len" }, .{}),
+    }, .{});
+
+    var loop_scope: Scope.GenZIR = .{
+        .parent = &for_scope.base,
+        .decl = for_scope.decl,
+        .arena = for_scope.arena,
+        .instructions = .{},
+    };
+    defer loop_scope.instructions.deinit(mod.gpa);
+
+    var cond_scope: Scope.GenZIR = .{
+        .parent = &loop_scope.base,
+        .decl = loop_scope.decl,
+        .arena = loop_scope.arena,
+        .instructions = .{},
+    };
+    defer cond_scope.instructions.deinit(mod.gpa);
+
+    // check condition i < array_expr.len
+    const index = try addZIRUnOp(mod, &cond_scope.base, cond_src, .deref, index_ptr);
+    const len = try addZIRUnOp(mod, &cond_scope.base, cond_src, .deref, len_ptr);
+    const cond = try addZIRBinOp(mod, &cond_scope.base, cond_src, .cmp_lt, index, len);
+
+    const condbr = try addZIRInstSpecial(mod, &cond_scope.base, for_src, zir.Inst.CondBr, .{
+        .condition = cond,
+        .then_body = undefined, // populated below
+        .else_body = undefined, // populated below
+    }, .{});
+    const cond_block = try addZIRInstBlock(mod, &loop_scope.base, for_src, .{
+        .instructions = try loop_scope.arena.dupe(*zir.Inst, cond_scope.instructions.items),
+    });
+
+    // increment index variable
+    const one = try addZIRInstConst(mod, &loop_scope.base, for_src, .{
+        .ty = Type.initTag(.usize),
+        .val = Value.initTag(.one),
+    });
+    const index_2 = try addZIRUnOp(mod, &loop_scope.base, cond_src, .deref, index_ptr);
+    const index_plus_one = try addZIRBinOp(mod, &loop_scope.base, for_src, .add, index_2, one);
+    _ = try addZIRBinOp(mod, &loop_scope.base, for_src, .store, index_ptr, index_plus_one);
+
+    // looping stuff
+    const loop = try addZIRInstLoop(mod, &for_scope.base, for_src, .{
+        .instructions = try for_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items),
+    });
+    const for_block = try addZIRInstBlock(mod, scope, for_src, .{
+        .instructions = try for_scope.arena.dupe(*zir.Inst, for_scope.instructions.items),
+    });
+
+    // while body
+    const then_src = tree.token_locs[for_node.body.lastToken()].start;
+    var then_scope: Scope.GenZIR = .{
+        .parent = &cond_scope.base,
+        .decl = cond_scope.decl,
+        .arena = cond_scope.arena,
+        .instructions = .{},
+    };
+    defer then_scope.instructions.deinit(mod.gpa);
+
+    // Most result location types can be forwarded directly; however
+    // if we need to write to a pointer which has an inferred type,
+    // proper type inference requires peer type resolution on the while's
+    // branches.
+    const branch_rl: ResultLoc = switch (rl) {
+        .discard, .none, .ty, .ptr, .ref => rl,
+        .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = for_block },
+    };
+
+    var index_scope: Scope.LocalPtr = undefined;
+    const then_sub_scope = blk: {
+        const payload = for_node.payload.castTag(.PointerIndexPayload).?;
+        const is_ptr = payload.ptr_token != null;
+        const value_name = tree.tokenSlice(payload.value_symbol.firstToken());
+        if (!mem.eql(u8, value_name, "_")) {
+            return mod.failNode(&then_scope.base, payload.value_symbol, "TODO implement for value payload", .{});
+        } else if (is_ptr) {
+            return mod.failTok(&then_scope.base, payload.ptr_token.?, "pointer modifier invalid on discard", .{});
+        }
+
+        const index_symbol_node = payload.index_symbol orelse
+            break :blk &then_scope.base;
+
+        const index_name = tree.tokenSlice(index_symbol_node.firstToken());
+        if (mem.eql(u8, index_name, "_")) {
+            break :blk &then_scope.base;
+        }
+        // TODO make this const without an extra copy?
+        index_scope = .{
+            .parent = &then_scope.base,
+            .gen_zir = &then_scope,
+            .name = index_name,
+            .ptr = index_ptr,
+        };
+        break :blk &index_scope.base;
+    };
+
+    const then_result = try expr(mod, then_sub_scope, branch_rl, for_node.body);
+    if (!then_result.tag.isNoReturn()) {
+        _ = try addZIRInst(mod, then_sub_scope, then_src, zir.Inst.Break, .{
+            .block = cond_block,
+            .operand = then_result,
+        }, .{});
+    }
+    condbr.positionals.then_body = .{
+        .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items),
+    };
+
+    // else branch
+    var else_scope: Scope.GenZIR = .{
+        .parent = &cond_scope.base,
+        .decl = cond_scope.decl,
+        .arena = cond_scope.arena,
+        .instructions = .{},
+    };
+    defer else_scope.instructions.deinit(mod.gpa);
+
+    if (for_node.@"else") |else_node| {
+        const else_src = tree.token_locs[else_node.body.lastToken()].start;
+        const else_result = try expr(mod, &else_scope.base, branch_rl, else_node.body);
+        if (!else_result.tag.isNoReturn()) {
+            _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.Break, .{
+                .block = for_block,
+                .operand = else_result,
+            }, .{});
+        }
+    } else {
+        const else_src = tree.token_locs[for_node.lastToken()].start;
+        _ = try addZIRInst(mod, &else_scope.base, else_src, zir.Inst.BreakVoid, .{
+            .block = for_block,
+        }, .{});
+    }
+    condbr.positionals.else_body = .{
+        .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+    };
+    return &for_block.base;
+}
+
 fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
     const tree = scope.tree();
     const src = tree.token_locs[cfe.ltoken].start;
 
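
`forExpr` desugars a `for` loop into an index variable plus a condition block, mirroring the `while` lowering above. Roughly, the generated ZIR behaves like this hand-written equivalent (a sketch under the commit's current limits: value payloads are still a TODO, only the index capture works):

    var i: usize = 0; // `index_ptr`, initialized to zero
    while (i < array.len) : (i += 1) { // `cmp_lt` against the `len` field
        // body runs with the index binding in scope
    }
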
@@ -829,6 +829,16 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
             // No side effects, so if it's unreferenced, do nothing.
             if (inst.base.isUnused())
                 return MCValue.dead;
+
+            const operand = try self.resolveInst(inst.operand);
+            const info_a = inst.operand.ty.intInfo(self.target.*);
+            const info_b = inst.base.ty.intInfo(self.target.*);
+            if (info_a.signed != info_b.signed)
+                return self.fail(inst.base.src, "TODO gen intcast sign safety in semantic analysis", .{});
+
+            if (info_a.bits == info_b.bits)
+                return operand;
+
             switch (arch) {
                 else => return self.fail(inst.base.src, "TODO implement intCast for {}", .{self.target.cpu.arch}),
             }
@@ -2039,15 +2049,29 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                     mem.writeIntLittle(u32, self.code.addManyAsArrayAssumeCapacity(4), x);
                 },
                 8 => {
-                    return self.fail(src, "TODO implement set abi_size=8 stack variable with immediate", .{});
+                    // We have a positive stack offset value but we want a twos complement negative
+                    // offset from rbp, which is at the top of the stack frame.
+                    const negative_offset = @intCast(i8, -@intCast(i32, adj_off));
+                    const twos_comp = @bitCast(u8, negative_offset);
+
+                    // A 64 bit write to memory would take two movs anyway, so we
+                    // instead just use two 32 bit writes to avoid register allocation.
+                    try self.code.ensureCapacity(self.code.items.len + 14);
+                    var buf: [8]u8 = undefined;
+                    mem.writeIntLittle(u64, &buf, x_big);
+
+                    // mov DWORD PTR [rbp+offset+4], immediate
+                    self.code.appendSliceAssumeCapacity(&[_]u8{ 0xc7, 0x45, twos_comp + 4 });
+                    self.code.appendSliceAssumeCapacity(buf[4..8]);
+
+                    // mov DWORD PTR [rbp+offset], immediate
+                    self.code.appendSliceAssumeCapacity(&[_]u8{ 0xc7, 0x45, twos_comp });
+                    self.code.appendSliceAssumeCapacity(buf[0..4]);
                 },
                 else => {
                     return self.fail(src, "TODO implement set abi_size=large stack variable with immediate", .{});
                 },
             }
-            if (x_big <= math.maxInt(u32)) {} else {
-                return self.fail(src, "TODO implement set stack variable with large immediate", .{});
-            }
         },
         .embedded_in_code => |code_offset| {
             return self.fail(src, "TODO implement set stack variable from embedded_in_code", .{});
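
For reference, with an 8-byte slot at rbp-8 and the test constant 0xFFEEDDCCBBAA9988, the two stores above encode as 14 bytes total; `0xc7 0x45` is `mov r/m32, imm32` with a disp8 off rbp (hypothetical disassembly, assuming adj_off = 8):

    c7 45 fc cc dd ee ff    mov DWORD PTR [rbp-0x4], 0xffeeddcc  ; high half, buf[4..8]
    c7 45 f8 88 99 aa bb    mov DWORD PTR [rbp-0x8], 0xbbaa9988  ; low half, buf[0..4]
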
@@ -2675,6 +2675,13 @@ pub const Type = extern union {
         };
     }
 
+    pub fn isIndexable(self: Type) bool {
+        const zig_tag = self.zigTypeTag();
+        // TODO tuples are indexable
+        return zig_tag == .Array or zig_tag == .Vector or self.isSlice() or
+            (self.isSinglePointer() and self.elemType().zigTypeTag() == .Array);
+    }
+
     /// This enum does not directly correspond to `std.builtin.TypeId` because
     /// it has extra enum tags in it, as a way of using less memory. For example,
     /// even though Zig recognizes `*align(10) i32` and `*i32` both as Pointer types
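
`isIndexable` admits arrays, vectors, slices, and single pointers to arrays (tuples are still a TODO, per the comment). Illustrative examples of each accepted shape (not from the commit):

    const arr: [3]u8 = .{ 1, 2, 3 }; // .Array
    const slice: []const u8 = &arr; // slice
    const ptr: *const [3]u8 = &arr; // single pointer to an array
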
@@ -65,6 +65,7 @@ pub const Value = extern union {
 
         undef,
         zero,
+        one,
         void_value,
         unreachable_value,
         empty_array,

@@ -174,6 +175,7 @@ pub const Value = extern union {
             .anyframe_type,
             .undef,
             .zero,
+            .one,
             .void_value,
             .unreachable_value,
             .empty_array,

@@ -313,6 +315,7 @@ pub const Value = extern union {
             .null_value => return out_stream.writeAll("null"),
             .undef => return out_stream.writeAll("undefined"),
             .zero => return out_stream.writeAll("0"),
+            .one => return out_stream.writeAll("1"),
             .void_value => return out_stream.writeAll("{}"),
             .unreachable_value => return out_stream.writeAll("unreachable"),
             .bool_true => return out_stream.writeAll("true"),

@@ -447,6 +450,7 @@ pub const Value = extern union {
 
             .undef,
             .zero,
+            .one,
             .void_value,
             .unreachable_value,
             .empty_array,

@@ -546,7 +550,9 @@ pub const Value = extern union {
             .bool_false,
             => return BigIntMutable.init(&space.limbs, 0).toConst(),
 
-            .bool_true => return BigIntMutable.init(&space.limbs, 1).toConst(),
+            .one,
+            .bool_true,
+            => return BigIntMutable.init(&space.limbs, 1).toConst(),
 
             .int_u64 => return BigIntMutable.init(&space.limbs, self.cast(Payload.Int_u64).?.int).toConst(),
             .int_i64 => return BigIntMutable.init(&space.limbs, self.cast(Payload.Int_i64).?.int).toConst(),

@@ -627,7 +633,9 @@ pub const Value = extern union {
             .bool_false,
             => return 0,
 
-            .bool_true => return 1,
+            .one,
+            .bool_true,
+            => return 1,
 
             .int_u64 => return self.cast(Payload.Int_u64).?.int,
             .int_i64 => return @intCast(u64, self.cast(Payload.Int_i64).?.int),

@@ -708,7 +716,9 @@ pub const Value = extern union {
             .bool_false,
             => return 0,
 
-            .bool_true => return 1,
+            .one,
+            .bool_true,
+            => return 1,
 
             .int_u64 => return @intCast(i64, self.cast(Payload.Int_u64).?.int),
             .int_i64 => return self.cast(Payload.Int_i64).?.int,

@@ -734,6 +744,7 @@ pub const Value = extern union {
             .float_128 => @floatCast(T, self.cast(Payload.Float_128).?.val),
 
             .zero => 0,
+            .one => 1,
             .int_u64 => @intToFloat(T, self.cast(Payload.Int_u64).?.int),
             .int_i64 => @intToFloat(T, self.cast(Payload.Int_i64).?.int),
 

@@ -814,7 +825,9 @@ pub const Value = extern union {
             .bool_false,
             => return 0,
 
-            .bool_true => return 1,
+            .one,
+            .bool_true,
+            => return 1,
 
             .int_u64 => {
                 const x = self.cast(Payload.Int_u64).?.int;

@@ -900,7 +913,9 @@ pub const Value = extern union {
             .bool_false,
             => return true,
 
-            .bool_true => {
+            .one,
+            .bool_true,
+            => {
                 const info = ty.intInfo(target);
                 if (info.signed) {
                     return info.bits >= 2;

@@ -1064,7 +1079,9 @@ pub const Value = extern union {
             .@"error",
             => unreachable,
 
-            .zero => false,
+            .zero,
+            .one,
+            => false,
 
             .float_16 => @rem(self.cast(Payload.Float_16).?.val, 1) != 0,
             .float_32 => @rem(self.cast(Payload.Float_32).?.val, 1) != 0,

@@ -1140,7 +1157,9 @@ pub const Value = extern union {
             .bool_false,
             => .eq,
 
-            .bool_true => .gt,
+            .one,
+            .bool_true,
+            => .gt,
 
             .int_u64 => std.math.order(lhs.cast(Payload.Int_u64).?.int, 0),
             .int_i64 => std.math.order(lhs.cast(Payload.Int_i64).?.int, 0),

@@ -1257,6 +1276,7 @@ pub const Value = extern union {
             .enum_literal_type,
             .anyframe_type,
             .zero,
+            .one,
             .bool_true,
             .bool_false,
             .null_value,

@@ -1339,6 +1359,7 @@ pub const Value = extern union {
             .enum_literal_type,
             .anyframe_type,
             .zero,
+            .one,
             .bool_true,
             .bool_false,
             .null_value,

@@ -1438,6 +1459,7 @@ pub const Value = extern union {
             .enum_literal_type,
             .anyframe_type,
             .zero,
+            .one,
             .empty_array,
             .bool_true,
             .bool_false,
@@ -137,6 +137,8 @@ pub const Inst = struct {
         ensure_result_used,
         /// Emits a compile error if an error is ignored.
         ensure_result_non_error,
+        /// Emits a compile error if operand cannot be indexed.
+        ensure_indexable,
         /// Create a `E!T` type.
         error_union_type,
         /// Create an error set.

@@ -251,6 +253,8 @@ pub const Inst = struct {
         unwrap_err_safe,
         /// Same as previous, but without safety checks. Used for orelse, if and while
         unwrap_err_unsafe,
+        /// Gets the error code value of an error union
+        unwrap_err_code,
         /// Takes a *E!T and raises a compiler error if T != void
         ensure_err_payload_void,
         /// Enum literal

@@ -278,6 +282,7 @@ pub const Inst = struct {
                 .alloc,
                 .ensure_result_used,
                 .ensure_result_non_error,
+                .ensure_indexable,
                 .bitcast_result_ptr,
                 .ref,
                 .bitcast_ref,

@@ -295,6 +300,7 @@ pub const Inst = struct {
                 .unwrap_optional_unsafe,
                 .unwrap_err_safe,
                 .unwrap_err_unsafe,
+                .unwrap_err_code,
                 .ensure_err_payload_void,
                 .anyframe_type,
                 .bitnot,

@@ -409,6 +415,7 @@ pub const Inst = struct {
                 .elemptr,
                 .ensure_result_used,
                 .ensure_result_non_error,
+                .ensure_indexable,
                 .@"export",
                 .floatcast,
                 .fieldptr,

@@ -450,6 +457,7 @@ pub const Inst = struct {
                 .unwrap_optional_unsafe,
                 .unwrap_err_safe,
                 .unwrap_err_unsafe,
+                .unwrap_err_code,
                 .ptr_type,
                 .ensure_err_payload_void,
                 .enum_literal,
@@ -48,6 +48,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .declval_in_module => return analyzeInstDeclValInModule(mod, scope, old_inst.castTag(.declval_in_module).?),
         .ensure_result_used => return analyzeInstEnsureResultUsed(mod, scope, old_inst.castTag(.ensure_result_used).?),
         .ensure_result_non_error => return analyzeInstEnsureResultNonError(mod, scope, old_inst.castTag(.ensure_result_non_error).?),
+        .ensure_indexable => return analyzeInstEnsureIndexable(mod, scope, old_inst.castTag(.ensure_indexable).?),
         .ref => return analyzeInstRef(mod, scope, old_inst.castTag(.ref).?),
         .ret_ptr => return analyzeInstRetPtr(mod, scope, old_inst.castTag(.ret_ptr).?),
         .ret_type => return analyzeInstRetType(mod, scope, old_inst.castTag(.ret_type).?),

@@ -111,7 +112,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .condbr => return analyzeInstCondBr(mod, scope, old_inst.castTag(.condbr).?),
         .isnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnull).?, true),
         .isnonnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnonnull).?, false),
-        .iserr => return analyzeInstIsErr(mod, scope, old_inst.castTag(.iserr).?, true),
+        .iserr => return analyzeInstIsErr(mod, scope, old_inst.castTag(.iserr).?),
         .boolnot => return analyzeInstBoolNot(mod, scope, old_inst.castTag(.boolnot).?),
         .typeof => return analyzeInstTypeOf(mod, scope, old_inst.castTag(.typeof).?),
         .optional_type => return analyzeInstOptionalType(mod, scope, old_inst.castTag(.optional_type).?),

@@ -119,6 +120,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .unwrap_optional_unsafe => return analyzeInstUnwrapOptional(mod, scope, old_inst.castTag(.unwrap_optional_unsafe).?, false),
         .unwrap_err_safe => return analyzeInstUnwrapErr(mod, scope, old_inst.castTag(.unwrap_err_safe).?, true),
         .unwrap_err_unsafe => return analyzeInstUnwrapErr(mod, scope, old_inst.castTag(.unwrap_err_unsafe).?, false),
+        .unwrap_err_code => return analyzeInstUnwrapErrCode(mod, scope, old_inst.castTag(.unwrap_err_code).?),
         .ensure_err_payload_void => return analyzeInstEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?),
         .array_type => return analyzeInstArrayType(mod, scope, old_inst.castTag(.array_type).?),
         .array_type_sentinel => return analyzeInstArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?),

@@ -382,6 +384,19 @@ fn analyzeInstEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.
     }
 }
 
+fn analyzeInstEnsureIndexable(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+    const operand = try resolveInst(mod, scope, inst.positionals.operand);
+    const elem_ty = operand.ty.elemType();
+    if (elem_ty.isIndexable()) {
+        return mod.constVoid(scope, operand.src);
+    } else {
+        // TODO error notes
+        // error: type '{}' does not support indexing
+        // note: for loop operand must be an array, a slice or a tuple
+        return mod.fail(scope, operand.src, "for loop operand must be an array, a slice or a tuple", .{});
+    }
+}
+
 fn analyzeInstAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
     const var_type = try resolveType(mod, scope, inst.positionals.operand);
     // TODO this should happen only for var allocs
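
`analyzeInstEnsureIndexable` turns an unindexable `for` operand into a compile error. Illustrative code that would now be rejected (example only):

    fn demo(x: u32) void {
        for (x) |_| {} // error: for loop operand must be an array, a slice or a tuple
    }
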
@@ -786,11 +801,12 @@ fn analyzeInstUnwrapOptional(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp
     const operand = try resolveInst(mod, scope, unwrap.positionals.operand);
     assert(operand.ty.zigTypeTag() == .Pointer);
 
-    if (operand.ty.elemType().zigTypeTag() != .Optional) {
-        return mod.fail(scope, unwrap.base.src, "expected optional type, found {}", .{operand.ty.elemType()});
+    const elem_type = operand.ty.elemType();
+    if (elem_type.zigTypeTag() != .Optional) {
+        return mod.fail(scope, unwrap.base.src, "expected optional type, found {}", .{elem_type});
     }
 
-    const child_type = try operand.ty.elemType().optionalChildAlloc(scope.arena());
+    const child_type = try elem_type.optionalChildAlloc(scope.arena());
     const child_pointer = try mod.simplePtrType(scope, unwrap.base.src, child_type, operand.ty.isConstPtr(), .One);
 
     if (operand.value()) |val| {

@@ -815,6 +831,10 @@ fn analyzeInstUnwrapErr(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp, saf
     return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstUnwrapErr", .{});
 }
 
+fn analyzeInstUnwrapErrCode(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
+    return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstUnwrapErrCode", .{});
+}
+
 fn analyzeInstEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp) InnerError!*Inst {
     return mod.fail(scope, unwrap.base.src, "TODO implement analyzeInstEnsureErrPayloadVoid", .{});
 }

@@ -950,7 +970,8 @@ fn analyzeInstFieldPtr(mod: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPtr
             const entry = if (val.cast(Value.Payload.ErrorSet)) |payload|
                 (payload.fields.getEntry(field_name) orelse
                     return mod.fail(scope, fieldptr.base.src, "no error named '{}' in '{}'", .{ field_name, child_type })).*
-            else try mod.getErrorValue(field_name);
+            else
+                try mod.getErrorValue(field_name);
 
             const error_payload = try scope.arena().create(Value.Payload.Error);
             error_payload.* = .{

@@ -1062,9 +1083,19 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne
     const array_ptr = try resolveInst(mod, scope, inst.positionals.array_ptr);
     const uncasted_index = try resolveInst(mod, scope, inst.positionals.index);
     const elem_index = try mod.coerce(scope, Type.initTag(.usize), uncasted_index);
 
+    const elem_ty = switch (array_ptr.ty.zigTypeTag()) {
+        .Pointer => array_ptr.ty.elemType(),
+        else => return mod.fail(scope, inst.positionals.array_ptr.src, "expected pointer, found '{}'", .{array_ptr.ty}),
+    };
+    if (!elem_ty.isIndexable()) {
+        return mod.fail(scope, inst.base.src, "array access of non-array type '{}'", .{elem_ty});
+    }
+
-    if (array_ptr.ty.isSinglePointer() and array_ptr.ty.elemType().zigTypeTag() == .Array) {
-        if (array_ptr.value()) |array_ptr_val| {
+    if (elem_ty.isSinglePointer() and elem_ty.elemType().zigTypeTag() == .Array) {
+        // we have to deref the ptr operand to get the actual array pointer
+        const array_ptr_deref = try mod.analyzeDeref(scope, inst.base.src, array_ptr, inst.positionals.array_ptr.src);
+        if (array_ptr_deref.value()) |array_ptr_val| {
             if (elem_index.value()) |index_val| {
                 // Both array pointer and index are compile-time known.
                 const index_u64 = index_val.toUnsignedInt();

@@ -1075,7 +1106,7 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne
                 const type_payload = try scope.arena().create(Type.Payload.PointerSimple);
                 type_payload.* = .{
                     .base = .{ .tag = .single_const_pointer },
-                    .pointee_type = array_ptr.ty.elemType().elemType(),
+                    .pointee_type = elem_ty.elemType().elemType(),
                 };
 
                 return mod.constInst(scope, inst.base.src, .{

@@ -1274,17 +1305,7 @@ fn analyzeInstCmp(
     {
         // comparing null with optionals
         const opt_operand = if (lhs_ty_tag == .Optional) lhs else rhs;
-        if (opt_operand.value()) |opt_val| {
-            const is_null = opt_val.isNull();
-            return mod.constBool(scope, inst.base.src, if (op == .eq) is_null else !is_null);
-        }
-        const b = try mod.requireRuntimeBlock(scope, inst.base.src);
-        const inst_tag: Inst.Tag = switch (op) {
-            .eq => .isnull,
-            .neq => .isnonnull,
-            else => unreachable,
-        };
-        return mod.addUnOp(b, inst.base.src, Type.initTag(.bool), inst_tag, opt_operand);
+        return mod.analyzeIsNull(scope, inst.base.src, opt_operand, op == .neq);
     } else if (is_equality_cmp and
         ((lhs_ty_tag == .Null and rhs.ty.isCPtr()) or (rhs_ty_tag == .Null and lhs.ty.isCPtr())))
     {

@@ -1332,8 +1353,9 @@ fn analyzeInstIsNonNull(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, inver
     return mod.analyzeIsNull(scope, inst.base.src, operand, invert_logic);
 }
 
-fn analyzeInstIsErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp, invert_logic: bool) InnerError!*Inst {
-    return mod.fail(scope, inst.base.src, "TODO implement analyzeInstIsErr", .{});
+fn analyzeInstIsErr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+    const operand = try resolveInst(mod, scope, inst.positionals.operand);
+    return mod.analyzeIsErr(scope, inst.base.src, operand);
 }
 
 fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerError!*Inst {
@@ -820,6 +820,90 @@ pub fn addCases(ctx: *TestContext) !void {
         ,
             "",
         );
 
+        // Array access.
+        case.addCompareOutput(
+            \\export fn _start() noreturn {
+            \\    assert("hello"[0] == 'h');
+            \\
+            \\    exit();
+            \\}
+            \\
+            \\pub fn assert(ok: bool) void {
+            \\    if (!ok) unreachable; // assertion failure
+            \\}
+            \\
+            \\fn exit() noreturn {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (231),
+            \\          [arg1] "{rdi}" (0)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    unreachable;
+            \\}
+        ,
+            "",
+        );
+
+        // 64bit set stack
+        case.addCompareOutput(
+            \\export fn _start() noreturn {
+            \\    var i: u64 = 0xFFEEDDCCBBAA9988;
+            \\    assert(i == 0xFFEEDDCCBBAA9988);
+            \\
+            \\    exit();
+            \\}
+            \\
+            \\pub fn assert(ok: bool) void {
+            \\    if (!ok) unreachable; // assertion failure
+            \\}
+            \\
+            \\fn exit() noreturn {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (231),
+            \\          [arg1] "{rdi}" (0)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    unreachable;
+            \\}
+        ,
+            "",
+        );
+
+        // Basic for loop
+        case.addCompareOutput(
+            \\export fn _start() noreturn {
+            \\    for ("hello") |_| print();
+            \\
+            \\    exit();
+            \\}
+            \\
+            \\fn print() void {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (1),
+            \\          [arg1] "{rdi}" (1),
+            \\          [arg2] "{rsi}" (@ptrToInt("hello\n")),
+            \\          [arg3] "{rdx}" (6)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    return;
+            \\}
+            \\
+            \\fn exit() noreturn {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (231),
+            \\          [arg1] "{rdi}" (0)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    unreachable;
+            \\}
+        ,
+            "hello\nhello\nhello\nhello\nhello\n",
+        );
     }
 
     {
@@ -43,10 +43,11 @@ pub fn addCases(ctx: *TestContext) !void {
         \\
         \\@entry = fn(@fnty, {
         \\  %a = str("\x32\x08\x01\x0a")
-        \\  %eptr0 = elemptr(%a, @0)
-        \\  %eptr1 = elemptr(%a, @1)
-        \\  %eptr2 = elemptr(%a, @2)
-        \\  %eptr3 = elemptr(%a, @3)
+        \\  %a_ref = ref(%a)
+        \\  %eptr0 = elemptr(%a_ref, @0)
+        \\  %eptr1 = elemptr(%a_ref, @1)
+        \\  %eptr2 = elemptr(%a_ref, @2)
+        \\  %eptr3 = elemptr(%a_ref, @3)
         \\  %v0 = deref(%eptr0)
        \\  %v1 = deref(%eptr1)
         \\  %v2 = deref(%eptr2)