From cc26cb9b2366723a8149e9f79e8252936cb69b73 Mon Sep 17 00:00:00 2001
From: Vexu
Date: Wed, 26 Aug 2020 21:07:56 +0300
Subject: [PATCH] stage2: codegen needed for basic for loop
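
astgen: reload the for loop index through index_ptr before
incrementing it, so the increment reads the index's current value, and
expose the index payload as a Scope.LocalPtr. Also dupe the for
block's instructions from for_scope's arena instead of the outer
scope's.

codegen: lower intcast as a no-op when the operand already has the
destination's signedness and bit width, and implement setting an
abi_size=8 stack variable from an immediate with two 32-bit movs
relative to rbp. The positive stack offset is negated into a two's
complement displacement byte: e.g. for adj_off = 8, the low dword is
written at displacement 0xf8 (rbp-8) and the high dword at 0xfc
(rbp-4).

zir_sema: drop the stray bool argument from the analyzeInstIsErr call.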
---
 src-self-hosted/astgen.zig   | 12 +++++---
 src-self-hosted/codegen.zig  | 32 ++++++++++++++++---
 src-self-hosted/zir_sema.zig |  2 +-
 test/stage2/test.zig         | 59 ++++++++++++++++++++++++++++++++++++
 4 files changed, 95 insertions(+), 10 deletions(-)

diff --git a/src-self-hosted/astgen.zig b/src-self-hosted/astgen.zig
index 737af24268..cb8191fcee 100644
--- a/src-self-hosted/astgen.zig
+++ b/src-self-hosted/astgen.zig
@@ -802,7 +802,7 @@ fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch)
         const err_name = tree.tokenSlice(payload.castTag(.Payload).?.error_symbol.firstToken());
         if (mem.eql(u8, err_name, "_"))
             break :blk &err_scope.base;
-
+
         const unwrapped_err_ptr = try addZIRUnOp(mod, &err_scope.base, src, .unwrap_err_code, err_union_ptr);
         err_val_scope = .{
             .parent = &err_scope.base,
@@ -1374,7 +1374,8 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
         .ty = Type.initTag(.usize),
         .val = Value.initTag(.one),
     });
-    const index_plus_one = try addZIRBinOp(mod, &loop_scope.base, for_src, .add, index, one);
+    const index_2 = try addZIRUnOp(mod, &loop_scope.base, cond_src, .deref, index_ptr);
+    const index_plus_one = try addZIRBinOp(mod, &loop_scope.base, for_src, .add, index_2, one);
     _ = try addZIRBinOp(mod, &loop_scope.base, for_src, .store, index_ptr, index_plus_one);
 
     // looping stuff
@@ -1382,7 +1383,7 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
         .instructions = try for_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items),
     });
     const for_block = try addZIRInstBlock(mod, scope, for_src, .{
-        .instructions = try scope.arena().dupe(*zir.Inst, for_scope.instructions.items),
+        .instructions = try for_scope.arena.dupe(*zir.Inst, for_scope.instructions.items),
     });
 
     // while body
@@ -1404,7 +1405,7 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
         .inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = for_block },
     };
 
-    var index_scope: Scope.LocalVal = undefined;
+    var index_scope: Scope.LocalPtr = undefined;
     const then_sub_scope = blk: {
         const payload = for_node.payload.castTag(.PointerIndexPayload).?;
         const is_ptr = payload.ptr_token != null;
@@ -1422,11 +1423,12 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
         if (mem.eql(u8, index_name, "_")) {
             break :blk &then_scope.base;
         }
+        // TODO make this const without an extra copy?
         index_scope = .{
             .parent = &then_scope.base,
             .gen_zir = &then_scope,
             .name = index_name,
-            .inst = index,
+            .ptr = index_ptr,
         };
         break :blk &index_scope.base;
     };
diff --git a/src-self-hosted/codegen.zig b/src-self-hosted/codegen.zig
index b282c2011b..7a24af976d 100644
--- a/src-self-hosted/codegen.zig
+++ b/src-self-hosted/codegen.zig
@@ -829,6 +829,16 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
             // No side effects, so if it's unreferenced, do nothing.
             if (inst.base.isUnused())
                 return MCValue.dead;
+
+            const operand = try self.resolveInst(inst.operand);
+            const info_a = inst.operand.ty.intInfo(self.target.*);
+            const info_b = inst.base.ty.intInfo(self.target.*);
+            if (info_a.signed != info_b.signed)
+                return self.fail(inst.base.src, "TODO gen intcast sign safety in semantic analysis", .{});
+
+            if (info_a.bits == info_b.bits)
+                return operand;
+
             switch (arch) {
                 else => return self.fail(inst.base.src, "TODO implement intCast for {}", .{self.target.cpu.arch}),
             }
@@ -2039,15 +2049,29 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                         mem.writeIntLittle(u32, self.code.addManyAsArrayAssumeCapacity(4), x);
                     },
                     8 => {
-                        return self.fail(src, "TODO implement set abi_size=8 stack variable with immediate", .{});
+                        // We have a positive stack offset value, but we want a two's complement
+                        // negative offset from rbp, which is at the top of the stack frame.
+                        const negative_offset = @intCast(i8, -@intCast(i32, adj_off));
+                        const twos_comp = @bitCast(u8, negative_offset);
+
+                        // A 64-bit write to memory would take two movs anyway, so we
+                        // instead just use two 32-bit writes to avoid register allocation.
+                        try self.code.ensureCapacity(self.code.items.len + 14);
+                        var buf: [8]u8 = undefined;
+                        mem.writeIntLittle(u64, &buf, x_big);
+
+                        // mov DWORD PTR [rbp+offset+4], immediate
+                        self.code.appendSliceAssumeCapacity(&[_]u8{ 0xc7, 0x45, twos_comp + 4 });
+                        self.code.appendSliceAssumeCapacity(buf[4..8]);
+
+                        // mov DWORD PTR [rbp+offset], immediate
+                        self.code.appendSliceAssumeCapacity(&[_]u8{ 0xc7, 0x45, twos_comp });
+                        self.code.appendSliceAssumeCapacity(buf[0..4]);
                     },
                     else => {
                         return self.fail(src, "TODO implement set abi_size=large stack variable with immediate", .{});
                     },
                 }
-
-                if (x_big <= math.maxInt(u32)) {} else {
-                    return self.fail(src, "TODO implement set stack variable with large immediate", .{});
-                }
             },
             .embedded_in_code => |code_offset| {
                 return self.fail(src, "TODO implement set stack variable from embedded_in_code", .{});
diff --git a/src-self-hosted/zir_sema.zig b/src-self-hosted/zir_sema.zig
index 056d6c2faa..2ac14f8bb4 100644
--- a/src-self-hosted/zir_sema.zig
+++ b/src-self-hosted/zir_sema.zig
@@ -112,7 +112,7 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .condbr => return analyzeInstCondBr(mod, scope, old_inst.castTag(.condbr).?),
         .isnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnull).?, true),
         .isnonnull => return analyzeInstIsNonNull(mod, scope, old_inst.castTag(.isnonnull).?, false),
-        .iserr => return analyzeInstIsErr(mod, scope, old_inst.castTag(.iserr).?, true),
+        .iserr => return analyzeInstIsErr(mod, scope, old_inst.castTag(.iserr).?),
         .boolnot => return analyzeInstBoolNot(mod, scope, old_inst.castTag(.boolnot).?),
         .typeof => return analyzeInstTypeOf(mod, scope, old_inst.castTag(.typeof).?),
         .optional_type => return analyzeInstOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
diff --git a/test/stage2/test.zig b/test/stage2/test.zig
index beb40f8e95..50203c7ee9 100644
--- a/test/stage2/test.zig
+++ b/test/stage2/test.zig
@@ -845,6 +845,65 @@ pub fn addCases(ctx: *TestContext) !void {
         ,
             "",
         );
+
+        // 64-bit set stack
+        case.addCompareOutput(
+            \\export fn _start() noreturn {
+            \\    var i: u64 = 0xFFEEDDCCBBAA9988;
+            \\    assert(i == 0xFFEEDDCCBBAA9988);
+            \\
+            \\    exit();
+            \\}
+            \\
+            \\pub fn assert(ok: bool) void {
+            \\    if (!ok) unreachable; // assertion failure
+            \\}
+            \\
+            \\fn exit() noreturn {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (231),
+            \\          [arg1] "{rdi}" (0)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    unreachable;
+            \\}
+        ,
+            "",
+        );
+
+        // Basic for loop
+        case.addCompareOutput(
+            \\export fn _start() noreturn {
+            \\    for ("hello") |_| print();
+            \\
+            \\    exit();
+            \\}
+            \\
+            \\fn print() void {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (1),
+            \\          [arg1] "{rdi}" (1),
+            \\          [arg2] "{rsi}" (@ptrToInt("hello\n")),
+            \\          [arg3] "{rdx}" (6)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    return;
+            \\}
+            \\
+            \\fn exit() noreturn {
+            \\    asm volatile ("syscall"
+            \\        :
+            \\        : [number] "{rax}" (231),
+            \\          [arg1] "{rdi}" (0)
+            \\        : "rcx", "r11", "memory"
+            \\    );
+            \\    unreachable;
+            \\}
+        ,
+            "hello\nhello\nhello\nhello\nhello\n",
+        );
     }
 
     {