Mirror of https://github.com/ziglang/zig.git (synced 2026-02-06 06:27:05 +00:00)

Merge remote-tracking branch 'origin/master' into llvm12

commit 93cf9560b1
@@ -114,13 +114,12 @@ if [ "${BUILD_REASON}" != "PullRequest" ]; then
SHASUM=$(shasum -a 256 $TARBALL | cut '-d ' -f1)
BYTESIZE=$(wc -c < $TARBALL)

JSONFILE="macos-$GITBRANCH.json"
JSONFILE="tarball.json"
touch $JSONFILE
echo "{\"tarball\": \"$TARBALL\"," >>$JSONFILE
echo "\"shasum\": \"$SHASUM\"," >>$JSONFILE
echo "\"size\": \"$BYTESIZE\"}" >>$JSONFILE

s3cmd put -P --add-header="Cache-Control: max-age=0, must-revalidate" "$JSONFILE" "s3://ziglang.org/builds/$JSONFILE"
s3cmd put -P "$JSONFILE" "s3://ziglang.org/builds/$ARCH-macos-$VERSION.json"

# `set -x` causes these variables to be mangled.
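For context, the hunk above assembles a small JSON manifest with the tarball name, its SHA-256, and its byte size, and uploads it to s3://ziglang.org/builds/. The uploaded file ends up with roughly this shape; the values below are placeholders for illustration, not output from a real build:

```json
{"tarball": "zig-macos-x86_64-0.8.0-dev.example.tar.xz",
"shasum": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
"size": "45000000"}
```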
@@ -24,7 +24,7 @@ jobs:
secureFile: s3cfg
- script: ci/azure/macos_arm64_script
name: main
displayName: 'Build and cross-compile'
displayName: 'Build'
- job: BuildLinux
pool:
vmImage: 'ubuntu-18.04'
@@ -66,6 +66,7 @@ jobs:
- job: OnMasterSuccess
dependsOn:
- BuildMacOS
- BuildMacOS_arm64
- BuildLinux
- BuildWindows
condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/master'))
@@ -199,6 +199,12 @@
<td>{{X86_64_MACOS_BYTESIZE}}</td>
<td class="code">{{X86_64_MACOS_SHASUM}}</td>
</tr>
<tr>
<td><a href="https://ziglang.org/builds/{{AARCH64_MACOS_TARBALL}}">{{AARCH64_MACOS_TARBALL}}</a></td>
<td>Binary</td>
<td>{{AARCH64_MACOS_BYTESIZE}}</td>
<td class="code">{{AARCH64_MACOS_SHASUM}}</td>
</tr>
<tr>
<td><a href="https://ziglang.org/builds/{{X86_64_FREEBSD_TARBALL}}">{{X86_64_FREEBSD_TARBALL}}</a></td>
<td>Binary</td>
@@ -19,6 +19,11 @@
"shasum": "{{X86_64_MACOS_SHASUM}}",
"size": "{{X86_64_MACOS_BYTESIZE}}"
},
"aarch64-macos": {
"tarball": "https://ziglang.org/builds/{{AARCH64_MACOS_TARBALL}}",
"shasum": "{{AARCH64_MACOS_SHASUM}}",
"size": "{{AARCH64_MACOS_BYTESIZE}}"
},
"x86_64-windows": {
"tarball": "https://ziglang.org/builds/{{X86_64_WINDOWS_TARBALL}}",
"shasum": "{{X86_64_WINDOWS_SHASUM}}",
@@ -12,6 +12,7 @@ NATIVE_TARBALL="zig-linux-$(uname -m)-$VERSION.tar.xz"
AARCH64_LINUX_JSON_URL="https://ziglang.org/builds/aarch64-linux-$VERSION.json"
X86_64_LINUX_JSON_URL="https://ziglang.org/builds/x86_64-linux-$VERSION.json"
X86_64_WINDOWS_JSON_URL="https://ziglang.org/builds/x86_64-windows-$VERSION.json"
AARCH64_MACOS_JSON_URL="https://ziglang.org/builds/aarch64-macos-$VERSION.json"
X86_64_MACOS_JSON_URL="https://ziglang.org/builds/x86_64-macos-$VERSION.json"
X86_64_FREEBSD_JSON_URL="https://ziglang.org/builds/x86_64-freebsd-$VERSION.json"

@@ -20,6 +21,7 @@ X86_64_FREEBSD_JSON_URL="https://ziglang.org/builds/x86_64-freebsd-$VERSION.json
curl --fail -I "$AARCH64_LINUX_JSON_URL" >/dev/null || exit 0
curl --fail -I "$X86_64_LINUX_JSON_URL" >/dev/null || exit 0
curl --fail -I "$X86_64_WINDOWS_JSON_URL" >/dev/null || exit 0
curl --fail -I "$AARCH64_MACOS_JSON_URL" >/dev/null || exit 0
curl --fail -I "$X86_64_MACOS_JSON_URL" >/dev/null || exit 0
curl --fail -I "$X86_64_FREEBSD_JSON_URL" >/dev/null || exit 0

@@ -57,6 +59,11 @@ export X86_64_WINDOWS_TARBALL="$(echo "$X86_64_WINDOWS_JSON" | jq .tarball -r)"
export X86_64_WINDOWS_BYTESIZE="$(echo "$X86_64_WINDOWS_JSON" | jq .size -r)"
export X86_64_WINDOWS_SHASUM="$(echo "$X86_64_WINDOWS_JSON" | jq .shasum -r)"

AARCH64_MACOS_JSON=$(curl --fail "$AARCH64_MACOS_JSON_URL" || exit 1)
export AARCH64_MACOS_TARBALL="$(echo "$AARCH64_MACOS_JSON" | jq .tarball -r)"
export AARCH64_MACOS_BYTESIZE="$(echo "$AARCH64_MACOS_JSON" | jq .size -r)"
export AARCH64_MACOS_SHASUM="$(echo "$AARCH64_MACOS_JSON" | jq .shasum -r)"

X86_64_MACOS_JSON=$(curl --fail "$X86_64_MACOS_JSON_URL" || exit 1)
export X86_64_MACOS_TARBALL="$(echo "$X86_64_MACOS_JSON" | jq .tarball -r)"
export X86_64_MACOS_BYTESIZE="$(echo "$X86_64_MACOS_JSON" | jq .size -r)"
@@ -1023,6 +1023,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: any
const builtin_code = try getBuiltinCode(allocator, &env_map, zig_exe);

for (toc.nodes) |node| {
defer root_node.completeOne();
switch (node) {
.Content => |data| {
try out.writeAll(data);
@@ -1062,8 +1063,6 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: any
try tokenizeAndPrint(tokenizer, out, content_tok);
},
.Code => |code| {
root_node.completeOne();

const raw_source = tokenizer.buffer[code.source_token.start..code.source_token.end];
const trimmed_raw_source = mem.trim(u8, raw_source, " \n");
if (!code.is_inline) {
@@ -85,7 +85,6 @@
#main-wrapper {
display: flex;
flex-direction: column;
height: 100vh;
}

#contents-wrapper {
@@ -106,6 +105,11 @@
#main-wrapper {
flex-direction: row;
}
#toc {
height: 100vh;
position: sticky;
top: 0;
}
#contents-wrapper, #toc {
overflow: auto;
}
@@ -1939,6 +1939,15 @@ pub const LibExeObjStep = struct {
out.print("pub const {}: []const u8 = \"{}\";\n", .{ std.zig.fmtId(name), std.zig.fmtEscapes(value) }) catch unreachable;
return;
},
?[:0]const u8 => {
out.print("pub const {}: ?[:0]const u8 = ", .{std.zig.fmtId(name)}) catch unreachable;
if (value) |payload| {
out.print("\"{}\";\n", .{std.zig.fmtEscapes(payload)}) catch unreachable;
} else {
out.writeAll("null;\n") catch unreachable;
}
return;
},
?[]const u8 => {
out.print("pub const {}: ?[]const u8 = ", .{std.zig.fmtId(name)}) catch unreachable;
if (value) |payload| {
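To make the `addBuildOption` hunk above concrete, here is a sketch of the lines it emits into the generated build-options file for the two new optional string cases; the option names and values are made up for illustration and do not come from the commit:

```zig
// Illustrative output of LibExeObjStep.addBuildOption (hypothetical names/values):
pub const semver: ?[:0]const u8 = "0.8.0-dev"; // a non-null ?[:0]const u8 option
pub const config_h_path: ?[]const u8 = null; // a null ?[]const u8 option
```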
@@ -18,14 +18,14 @@ pub extern "c" fn _lwp_self() lwpid_t;

pub extern "c" fn pipe2(fds: *[2]fd_t, flags: u32) c_int;
pub extern "c" fn arc4random_buf(buf: [*]u8, len: usize) void;
pub extern "c" fn __fstat50(fd: fd_t, buf: *Stat) c_int;
pub extern "c" fn __stat50(path: [*:0]const u8, buf: *Stat) c_int;
pub extern "c" fn __fstat50(fd: fd_t, buf: *libc_stat) c_int;
pub extern "c" fn __stat50(path: [*:0]const u8, buf: *libc_stat) c_int;
pub extern "c" fn __clock_gettime50(clk_id: c_int, tp: *timespec) c_int;
pub extern "c" fn __clock_getres50(clk_id: c_int, tp: *timespec) c_int;
pub extern "c" fn __getdents30(fd: c_int, buf_ptr: [*]u8, nbytes: usize) c_int;
pub extern "c" fn __sigaltstack14(ss: ?*stack_t, old_ss: ?*stack_t) c_int;
pub extern "c" fn __nanosleep50(rqtp: *const timespec, rmtp: ?*timespec) c_int;
pub extern "c" fn __sigaction14(sig: c_int, noalias act: *const Sigaction, noalias oact: ?*Sigaction) c_int;
pub extern "c" fn __sigaction14(sig: c_int, noalias act: ?*const Sigaction, noalias oact: ?*Sigaction) c_int;
pub extern "c" fn __sigprocmask14(how: c_int, noalias set: ?*const sigset_t, noalias oset: ?*sigset_t) c_int;
pub extern "c" fn __socket30(domain: c_uint, sock_type: c_uint, protocol: c_uint) c_int;
pub extern "c" fn __gettimeofday50(noalias tv: ?*timeval, noalias tz: ?*timezone) c_int;
@@ -34,7 +34,6 @@ pub extern "c" fn __getrusage50(who: c_int, usage: *rusage) c_int;
pub extern "c" fn __libc_thr_yield() c_int;

pub extern "c" fn posix_memalign(memptr: *?*c_void, alignment: usize, size: usize) c_int;
pub extern "c" fn malloc_usable_size(?*const c_void) usize;

pub const pthread_mutex_t = extern struct {
ptm_magic: u32 = 0x33330003,
@@ -93,3 +92,5 @@ pub const pthread_attr_t = extern struct {
pta_flags: i32,
pta_private: ?*c_void,
};

pub const sem_t = ?*opaque {};
@@ -462,7 +462,7 @@ pub const TTY = struct {
// TODO give this a payload of file handle
windows_api,

fn setColor(conf: Config, out_stream: anytype, color: Color) void {
pub fn setColor(conf: Config, out_stream: anytype, color: Color) void {
nosuspend switch (conf) {
.no_color => return,
.escape_codes => switch (color) {
@@ -223,15 +223,15 @@ pub const File = struct {
return os.lseek_SET(self.handle, offset);
}

pub const GetPosError = os.SeekError || os.FStatError;
pub const GetSeekPosError = os.SeekError || os.FStatError;

/// TODO: integrate with async I/O
pub fn getPos(self: File) GetPosError!u64 {
pub fn getPos(self: File) GetSeekPosError!u64 {
return os.lseek_CUR_get(self.handle);
}

/// TODO: integrate with async I/O
pub fn getEndPos(self: File) GetPosError!u64 {
pub fn getEndPos(self: File) GetSeekPosError!u64 {
if (builtin.os.tag == .windows) {
return windows.GetFileSizeEx(self.handle);
}
@@ -819,7 +819,7 @@ pub const File = struct {
pub const SeekableStream = io.SeekableStream(
File,
SeekError,
GetPosError,
GetSeekPosError,
seekTo,
seekBy,
getPos,
@@ -398,10 +398,6 @@ pub fn HashMapUnmanaged(
return size * 100 < max_load_percentage * cap;
}

pub fn init(allocator: *Allocator) Self {
return .{};
}

pub fn deinit(self: *Self, allocator: *Allocator) void {
self.deallocate(allocator);
self.* = undefined;
@@ -5,7 +5,6 @@
// and substantial portions of the software.
const std = @import("../std.zig");
const io = std.io;
const testing = std.testing;

/// Provides `io.Reader`, `io.Writer`, and `io.SeekableStream` for in-memory buffers as
/// well as files.
@@ -19,7 +18,7 @@ pub const StreamSource = union(enum) {
pub const ReadError = std.fs.File.ReadError;
pub const WriteError = std.fs.File.WriteError;
pub const SeekError = std.fs.File.SeekError;
pub const GetSeekPosError = std.fs.File.GetPosError;
pub const GetSeekPosError = std.fs.File.GetSeekPosError;

pub const Reader = io.Reader(*StreamSource, ReadError, read);
pub const Writer = io.Writer(*StreamSource, WriteError, write);
@@ -1234,7 +1234,7 @@ test "json.validate" {
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
const ArrayList = std.ArrayList;
const StringHashMap = std.StringHashMap;
const StringArrayHashMap = std.StringArrayHashMap;

pub const ValueTree = struct {
arena: ArenaAllocator,
@@ -1245,7 +1245,7 @@ pub const ValueTree = struct {
}
};

pub const ObjectMap = StringHashMap(Value);
pub const ObjectMap = StringArrayHashMap(Value);
pub const Array = ArrayList(Value);

/// Represents a JSON value
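Backing `ObjectMap` with `StringArrayHashMap` means object keys keep their insertion order when a parsed tree is re-serialized. That is what the new `roundTrip` helper in `lib/std/json/test.zig` (next hunk) relies on; here is a minimal sketch of the same round trip, using only calls that appear in this commit:

```zig
// Sketch: parse a document, re-serialize it, and expect identical bytes.
var p = json.Parser.init(testing.allocator, false);
defer p.deinit();

var tree = try p.parse(
    \\{"1":[],"2":{},"3":"4"}
);
defer tree.deinit();

var buf: [256]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buf);
try tree.root.jsonStringify(.{}, fbs.writer());
testing.expectEqualStrings(
    \\{"1":[],"2":{},"3":"4"}
, fbs.getWritten());
```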
@@ -12,7 +12,7 @@ const std = @import("../std.zig");
const json = std.json;
const testing = std.testing;

fn testNonStreaming(comptime s: []const u8) !void {
fn testNonStreaming(s: []const u8) !void {
var p = json.Parser.init(testing.allocator, false);
defer p.deinit();

@@ -20,44 +20,60 @@ fn testNonStreaming(comptime s: []const u8) !void {
defer tree.deinit();
}

fn ok(comptime s: []const u8) void {
fn ok(s: []const u8) !void {
testing.expect(json.validate(s));

testNonStreaming(s) catch testing.expect(false);
try testNonStreaming(s);
}

fn err(comptime s: []const u8) void {
fn err(s: []const u8) void {
testing.expect(!json.validate(s));

testNonStreaming(s) catch return;
testing.expect(false);
}

fn utf8Error(comptime s: []const u8) void {
fn utf8Error(s: []const u8) void {
testing.expect(!json.validate(s));

testing.expectError(error.InvalidUtf8Byte, testNonStreaming(s));
}

fn any(comptime s: []const u8) void {
fn any(s: []const u8) void {
_ = json.validate(s);

testNonStreaming(s) catch {};
}

fn anyStreamingErrNonStreaming(comptime s: []const u8) void {
fn anyStreamingErrNonStreaming(s: []const u8) void {
_ = json.validate(s);

testNonStreaming(s) catch return;
testing.expect(false);
}

fn roundTrip(s: []const u8) !void {
testing.expect(json.validate(s));

var p = json.Parser.init(testing.allocator, false);
defer p.deinit();

var tree = try p.parse(s);
defer tree.deinit();

var buf: [256]u8 = undefined;
var fbs = std.io.fixedBufferStream(&buf);
try tree.root.jsonStringify(.{}, fbs.writer());

testing.expectEqualStrings(s, fbs.getWritten());
}

////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Additional tests not part of test JSONTestSuite.
test "y_trailing_comma_after_empty" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\{"1":[],"2":{},"3":"4"}
|
||||
);
|
||||
}
|
||||
@ -65,252 +81,252 @@ test "y_trailing_comma_after_empty" {
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
test "y_array_arraysWithSpaces" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[[] ]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_empty" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_empty-string" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[""]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_ending_with_newline" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\["a"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_false" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[false]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_heterogeneous" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[null, 1, "1", {}]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_null" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[null]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_with_1_and_newline" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[1
|
||||
\\]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_with_leading_space" {
|
||||
ok(
|
||||
try ok(
|
||||
\\ [1]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_with_several_null" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[1,null,null,null,2]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_array_with_trailing_space" {
|
||||
ok("[2] ");
|
||||
try ok("[2] ");
|
||||
}
|
||||
|
||||
test "y_number_0e+1" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[0e+1]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_0e1" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[0e1]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_after_space" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[ 4]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_double_close_to_zero" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[-0.000000000000000000000000000000000000000000000000000000000000000000000000000001]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_int_with_exp" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[20e1]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[123e65]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_minus_zero" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[-0]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_negative_int" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[-123]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_negative_one" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[-1]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_negative_zero" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[-0]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_capital_e" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[1E22]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_capital_e_neg_exp" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[1E-2]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_capital_e_pos_exp" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[1E+2]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_exponent" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[123e45]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_fraction_exponent" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[123.456e78]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_neg_exp" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[1e-2]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_real_pos_exponent" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[1e+2]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_simple_int" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[123]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_number_simple_real" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[123.456789]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_basic" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\{"asd":"sdf"}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_duplicated_key_and_value" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{"a":"b","a":"b"}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_duplicated_key" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{"a":"b","a":"c"}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_empty" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\{}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_empty_key" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\{"":0}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_escaped_null_in_key" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{"foo\u0000bar": 42}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_extreme_numbers" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{ "min": -1.0e+28, "max": 1.0e+28 }
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{"asd":"sdf", "dfg":"fgh"}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_long_strings" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{"x":[{"id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}], "id": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_simple" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\{"a":[]}
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_string_unicode" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{"title":"\u041f\u043e\u043b\u0442\u043e\u0440\u0430 \u0417\u0435\u043c\u043b\u0435\u043a\u043e\u043f\u0430" }
|
||||
);
|
||||
}
|
||||
|
||||
test "y_object_with_newlines" {
|
||||
ok(
|
||||
try ok(
|
||||
\\{
|
||||
\\"a": "b"
|
||||
\\}
|
||||
@ -318,311 +334,311 @@ test "y_object_with_newlines" {
|
||||
}
|
||||
|
||||
test "y_string_1_2_3_bytes_UTF-8_sequences" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0060\u012a\u12AB"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_accepted_surrogate_pair" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uD801\udc37"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_accepted_surrogate_pairs" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\ud83d\ude39\ud83d\udc8d"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_allowed_escapes" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\"\\\/\b\f\n\r\t"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_backslash_and_u_escaped_zero" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\\u0000"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_backslash_doublequotes" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\["\""]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_comments" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["a/*b*/c/*d//e"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_double_escape_a" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\\a"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_double_escape_n" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\["\\n"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_escaped_control_character" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0012"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_escaped_noncharacter" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uFFFF"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_in_array" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["asd"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_in_array_with_leading_space" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[ "asd"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_last_surrogates_1_and_2" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uDBFF\uDFFF"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_nbsp_uescaped" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["new\u00A0line"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_nonCharacterInUTF-8_U+10FFFF" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[""]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_nonCharacterInUTF-8_U+FFFF" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[""]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_null_escape" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0000"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_one-byte-utf-8" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u002c"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_pi" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["π"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_reservedCharacterInUTF-8_U+1BFFF" {
|
||||
ok(
|
||||
try ok(
|
||||
\\[""]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_simple_ascii" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["asd "]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_space" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\" "
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_surrogates_U+1D11E_MUSICAL_SYMBOL_G_CLEF" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uD834\uDd1e"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_three-byte-utf-8" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0821"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_two-byte-utf-8" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0123"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_u+2028_line_sep" {
|
||||
ok("[\"\xe2\x80\xa8\"]");
|
||||
try ok("[\"\xe2\x80\xa8\"]");
|
||||
}
|
||||
|
||||
test "y_string_u+2029_par_sep" {
|
||||
ok("[\"\xe2\x80\xa9\"]");
|
||||
try ok("[\"\xe2\x80\xa9\"]");
|
||||
}
|
||||
|
||||
test "y_string_uescaped_newline" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["new\u000Aline"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_uEscape" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0061\u30af\u30EA\u30b9"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unescaped_char_delete" {
|
||||
ok("[\"\x7f\"]");
|
||||
try ok("[\"\x7f\"]");
|
||||
}
|
||||
|
||||
test "y_string_unicode_2" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["⍂㈴⍂"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicodeEscapedBackslash" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u005C"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_escaped_double_quote" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u0022"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uA66D"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_U+10FFFE_nonchar" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uDBFF\uDFFE"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_U+1FFFE_nonchar" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uD83F\uDFFE"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_U+200B_ZERO_WIDTH_SPACE" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u200B"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_U+2064_invisible_plus" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\u2064"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_U+FDD0_nonchar" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uFDD0"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_unicode_U+FFFE_nonchar" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["\uFFFE"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_utf8" {
|
||||
ok(
|
||||
try ok(
|
||||
\\["€𝄞"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_string_with_del_character" {
|
||||
ok("[\"a\x7fa\"]");
|
||||
try ok("[\"a\x7fa\"]");
|
||||
}
|
||||
|
||||
test "y_structure_lonely_false" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\false
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_lonely_int" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\42
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_lonely_negative_real" {
|
||||
ok(
|
||||
try ok(
|
||||
\\-0.1
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_lonely_null" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\null
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_lonely_string" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\"asd"
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_lonely_true" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\true
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_string_empty" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\""
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_trailing_newline" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\["a"]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_true_in_array" {
|
||||
ok(
|
||||
try roundTrip(
|
||||
\\[true]
|
||||
);
|
||||
}
|
||||
|
||||
test "y_structure_whitespace_array" {
|
||||
ok(" [] ");
|
||||
try ok(" [] ");
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
@ -36,8 +36,9 @@ pub fn ln(x: anytype) @TypeOf(x) {
|
||||
.ComptimeInt => {
|
||||
return @as(comptime_int, math.floor(ln_64(@as(f64, x))));
|
||||
},
|
||||
.Int => {
|
||||
return @as(T, math.floor(ln_64(@as(f64, x))));
|
||||
.Int => |IntType| switch (IntType.signedness) {
|
||||
.signed => return @compileError("ln not implemented for signed integers"),
|
||||
.unsigned => return @as(T, math.floor(ln_64(@as(f64, x)))),
|
||||
},
|
||||
else => @compileError("ln not implemented for " ++ @typeName(T)),
|
||||
}
|
||||
|
||||
@ -31,9 +31,11 @@ pub fn log(comptime T: type, base: T, x: T) T {
|
||||
.ComptimeInt => {
|
||||
return @as(comptime_int, math.floor(math.ln(@as(f64, x)) / math.ln(float_base)));
|
||||
},
|
||||
.Int => {
|
||||
// TODO implement integer log without using float math
|
||||
return @floatToInt(T, math.floor(math.ln(@intToFloat(f64, x)) / math.ln(float_base)));
|
||||
|
||||
// TODO implement integer log without using float math
|
||||
.Int => |IntType| switch (IntType.signedness) {
|
||||
.signed => return @compileError("log not implemented for signed integers"),
|
||||
.unsigned => return @floatToInt(T, math.floor(math.ln(@intToFloat(f64, x)) / math.ln(float_base))),
|
||||
},
|
||||
|
||||
.Float => {
|
||||
@ -53,7 +55,7 @@ pub fn log(comptime T: type, base: T, x: T) T {
|
||||
test "math.log integer" {
|
||||
expect(log(u8, 2, 0x1) == 0);
|
||||
expect(log(u8, 2, 0x2) == 1);
|
||||
expect(log(i16, 2, 0x72) == 6);
|
||||
expect(log(u16, 2, 0x72) == 6);
|
||||
expect(log(u32, 2, 0xFFFFFF) == 23);
|
||||
expect(log(u64, 2, 0x7FF0123456789ABC) == 62);
|
||||
}
|
||||
|
||||
@ -37,8 +37,9 @@ pub fn log10(x: anytype) @TypeOf(x) {
|
||||
.ComptimeInt => {
|
||||
return @as(comptime_int, math.floor(log10_64(@as(f64, x))));
|
||||
},
|
||||
.Int => {
|
||||
return @floatToInt(T, math.floor(log10_64(@intToFloat(f64, x))));
|
||||
.Int => |IntType| switch (IntType.signedness) {
|
||||
.signed => return @compileError("log10 not implemented for signed integers"),
|
||||
.unsigned => return @floatToInt(T, math.floor(log10_64(@intToFloat(f64, x)))),
|
||||
},
|
||||
else => @compileError("log10 not implemented for " ++ @typeName(T)),
|
||||
}
|
||||
|
||||
@ -43,8 +43,9 @@ pub fn log2(x: anytype) @TypeOf(x) {
|
||||
}) : (result += 1) {}
|
||||
return result;
|
||||
},
|
||||
.Int => {
|
||||
return math.log2_int(T, x);
|
||||
.Int => |IntType| switch (IntType.signedness) {
|
||||
.signed => return @compileError("log2 not implemented for signed integers"),
|
||||
.unsigned => return math.log2_int(T, x),
|
||||
},
|
||||
else => @compileError("log2 not implemented for " ++ @typeName(T)),
|
||||
}
|
||||
|
||||
@ -31,7 +31,10 @@ pub fn sqrt(x: anytype) Sqrt(@TypeOf(x)) {
|
||||
}
|
||||
return @as(T, sqrt_int(u128, x));
|
||||
},
|
||||
.Int => return sqrt_int(T, x),
|
||||
.Int => |IntType| switch (IntType.signedness) {
|
||||
.signed => return @compileError("sqrt not implemented for signed integers"),
|
||||
.unsigned => return sqrt_int(T, x),
|
||||
},
|
||||
else => @compileError("sqrt not implemented for " ++ @typeName(T)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -970,12 +970,15 @@ fn castPtr(comptime DestType: type, target: anytype) DestType {
|
||||
|
||||
if (source.is_const and !dest.is_const or source.is_volatile and !dest.is_volatile)
|
||||
return @intToPtr(DestType, @ptrToInt(target))
|
||||
else if (@typeInfo(dest.child) == .Opaque)
|
||||
// dest.alignment would error out
|
||||
return @ptrCast(DestType, target)
|
||||
else
|
||||
return @ptrCast(DestType, @alignCast(dest.alignment, target));
|
||||
}
|
||||
|
||||
fn ptrInfo(comptime PtrType: type) TypeInfo.Pointer {
|
||||
return switch(@typeInfo(PtrType)){
|
||||
return switch (@typeInfo(PtrType)) {
|
||||
.Optional => |opt_info| @typeInfo(opt_info.child).Pointer,
|
||||
.Pointer => |ptr_info| ptr_info,
|
||||
else => unreachable,
|
||||
@ -1010,6 +1013,8 @@ test "std.meta.cast" {
|
||||
|
||||
testing.expectEqual(@intToPtr(*u8, 2), cast(*u8, @intToPtr(*const u8, 2)));
|
||||
testing.expectEqual(@intToPtr(*u8, 2), cast(*u8, @intToPtr(*volatile u8, 2)));
|
||||
|
||||
testing.expectEqual(@intToPtr(?*c_void, 2), cast(?*c_void, @intToPtr(*u8, 2)));
|
||||
}
|
||||
|
||||
/// Given a value returns its size as C's sizeof operator would.
|
||||
@ -1297,3 +1302,35 @@ pub fn globalOption(comptime name: []const u8, comptime T: type) ?T {
|
||||
return null;
|
||||
return @as(T, @field(root, name));
|
||||
}
|
||||
|
||||
/// This function is for translate-c and is not intended for general use.
|
||||
/// Convert from clang __builtin_shufflevector index to Zig @shuffle index
|
||||
/// clang requires __builtin_shufflevector index arguments to be integer constants.
|
||||
/// negative values for `this_index` indicate "don't care" so we arbitrarily choose 0
|
||||
/// clang enforces that `this_index` is less than the total number of vector elements
|
||||
/// See https://ziglang.org/documentation/master/#shuffle
|
||||
/// See https://clang.llvm.org/docs/LanguageExtensions.html#langext-builtin-shufflevector
|
||||
pub fn shuffleVectorIndex(comptime this_index: c_int, comptime source_vector_len: usize) i32 {
|
||||
if (this_index <= 0) return 0;
|
||||
|
||||
const positive_index = @intCast(usize, this_index);
|
||||
if (positive_index < source_vector_len) return @intCast(i32, this_index);
|
||||
const b_index = positive_index - source_vector_len;
|
||||
return ~@intCast(i32, b_index);
|
||||
}
|
||||
|
||||
test "shuffleVectorIndex" {
|
||||
const vector_len: usize = 4;
|
||||
|
||||
testing.expect(shuffleVectorIndex(-1, vector_len) == 0);
|
||||
|
||||
testing.expect(shuffleVectorIndex(0, vector_len) == 0);
|
||||
testing.expect(shuffleVectorIndex(1, vector_len) == 1);
|
||||
testing.expect(shuffleVectorIndex(2, vector_len) == 2);
|
||||
testing.expect(shuffleVectorIndex(3, vector_len) == 3);
|
||||
|
||||
testing.expect(shuffleVectorIndex(4, vector_len) == -1);
|
||||
testing.expect(shuffleVectorIndex(5, vector_len) == -2);
|
||||
testing.expect(shuffleVectorIndex(6, vector_len) == -3);
|
||||
testing.expect(shuffleVectorIndex(7, vector_len) == -4);
|
||||
}
|
||||
|
||||
@@ -3254,6 +3254,9 @@ pub const ConnectError = error{

/// Connection was reset by peer before connect could complete.
ConnectionResetByPeer,

/// Socket is non-blocking and already has a pending connection in progress.
ConnectionPending,
} || UnexpectedError;

/// Initiate a connection on a socket.
@@ -3294,7 +3297,7 @@ pub fn connect(sock: socket_t, sock_addr: *const sockaddr, len: socklen_t) Conne
EADDRNOTAVAIL => return error.AddressNotAvailable,
EAFNOSUPPORT => return error.AddressFamilyNotSupported,
EAGAIN, EINPROGRESS => return error.WouldBlock,
EALREADY => unreachable, // The socket is nonblocking and a previous connection attempt has not yet been completed.
EALREADY => return error.ConnectionPending,
EBADF => unreachable, // sockfd is not a valid open file descriptor.
ECONNREFUSED => return error.ConnectionRefused,
ECONNRESET => return error.ConnectionResetByPeer,
@@ -3325,7 +3328,7 @@ pub fn getsockoptError(sockfd: fd_t) ConnectError!void {
EADDRNOTAVAIL => return error.AddressNotAvailable,
EAFNOSUPPORT => return error.AddressFamilyNotSupported,
EAGAIN => return error.SystemResources,
EALREADY => unreachable, // The socket is nonblocking and a previous connection attempt has not yet been completed.
EALREADY => return error.ConnectionPending,
EBADF => unreachable, // sockfd is not a valid open file descriptor.
ECONNREFUSED => return error.ConnectionRefused,
EFAULT => unreachable, // The socket structure address is outside the user's address space.
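Since `EALREADY` now maps to `error.ConnectionPending` instead of `unreachable`, code driving a non-blocking socket can treat it much like `error.WouldBlock`. A hedged sketch of what a call site might do (the variable names are hypothetical; only `connect`, `getsockoptError`, and the error names come from this hunk):

```zig
// Sketch: tolerating an in-flight connection attempt on a non-blocking socket.
os.connect(sock, sock_addr, addr_len) catch |err| switch (err) {
    error.WouldBlock, error.ConnectionPending => {
        // A connect is already in progress: wait for the socket to become
        // writable, then call os.getsockoptError(sock) for the final result.
    },
    else => |e| return e,
};
```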
@@ -21,6 +21,7 @@ pub usingnamespace switch (builtin.arch) {
.riscv64 => @import("linux/riscv64.zig"),
.sparcv9 => @import("linux/sparc64.zig"),
.mips, .mipsel => @import("linux/mips.zig"),
.powerpc => @import("linux/powerpc.zig"),
.powerpc64, .powerpc64le => @import("linux/powerpc64.zig"),
else => struct {},
};
@ -586,6 +587,92 @@ pub const IP_DEFAULT_MULTICAST_TTL = 1;
|
||||
pub const IP_DEFAULT_MULTICAST_LOOP = 1;
|
||||
pub const IP_MAX_MEMBERSHIPS = 20;
|
||||
|
||||
// IPv6 socket options
|
||||
|
||||
pub const IPV6_ADDRFORM = 1;
|
||||
pub const IPV6_2292PKTINFO = 2;
|
||||
pub const IPV6_2292HOPOPTS = 3;
|
||||
pub const IPV6_2292DSTOPTS = 4;
|
||||
pub const IPV6_2292RTHDR = 5;
|
||||
pub const IPV6_2292PKTOPTIONS = 6;
|
||||
pub const IPV6_CHECKSUM = 7;
|
||||
pub const IPV6_2292HOPLIMIT = 8;
|
||||
pub const IPV6_NEXTHOP = 9;
|
||||
pub const IPV6_AUTHHDR = 10;
|
||||
pub const IPV6_FLOWINFO = 11;
|
||||
|
||||
pub const IPV6_UNICAST_HOPS = 16;
|
||||
pub const IPV6_MULTICAST_IF = 17;
|
||||
pub const IPV6_MULTICAST_HOPS = 18;
|
||||
pub const IPV6_MULTICAST_LOOP = 19;
|
||||
pub const IPV6_ADD_MEMBERSHIP = 20;
|
||||
pub const IPV6_DROP_MEMBERSHIP = 21;
|
||||
pub const IPV6_ROUTER_ALERT = 22;
|
||||
pub const IPV6_MTU_DISCOVER = 23;
|
||||
pub const IPV6_MTU = 24;
|
||||
pub const IPV6_RECVERR = 25;
|
||||
pub const IPV6_V6ONLY = 26;
|
||||
pub const IPV6_JOIN_ANYCAST = 27;
|
||||
pub const IPV6_LEAVE_ANYCAST = 28;
|
||||
|
||||
// IPV6_MTU_DISCOVER values
|
||||
pub const IPV6_PMTUDISC_DONT = 0;
|
||||
pub const IPV6_PMTUDISC_WANT = 1;
|
||||
pub const IPV6_PMTUDISC_DO = 2;
|
||||
pub const IPV6_PMTUDISC_PROBE = 3;
|
||||
pub const IPV6_PMTUDISC_INTERFACE = 4;
|
||||
pub const IPV6_PMTUDISC_OMIT = 5;
|
||||
|
||||
// Flowlabel
|
||||
pub const IPV6_FLOWLABEL_MGR = 32;
|
||||
pub const IPV6_FLOWINFO_SEND = 33;
|
||||
pub const IPV6_IPSEC_POLICY = 34;
|
||||
pub const IPV6_XFRM_POLICY = 35;
|
||||
pub const IPV6_HDRINCL = 36;
|
||||
|
||||
// Advanced API (RFC3542) (1)
|
||||
pub const IPV6_RECVPKTINFO = 49;
|
||||
pub const IPV6_PKTINFO = 50;
|
||||
pub const IPV6_RECVHOPLIMIT = 51;
|
||||
pub const IPV6_HOPLIMIT = 52;
|
||||
pub const IPV6_RECVHOPOPTS = 53;
|
||||
pub const IPV6_HOPOPTS = 54;
|
||||
pub const IPV6_RTHDRDSTOPTS = 55;
|
||||
pub const IPV6_RECVRTHDR = 56;
|
||||
pub const IPV6_RTHDR = 57;
|
||||
pub const IPV6_RECVDSTOPTS = 58;
|
||||
pub const IPV6_DSTOPTS = 59;
|
||||
pub const IPV6_RECVPATHMTU = 60;
|
||||
pub const IPV6_PATHMTU = 61;
|
||||
pub const IPV6_DONTFRAG = 62;
|
||||
|
||||
// Advanced API (RFC3542) (2)
|
||||
pub const IPV6_RECVTCLASS = 66;
|
||||
pub const IPV6_TCLASS = 67;
|
||||
|
||||
pub const IPV6_AUTOFLOWLABEL = 70;
|
||||
|
||||
// RFC5014: Source address selection
|
||||
pub const IPV6_ADDR_PREFERENCES = 72;
|
||||
|
||||
pub const IPV6_PREFER_SRC_TMP = 0x0001;
|
||||
pub const IPV6_PREFER_SRC_PUBLIC = 0x0002;
|
||||
pub const IPV6_PREFER_SRC_PUBTMP_DEFAULT = 0x0100;
|
||||
pub const IPV6_PREFER_SRC_COA = 0x0004;
|
||||
pub const IPV6_PREFER_SRC_HOME = 0x0400;
|
||||
pub const IPV6_PREFER_SRC_CGA = 0x0008;
|
||||
pub const IPV6_PREFER_SRC_NONCGA = 0x0800;
|
||||
|
||||
// RFC5082: Generalized Ttl Security Mechanism
|
||||
pub const IPV6_MINHOPCOUNT = 73;
|
||||
|
||||
pub const IPV6_ORIGDSTADDR = 74;
|
||||
pub const IPV6_RECVORIGDSTADDR = IPV6_ORIGDSTADDR;
|
||||
pub const IPV6_TRANSPARENT = 75;
|
||||
pub const IPV6_UNICAST_IF = 76;
|
||||
pub const IPV6_RECVFRAGSIZE = 77;
|
||||
pub const IPV6_FREEBIND = 78;
|
||||
|
||||
pub const MSG_OOB = 0x0001;
|
||||
pub const MSG_PEEK = 0x0002;
|
||||
pub const MSG_DONTROUTE = 0x0004;
|
||||
|
||||
lib/std/os/bits/linux/powerpc.zig (new file, 635 lines)
@@ -0,0 +1,635 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
// Copyright (c) 2015-2021 Zig Contributors
|
||||
// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
|
||||
// The MIT license requires this copyright notice to be included in all copies
|
||||
// and substantial portions of the software.
|
||||
|
||||
const std = @import("../../../std.zig");
|
||||
const linux = std.os.linux;
|
||||
const socklen_t = linux.socklen_t;
|
||||
const iovec = linux.iovec;
|
||||
const iovec_const = linux.iovec_const;
|
||||
const uid_t = linux.uid_t;
|
||||
const gid_t = linux.gid_t;
|
||||
const pid_t = linux.pid_t;
|
||||
const stack_t = linux.stack_t;
|
||||
const sigset_t = linux.sigset_t;
|
||||
pub const SYS = extern enum(usize) {
|
||||
restart_syscall = 0,
|
||||
exit = 1,
|
||||
fork = 2,
|
||||
read = 3,
|
||||
write = 4,
|
||||
open = 5,
|
||||
close = 6,
|
||||
waitpid = 7,
|
||||
creat = 8,
|
||||
link = 9,
|
||||
unlink = 10,
|
||||
execve = 11,
|
||||
chdir = 12,
|
||||
time = 13,
|
||||
mknod = 14,
|
||||
chmod = 15,
|
||||
lchown = 16,
|
||||
@"break" = 17,
|
||||
oldstat = 18,
|
||||
lseek = 19,
|
||||
getpid = 20,
|
||||
mount = 21,
|
||||
umount = 22,
|
||||
setuid = 23,
|
||||
getuid = 24,
|
||||
stime = 25,
|
||||
ptrace = 26,
|
||||
alarm = 27,
|
||||
oldfstat = 28,
|
||||
pause = 29,
|
||||
utime = 30,
|
||||
stty = 31,
|
||||
gtty = 32,
|
||||
access = 33,
|
||||
nice = 34,
|
||||
ftime = 35,
|
||||
sync = 36,
|
||||
kill = 37,
|
||||
rename = 38,
|
||||
mkdir = 39,
|
||||
rmdir = 40,
|
||||
dup = 41,
|
||||
pipe = 42,
|
||||
times = 43,
|
||||
prof = 44,
|
||||
brk = 45,
|
||||
setgid = 46,
|
||||
getgid = 47,
|
||||
signal = 48,
|
||||
geteuid = 49,
|
||||
getegid = 50,
|
||||
acct = 51,
|
||||
umount2 = 52,
|
||||
lock = 53,
|
||||
ioctl = 54,
|
||||
fcntl = 55,
|
||||
mpx = 56,
|
||||
setpgid = 57,
|
||||
ulimit = 58,
|
||||
oldolduname = 59,
|
||||
umask = 60,
|
||||
chroot = 61,
|
||||
ustat = 62,
|
||||
dup2 = 63,
|
||||
getppid = 64,
|
||||
getpgrp = 65,
|
||||
setsid = 66,
|
||||
sigaction = 67,
|
||||
sgetmask = 68,
|
||||
ssetmask = 69,
|
||||
setreuid = 70,
|
||||
setregid = 71,
|
||||
sigsuspend = 72,
|
||||
sigpending = 73,
|
||||
sethostname = 74,
|
||||
setrlimit = 75,
|
||||
getrlimit = 76,
|
||||
getrusage = 77,
|
||||
gettimeofday = 78,
|
||||
settimeofday = 79,
|
||||
getgroups = 80,
|
||||
setgroups = 81,
|
||||
select = 82,
|
||||
symlink = 83,
|
||||
oldlstat = 84,
|
||||
readlink = 85,
|
||||
uselib = 86,
|
||||
swapon = 87,
|
||||
reboot = 88,
|
||||
readdir = 89,
|
||||
mmap = 90,
|
||||
munmap = 91,
|
||||
truncate = 92,
|
||||
ftruncate = 93,
|
||||
fchmod = 94,
|
||||
fchown = 95,
|
||||
getpriority = 96,
|
||||
setpriority = 97,
|
||||
profil = 98,
|
||||
statfs = 99,
|
||||
fstatfs = 100,
|
||||
ioperm = 101,
|
||||
socketcall = 102,
|
||||
syslog = 103,
|
||||
setitimer = 104,
|
||||
getitimer = 105,
|
||||
stat = 106,
|
||||
lstat = 107,
|
||||
fstat = 108,
|
||||
olduname = 109,
|
||||
iopl = 110,
|
||||
vhangup = 111,
|
||||
idle = 112,
|
||||
vm86 = 113,
|
||||
wait4 = 114,
|
||||
swapoff = 115,
|
||||
sysinfo = 116,
|
||||
ipc = 117,
|
||||
fsync = 118,
|
||||
sigreturn = 119,
|
||||
clone = 120,
|
||||
setdomainname = 121,
|
||||
uname = 122,
|
||||
modify_ldt = 123,
|
||||
adjtimex = 124,
|
||||
mprotect = 125,
|
||||
sigprocmask = 126,
|
||||
create_module = 127,
|
||||
init_module = 128,
|
||||
delete_module = 129,
|
||||
get_kernel_syms = 130,
|
||||
quotactl = 131,
|
||||
getpgid = 132,
|
||||
fchdir = 133,
|
||||
bdflush = 134,
|
||||
sysfs = 135,
|
||||
personality = 136,
|
||||
afs_syscall = 137,
|
||||
setfsuid = 138,
|
||||
setfsgid = 139,
|
||||
_llseek = 140,
|
||||
getdents = 141,
|
||||
_newselect = 142,
|
||||
flock = 143,
|
||||
msync = 144,
|
||||
readv = 145,
|
||||
writev = 146,
|
||||
getsid = 147,
|
||||
fdatasync = 148,
|
||||
_sysctl = 149,
|
||||
mlock = 150,
|
||||
munlock = 151,
|
||||
mlockall = 152,
|
||||
munlockall = 153,
|
||||
sched_setparam = 154,
|
||||
sched_getparam = 155,
|
||||
sched_setscheduler = 156,
|
||||
sched_getscheduler = 157,
|
||||
sched_yield = 158,
|
||||
sched_get_priority_max = 159,
|
||||
sched_get_priority_min = 160,
|
||||
sched_rr_get_interval = 161,
|
||||
nanosleep = 162,
|
||||
mremap = 163,
|
||||
setresuid = 164,
|
||||
getresuid = 165,
|
||||
query_module = 166,
|
||||
poll = 167,
|
||||
nfsservctl = 168,
|
||||
setresgid = 169,
|
||||
getresgid = 170,
|
||||
prctl = 171,
|
||||
rt_sigreturn = 172,
|
||||
rt_sigaction = 173,
|
||||
rt_sigprocmask = 174,
|
||||
rt_sigpending = 175,
|
||||
rt_sigtimedwait = 176,
|
||||
rt_sigqueueinfo = 177,
|
||||
rt_sigsuspend = 178,
|
||||
pread64 = 179,
|
||||
pwrite64 = 180,
|
||||
chown = 181,
|
||||
getcwd = 182,
|
||||
capget = 183,
|
||||
capset = 184,
|
||||
sigaltstack = 185,
|
||||
sendfile = 186,
|
||||
getpmsg = 187,
|
||||
putpmsg = 188,
|
||||
vfork = 189,
|
||||
ugetrlimit = 190,
|
||||
readahead = 191,
|
||||
mmap2 = 192,
|
||||
truncate64 = 193,
|
||||
ftruncate64 = 194,
|
||||
stat64 = 195,
|
||||
lstat64 = 196,
|
||||
fstat64 = 197,
|
||||
pciconfig_read = 198,
|
||||
pciconfig_write = 199,
|
||||
pciconfig_iobase = 200,
|
||||
multiplexer = 201,
|
||||
getdents64 = 202,
|
||||
pivot_root = 203,
|
||||
fcntl64 = 204,
|
||||
madvise = 205,
|
||||
mincore = 206,
|
||||
gettid = 207,
|
||||
tkill = 208,
|
||||
setxattr = 209,
|
||||
lsetxattr = 210,
|
||||
fsetxattr = 211,
|
||||
getxattr = 212,
|
||||
lgetxattr = 213,
|
||||
fgetxattr = 214,
|
||||
listxattr = 215,
|
||||
llistxattr = 216,
|
||||
flistxattr = 217,
|
||||
removexattr = 218,
|
||||
lremovexattr = 219,
|
||||
fremovexattr = 220,
|
||||
futex = 221,
|
||||
sched_setaffinity = 222,
|
||||
sched_getaffinity = 223,
|
||||
tuxcall = 225,
|
||||
sendfile64 = 226,
|
||||
io_setup = 227,
|
||||
io_destroy = 228,
|
||||
io_getevents = 229,
|
||||
io_submit = 230,
|
||||
io_cancel = 231,
|
||||
set_tid_address = 232,
|
||||
fadvise64 = 233,
|
||||
exit_group = 234,
|
||||
lookup_dcookie = 235,
|
||||
epoll_create = 236,
|
||||
epoll_ctl = 237,
|
||||
epoll_wait = 238,
|
||||
remap_file_pages = 239,
|
||||
timer_create = 240,
|
||||
timer_settime = 241,
|
||||
timer_gettime = 242,
|
||||
timer_getoverrun = 243,
|
||||
timer_delete = 244,
|
||||
clock_settime = 245,
|
||||
clock_gettime = 246,
|
||||
clock_getres = 247,
|
||||
clock_nanosleep = 248,
|
||||
swapcontext = 249,
|
||||
tgkill = 250,
|
||||
utimes = 251,
|
||||
statfs64 = 252,
|
||||
fstatfs64 = 253,
|
||||
fadvise64_64 = 254,
|
||||
rtas = 255,
|
||||
sys_debug_setcontext = 256,
|
||||
migrate_pages = 258,
|
||||
mbind = 259,
|
||||
get_mempolicy = 260,
|
||||
set_mempolicy = 261,
|
||||
mq_open = 262,
|
||||
mq_unlink = 263,
|
||||
mq_timedsend = 264,
|
||||
mq_timedreceive = 265,
|
||||
mq_notify = 266,
|
||||
mq_getsetattr = 267,
|
||||
kexec_load = 268,
|
||||
add_key = 269,
|
||||
request_key = 270,
|
||||
keyctl = 271,
|
||||
waitid = 272,
|
||||
ioprio_set = 273,
|
||||
ioprio_get = 274,
|
||||
inotify_init = 275,
|
||||
inotify_add_watch = 276,
|
||||
inotify_rm_watch = 277,
|
||||
spu_run = 278,
|
||||
spu_create = 279,
|
||||
pselect6 = 280,
|
||||
ppoll = 281,
|
||||
unshare = 282,
|
||||
splice = 283,
|
||||
tee = 284,
|
||||
vmsplice = 285,
|
||||
openat = 286,
|
||||
mkdirat = 287,
|
||||
mknodat = 288,
|
||||
fchownat = 289,
|
||||
futimesat = 290,
|
||||
fstatat64 = 291,
|
||||
unlinkat = 292,
|
||||
renameat = 293,
|
||||
linkat = 294,
|
||||
symlinkat = 295,
|
||||
readlinkat = 296,
|
||||
fchmodat = 297,
|
||||
faccessat = 298,
|
||||
get_robust_list = 299,
|
||||
set_robust_list = 300,
|
||||
move_pages = 301,
|
||||
getcpu = 302,
|
||||
epoll_pwait = 303,
|
||||
utimensat = 304,
|
||||
signalfd = 305,
|
||||
timerfd_create = 306,
|
||||
eventfd = 307,
|
||||
sync_file_range2 = 308,
|
||||
fallocate = 309,
|
||||
subpage_prot = 310,
|
||||
timerfd_settime = 311,
|
||||
timerfd_gettime = 312,
|
||||
signalfd4 = 313,
|
||||
eventfd2 = 314,
|
||||
epoll_create1 = 315,
|
||||
dup3 = 316,
|
||||
pipe2 = 317,
|
||||
inotify_init1 = 318,
|
||||
perf_event_open = 319,
|
||||
preadv = 320,
|
||||
pwritev = 321,
|
||||
rt_tgsigqueueinfo = 322,
|
||||
fanotify_init = 323,
|
||||
fanotify_mark = 324,
|
||||
prlimit64 = 325,
|
||||
socket = 326,
|
||||
bind = 327,
|
||||
connect = 328,
|
||||
listen = 329,
|
||||
accept = 330,
|
||||
getsockname = 331,
|
||||
getpeername = 332,
|
||||
socketpair = 333,
|
||||
send = 334,
|
||||
sendto = 335,
|
||||
recv = 336,
|
||||
recvfrom = 337,
|
||||
shutdown = 338,
|
||||
setsockopt = 339,
|
||||
getsockopt = 340,
|
||||
sendmsg = 341,
|
||||
recvmsg = 342,
|
||||
recvmmsg = 343,
|
||||
accept4 = 344,
|
||||
name_to_handle_at = 345,
|
||||
open_by_handle_at = 346,
|
||||
clock_adjtime = 347,
|
||||
syncfs = 348,
|
||||
sendmmsg = 349,
|
||||
setns = 350,
|
||||
process_vm_readv = 351,
|
||||
process_vm_writev = 352,
|
||||
finit_module = 353,
|
||||
kcmp = 354,
|
||||
sched_setattr = 355,
|
||||
sched_getattr = 356,
|
||||
renameat2 = 357,
|
||||
seccomp = 358,
|
||||
getrandom = 359,
|
||||
memfd_create = 360,
|
||||
bpf = 361,
|
||||
execveat = 362,
|
||||
switch_endian = 363,
|
||||
userfaultfd = 364,
|
||||
membarrier = 365,
|
||||
mlock2 = 378,
|
||||
copy_file_range = 379,
|
||||
preadv2 = 380,
|
||||
pwritev2 = 381,
|
||||
kexec_file_load = 382,
|
||||
statx = 383,
|
||||
pkey_alloc = 384,
|
||||
pkey_free = 385,
|
||||
pkey_mprotect = 386,
|
||||
rseq = 387,
|
||||
io_pgetevents = 388,
|
||||
semget = 393,
|
||||
semctl = 394,
|
||||
shmget = 395,
|
||||
shmctl = 396,
|
||||
shmat = 397,
|
||||
shmdt = 398,
|
||||
msgget = 399,
|
||||
msgsnd = 400,
|
||||
msgrcv = 401,
|
||||
msgctl = 402,
|
||||
clock_gettime64 = 403,
|
||||
clock_settime64 = 404,
|
||||
clock_adjtime64 = 405,
|
||||
clock_getres_time64 = 406,
|
||||
clock_nanosleep_time64 = 407,
|
||||
timer_gettime64 = 408,
|
||||
timer_settime64 = 409,
|
||||
timerfd_gettime64 = 410,
|
||||
timerfd_settime64 = 411,
|
||||
utimensat_time64 = 412,
|
||||
pselect6_time64 = 413,
|
||||
ppoll_time64 = 414,
|
||||
io_pgetevents_time64 = 416,
|
||||
recvmmsg_time64 = 417,
|
||||
mq_timedsend_time64 = 418,
|
||||
mq_timedreceive_time64 = 419,
|
||||
semtimedop_time64 = 420,
|
||||
rt_sigtimedwait_time64 = 421,
|
||||
futex_time64 = 422,
|
||||
sched_rr_get_interval_time64 = 423,
|
||||
pidfd_send_signal = 424,
|
||||
io_uring_setup = 425,
|
||||
io_uring_enter = 426,
|
||||
io_uring_register = 427,
|
||||
open_tree = 428,
|
||||
move_mount = 429,
|
||||
fsopen = 430,
|
||||
fsconfig = 431,
|
||||
fsmount = 432,
|
||||
fspick = 433,
|
||||
pidfd_open = 434,
|
||||
clone3 = 435,
|
||||
close_range = 436,
|
||||
openat2 = 437,
|
||||
pidfd_getfd = 438,
|
||||
faccessat2 = 439,
|
||||
process_madvise = 440,
|
||||
};
|
||||
|
||||
pub const O_CREAT = 0o100;
|
||||
pub const O_EXCL = 0o200;
|
||||
pub const O_NOCTTY = 0o400;
|
||||
pub const O_TRUNC = 0o1000;
|
||||
pub const O_APPEND = 0o2000;
|
||||
pub const O_NONBLOCK = 0o4000;
|
||||
pub const O_DSYNC = 0o10000;
|
||||
pub const O_SYNC = 0o4010000;
|
||||
pub const O_RSYNC = 0o4010000;
|
||||
pub const O_DIRECTORY = 0o40000;
|
||||
pub const O_NOFOLLOW = 0o100000;
|
||||
pub const O_CLOEXEC = 0o2000000;
|
||||
|
||||
pub const O_ASYNC = 0o20000;
|
||||
pub const O_DIRECT = 0o400000;
|
||||
pub const O_LARGEFILE = 0o200000;
|
||||
pub const O_NOATIME = 0o1000000;
|
||||
pub const O_PATH = 0o10000000;
|
||||
pub const O_TMPFILE = 0o20040000;
|
||||
pub const O_NDELAY = O_NONBLOCK;
|
||||
|
||||
pub const F_DUPFD = 0;
|
||||
pub const F_GETFD = 1;
|
||||
pub const F_SETFD = 2;
|
||||
pub const F_GETFL = 3;
|
||||
pub const F_SETFL = 4;
|
||||
|
||||
pub const F_SETOWN = 8;
|
||||
pub const F_GETOWN = 9;
|
||||
pub const F_SETSIG = 10;
|
||||
pub const F_GETSIG = 11;
|
||||
|
||||
pub const F_GETLK = 12;
|
||||
pub const F_SETLK = 13;
|
||||
pub const F_SETLKW = 14;
|
||||
|
||||
pub const F_SETOWN_EX = 15;
|
||||
pub const F_GETOWN_EX = 16;
|
||||
|
||||
pub const F_GETOWNER_UIDS = 17;
|
||||
|
||||
pub const F_RDLCK = 0;
|
||||
pub const F_WRLCK = 1;
|
||||
pub const F_UNLCK = 2;
|
||||
|
||||
pub const LOCK_SH = 1;
|
||||
pub const LOCK_EX = 2;
|
||||
pub const LOCK_UN = 8;
|
||||
pub const LOCK_NB = 4;
|
||||
|
||||
/// stack-like segment
|
||||
pub const MAP_GROWSDOWN = 0x0100;
|
||||
|
||||
/// ETXTBSY
|
||||
pub const MAP_DENYWRITE = 0x0800;
|
||||
|
||||
/// mark it as an executable
|
||||
pub const MAP_EXECUTABLE = 0x1000;
|
||||
|
||||
/// pages are locked
|
||||
pub const MAP_LOCKED = 0x0080;
|
||||
|
||||
/// don't check for reservations
|
||||
pub const MAP_NORESERVE = 0x0040;
|
||||
|
||||
pub const VDSO_CGT_SYM = "__kernel_clock_gettime";
|
||||
pub const VDSO_CGT_VER = "LINUX_2.6.15";
|
||||
|
||||
pub const Flock = extern struct {
|
||||
l_type: i16,
|
||||
l_whence: i16,
|
||||
l_start: off_t,
|
||||
l_len: off_t,
|
||||
l_pid: pid_t,
|
||||
};
|
||||
|
||||
pub const msghdr = extern struct {
|
||||
msg_name: ?*sockaddr,
|
||||
msg_namelen: socklen_t,
|
||||
msg_iov: [*]iovec,
|
||||
msg_iovlen: usize,
|
||||
msg_control: ?*c_void,
|
||||
msg_controllen: socklen_t,
|
||||
msg_flags: i32,
|
||||
};
|
||||
|
||||
pub const msghdr_const = extern struct {
|
||||
msg_name: ?*const sockaddr,
|
||||
msg_namelen: socklen_t,
|
||||
msg_iov: [*]iovec_const,
|
||||
msg_iovlen: usize,
|
||||
msg_control: ?*c_void,
|
||||
msg_controllen: socklen_t,
|
||||
msg_flags: i32,
|
||||
};
|
||||
|
||||
pub const blksize_t = i32;
|
||||
pub const nlink_t = u32;
|
||||
pub const time_t = isize;
|
||||
pub const mode_t = u32;
|
||||
pub const off_t = i64;
|
||||
pub const ino_t = u64;
|
||||
pub const dev_t = u64;
|
||||
pub const blkcnt_t = i64;
|
||||
|
||||
// The `stat` definition used by the Linux kernel.
|
||||
pub const kernel_stat = extern struct {
|
||||
dev: dev_t,
|
||||
ino: ino_t,
|
||||
mode: mode_t,
|
||||
nlink: nlink_t,
|
||||
uid: uid_t,
|
||||
gid: gid_t,
|
||||
rdev: dev_t,
|
||||
__rdev_padding: i16,
|
||||
size: off_t,
|
||||
blksize: blksize_t,
|
||||
blocks: blkcnt_t,
|
||||
__atim32: timespec32,
|
||||
__mtim32: timespec32,
|
||||
__ctim32: timespec32,
|
||||
__unused: [2]u32,
|
||||
atim: timespec,
|
||||
mtim: timespec,
|
||||
ctim: timespec,
|
||||
|
||||
const timespec32 = extern struct {
|
||||
tv_sec: i32,
|
||||
tv_nsec: i32,
|
||||
};
|
||||
|
||||
pub fn atime(self: @This()) timespec {
|
||||
return self.atim;
|
||||
}
|
||||
|
||||
pub fn mtime(self: @This()) timespec {
|
||||
return self.mtim;
|
||||
}
|
||||
|
||||
pub fn ctime(self: @This()) timespec {
|
||||
return self.ctim;
|
||||
}
|
||||
};
|
||||
|
||||
// The `stat64` definition used by the libc.
|
||||
pub const libc_stat = kernel_stat;
|
||||
|
||||
pub const timespec = extern struct {
|
||||
tv_sec: time_t,
|
||||
tv_nsec: isize,
|
||||
};
|
||||
|
||||
pub const timeval = extern struct {
|
||||
tv_sec: time_t,
|
||||
tv_usec: isize,
|
||||
};
|
||||
|
||||
pub const timezone = extern struct {
|
||||
tz_minuteswest: i32,
|
||||
tz_dsttime: i32,
|
||||
};
|
||||
|
||||
pub const greg_t = u32;
|
||||
pub const gregset_t = [48]greg_t;
|
||||
pub const fpregset_t = [33]f64;
|
||||
|
||||
pub const vrregset = extern struct {
|
||||
vrregs: [32][4]u32,
|
||||
vrsave: u32,
|
||||
_pad: [2]u32,
|
||||
vscr: u32,
|
||||
};
|
||||
pub const vrregset_t = vrregset;
|
||||
|
||||
pub const mcontext_t = extern struct {
|
||||
gp_regs: gregset_t,
|
||||
fp_regs: fpregset_t,
|
||||
v_regs: vrregset_t align(16),
|
||||
};
|
||||
|
||||
pub const ucontext_t = extern struct {
|
||||
flags: u32,
|
||||
link: *ucontext_t,
|
||||
stack: stack_t,
|
||||
pad: [7]i32,
|
||||
regs: *mcontext_t,
|
||||
sigmask: sigset_t,
|
||||
pad2: [3]i32,
|
||||
mcontext: mcontext_t,
|
||||
};
|
||||
|
||||
pub const Elf_Symndx = u32;
|
||||
|
||||
pub const MMAP2_UNIT = 4096;
|
||||
@ -813,10 +813,6 @@ pub const sigset_t = extern struct {
|
||||
__bits: [_SIG_WORDS]u32,
|
||||
};
|
||||
|
||||
pub const SIG_ERR = @intToPtr(?Sigaction.sigaction_fn, maxInt(usize));
|
||||
pub const SIG_DFL = @intToPtr(?Sigaction.sigaction_fn, 0);
|
||||
pub const SIG_IGN = @intToPtr(?Sigaction.sigaction_fn, 1);
|
||||
|
||||
pub const empty_sigset = sigset_t{ .__bits = [_]u32{0} ** _SIG_WORDS };
|
||||
|
||||
// XXX x86_64 specific
|
||||
@ -1219,3 +1215,25 @@ pub const rlimit = extern struct {
|
||||
pub const SHUT_RD = 0;
|
||||
pub const SHUT_WR = 1;
|
||||
pub const SHUT_RDWR = 2;
|
||||
|
||||
pub const nfds_t = u32;
|
||||
|
||||
pub const pollfd = extern struct {
|
||||
fd: fd_t,
|
||||
events: i16,
|
||||
revents: i16,
|
||||
};
|
||||
|
||||
/// Testable events (may be specified in events field).
|
||||
pub const POLLIN = 0x0001;
|
||||
pub const POLLPRI = 0x0002;
|
||||
pub const POLLOUT = 0x0004;
|
||||
pub const POLLRDNORM = 0x0040;
|
||||
pub const POLLWRNORM = POLLOUT;
|
||||
pub const POLLRDBAND = 0x0080;
|
||||
pub const POLLWRBAND = 0x0100;
|
||||
|
||||
/// Non-testable events (may not be specified in events field).
|
||||
pub const POLLERR = 0x0008;
|
||||
pub const POLLHUP = 0x0010;
|
||||
pub const POLLNVAL = 0x0020;
|
||||
|
||||
@@ -26,6 +26,7 @@ pub usingnamespace switch (builtin.arch) {
.riscv64 => @import("linux/riscv64.zig"),
.sparcv9 => @import("linux/sparc64.zig"),
.mips, .mipsel => @import("linux/mips.zig"),
.powerpc => @import("linux/powerpc.zig"),
.powerpc64, .powerpc64le => @import("linux/powerpc64.zig"),
else => struct {},
};
@@ -1354,7 +1354,7 @@ test "timeout (after a relative time)" {
.flags = 0,
}, cqe);

// Tests should not depend on timings: skip test (result) if outside margin.
// Tests should not depend on timings: skip test if outside margin.
if (!std.math.approxEqAbs(f64, ms, @intToFloat(f64, stopped - started), margin)) return error.SkipZigTest;
}
lib/std/os/linux/powerpc.zig (new file, 133 lines)
@@ -0,0 +1,133 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
// Copyright (c) 2015-2021 Zig Contributors
|
||||
// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
|
||||
// The MIT license requires this copyright notice to be included in all copies
|
||||
// and substantial portions of the software.
|
||||
|
||||
usingnamespace @import("../bits.zig");
|
||||
|
||||
pub fn syscall0(number: SYS) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number))
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn syscall1(number: SYS, arg1: usize) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number)),
|
||||
[arg1] "{r3}" (arg1)
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn syscall2(number: SYS, arg1: usize, arg2: usize) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number)),
|
||||
[arg1] "{r3}" (arg1),
|
||||
[arg2] "{r4}" (arg2)
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn syscall3(number: SYS, arg1: usize, arg2: usize, arg3: usize) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number)),
|
||||
[arg1] "{r3}" (arg1),
|
||||
[arg2] "{r4}" (arg2),
|
||||
[arg3] "{r5}" (arg3)
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn syscall4(number: SYS, arg1: usize, arg2: usize, arg3: usize, arg4: usize) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number)),
|
||||
[arg1] "{r3}" (arg1),
|
||||
[arg2] "{r4}" (arg2),
|
||||
[arg3] "{r5}" (arg3),
|
||||
[arg4] "{r6}" (arg4)
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn syscall5(number: SYS, arg1: usize, arg2: usize, arg3: usize, arg4: usize, arg5: usize) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number)),
|
||||
[arg1] "{r3}" (arg1),
|
||||
[arg2] "{r4}" (arg2),
|
||||
[arg3] "{r5}" (arg3),
|
||||
[arg4] "{r6}" (arg4),
|
||||
[arg5] "{r7}" (arg5)
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn syscall6(
|
||||
number: SYS,
|
||||
arg1: usize,
|
||||
arg2: usize,
|
||||
arg3: usize,
|
||||
arg4: usize,
|
||||
arg5: usize,
|
||||
arg6: usize,
|
||||
) usize {
|
||||
return asm volatile (
|
||||
\\ sc
|
||||
\\ bns+ 1f
|
||||
\\ neg 3, 3
|
||||
\\ 1:
|
||||
: [ret] "={r3}" (-> usize)
|
||||
: [number] "{r0}" (@enumToInt(number)),
|
||||
[arg1] "{r3}" (arg1),
|
||||
[arg2] "{r4}" (arg2),
|
||||
[arg3] "{r5}" (arg3),
|
||||
[arg4] "{r6}" (arg4),
|
||||
[arg5] "{r7}" (arg5),
|
||||
[arg6] "{r8}" (arg6)
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
|
||||
/// This matches the libc clone function.
|
||||
pub extern fn clone(func: fn (arg: usize) callconv(.C) u8, stack: usize, flags: usize, arg: usize, ptid: *i32, tls: usize, ctid: *i32) usize;
|
||||
|
||||
pub const restore = restore_rt;
|
||||
|
||||
pub fn restore_rt() callconv(.Naked) void {
|
||||
return asm volatile ("sc"
|
||||
:
|
||||
: [number] "{r0}" (@enumToInt(SYS.rt_sigreturn))
|
||||
: "memory", "cr0", "r4", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12"
|
||||
);
|
||||
}
|
||||
@@ -491,6 +491,71 @@ fn clone() callconv(.Naked) void {
                \\ syscall
            );
        },
        .powerpc => {
            // __clone(func, stack, flags, arg, ptid, tls, ctid)
            // 3, 4, 5, 6, 7, 8, 9

            // syscall(SYS_clone, flags, stack, ptid, tls, ctid)
            // 0 3, 4, 5, 6, 7
            asm volatile (
                \\# store non-volatile regs r30, r31 on stack in order to put our
                \\# start func and its arg there
                \\stwu 30, -16(1)
                \\stw 31, 4(1)
                \\
                \\# save r3 (func) into r30, and r6(arg) into r31
                \\mr 30, 3
                \\mr 31, 6
                \\
                \\# create initial stack frame for new thread
                \\clrrwi 4, 4, 4
                \\li 0, 0
                \\stwu 0, -16(4)
                \\
                \\#move c into first arg
                \\mr 3, 5
                \\#mr 4, 4
                \\mr 5, 7
                \\mr 6, 8
                \\mr 7, 9
                \\
                \\# move syscall number into r0
                \\li 0, 120
                \\
                \\sc
                \\
                \\# check for syscall error
                \\bns+ 1f # jump to label 1 if no summary overflow.
                \\#else
                \\neg 3, 3 #negate the result (errno)
                \\1:
                \\# compare sc result with 0
                \\cmpwi cr7, 3, 0
                \\
                \\# if not 0, jump to end
                \\bne cr7, 2f
                \\
                \\#else: we're the child
                \\#call funcptr: move arg (d) into r3
                \\mr 3, 31
                \\#move r30 (funcptr) into CTR reg
                \\mtctr 30
                \\# call CTR reg
                \\bctrl
                \\# mov SYS_exit into r0 (the exit param is already in r3)
                \\li 0, 1
                \\sc
                \\
                \\2:
                \\
                \\# restore stack
                \\lwz 30, 0(1)
                \\lwz 31, 4(1)
                \\addi 1, 1, 16
                \\
                \\blr
            );
        },
        .powerpc64, .powerpc64le => {
            // __clone(func, stack, flags, arg, ptid, tls, ctid)
            // 3, 4, 5, 6, 7, 8, 9
@@ -43,6 +43,8 @@

/* layout */
.canvas {
    display: flex;
    flex-direction: column;
    width: 100vw;
    height: 100vh;
    overflow: hidden;
@@ -53,12 +55,21 @@
    background-color: var(--bg-color);
}

.banner {
    background-color: darkred;
    text-align: center;
    color: white;
    padding: 15px 5px;
}

.banner a {
    color: bisque;
    text-decoration: underline;
}

.flex-main {
    display: flex;
    width: 100%;
    height: 100%;
    justify-content: center;

    overflow-y: hidden;
    z-index: 100;
}

@@ -515,7 +526,7 @@
    </style>
  </head>
  <body class="canvas">
    <div style="background-color: darkred; width: 100vw; text-align: center; color: white; padding: 15px 5px;">These docs are experimental. <a style="color: bisque;text-decoration: underline;" href="https://kristoff.it/blog/zig-new-relationship-llvm/">Progress depends on the self-hosted compiler</a>, <a style="color: bisque;text-decoration: underline;" href="https://github.com/ziglang/zig/wiki/How-to-read-the-standard-library-source-code">consider reading the stlib source in the meantime</a>.</div>
    <div class="banner">These docs are experimental. <a href="https://kristoff.it/blog/zig-new-relationship-llvm/">Progress depends on the self-hosted compiler</a>, <a href="https://github.com/ziglang/zig/wiki/How-to-read-the-standard-library-source-code">consider reading the stdlib source in the meantime</a>.</div>
    <div class="flex-main">
      <div class="flex-filler"></div>
      <div class="flex-left sidebar">

@@ -1844,7 +1844,13 @@
    var oldHash = location.hash;
    var parts = oldHash.split("?");
    var newPart2 = (domSearch.value === "") ? "" : ("?" + domSearch.value);
    location.hash = (parts.length === 1) ? (oldHash + newPart2) : (parts[0] + newPart2);
    var newHash = (oldHash === "" ? "#" : parts[0]) + newPart2;
    // create a history entry only once per search
    if (parts.length === 1) {
        location.assign(newHash);
    } else {
        location.replace(newHash);
    }
}
function getSearchTerms() {
    var list = curNavSearch.trim().split(/[ \r\n\t]+/);
@ -7,7 +7,7 @@
|
||||
|
||||
const root = @import("root");
|
||||
const std = @import("std.zig");
|
||||
const builtin = std.builtin;
|
||||
const builtin = @import("builtin");
|
||||
const assert = std.debug.assert;
|
||||
const uefi = std.os.uefi;
|
||||
const tlcsprng = @import("crypto/tlcsprng.zig");
|
||||
@ -17,39 +17,101 @@ var argc_argv_ptr: [*]usize = undefined;
|
||||
const start_sym_name = if (builtin.arch.isMIPS()) "__start" else "_start";
|
||||
|
||||
comptime {
|
||||
if (builtin.output_mode == .Lib and builtin.link_mode == .Dynamic) {
|
||||
if (builtin.os.tag == .windows and !@hasDecl(root, "_DllMainCRTStartup")) {
|
||||
@export(_DllMainCRTStartup, .{ .name = "_DllMainCRTStartup" });
|
||||
// The self-hosted compiler is not fully capable of handling all of this start.zig file.
|
||||
// Until then, we have simplified logic here for self-hosted. TODO remove this once
|
||||
// self-hosted is capable enough to handle all of the real start.zig logic.
|
||||
if (builtin.zig_is_stage2) {
|
||||
if (builtin.output_mode == .Exe) {
|
||||
if (builtin.link_libc or builtin.object_format == .c) {
|
||||
if (!@hasDecl(root, "main")) {
|
||||
@export(main2, "main");
|
||||
}
|
||||
} else {
|
||||
if (!@hasDecl(root, "_start")) {
|
||||
@export(_start2, "_start");
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (builtin.output_mode == .Exe or @hasDecl(root, "main")) {
|
||||
if (builtin.link_libc and @hasDecl(root, "main")) {
|
||||
if (@typeInfo(@TypeOf(root.main)).Fn.calling_convention != .C) {
|
||||
@export(main, .{ .name = "main", .linkage = .Weak });
|
||||
} else {
|
||||
if (builtin.output_mode == .Lib and builtin.link_mode == .Dynamic) {
|
||||
if (builtin.os.tag == .windows and !@hasDecl(root, "_DllMainCRTStartup")) {
|
||||
@export(_DllMainCRTStartup, .{ .name = "_DllMainCRTStartup" });
|
||||
}
|
||||
} else if (builtin.os.tag == .windows) {
|
||||
if (!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
|
||||
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
|
||||
{
|
||||
@export(WinStartup, .{ .name = "wWinMainCRTStartup" });
|
||||
} else if (@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
|
||||
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
|
||||
{
|
||||
@compileError("WinMain not supported; declare wWinMain or main instead");
|
||||
} else if (@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup") and
|
||||
!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup"))
|
||||
{
|
||||
@export(wWinMainCRTStartup, .{ .name = "wWinMainCRTStartup" });
|
||||
} else if (builtin.output_mode == .Exe or @hasDecl(root, "main")) {
|
||||
if (builtin.link_libc and @hasDecl(root, "main")) {
|
||||
if (@typeInfo(@TypeOf(root.main)).Fn.calling_convention != .C) {
|
||||
@export(main, .{ .name = "main", .linkage = .Weak });
|
||||
}
|
||||
} else if (builtin.os.tag == .windows) {
|
||||
if (!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
|
||||
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
|
||||
{
|
||||
@export(WinStartup, .{ .name = "wWinMainCRTStartup" });
|
||||
} else if (@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup") and
|
||||
!@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup"))
|
||||
{
|
||||
@compileError("WinMain not supported; declare wWinMain or main instead");
|
||||
} else if (@hasDecl(root, "wWinMain") and !@hasDecl(root, "wWinMainCRTStartup") and
|
||||
!@hasDecl(root, "WinMain") and !@hasDecl(root, "WinMainCRTStartup"))
|
||||
{
|
||||
@export(wWinMainCRTStartup, .{ .name = "wWinMainCRTStartup" });
|
||||
}
|
||||
} else if (builtin.os.tag == .uefi) {
|
||||
if (!@hasDecl(root, "EfiMain")) @export(EfiMain, .{ .name = "EfiMain" });
|
||||
} else if (builtin.arch.isWasm() and builtin.os.tag == .freestanding) {
|
||||
if (!@hasDecl(root, start_sym_name)) @export(wasm_freestanding_start, .{ .name = start_sym_name });
|
||||
} else if (builtin.os.tag != .other and builtin.os.tag != .freestanding) {
|
||||
if (!@hasDecl(root, start_sym_name)) @export(_start, .{ .name = start_sym_name });
|
||||
}
|
||||
} else if (builtin.os.tag == .uefi) {
|
||||
if (!@hasDecl(root, "EfiMain")) @export(EfiMain, .{ .name = "EfiMain" });
|
||||
} else if (builtin.arch.isWasm() and builtin.os.tag == .freestanding) {
|
||||
if (!@hasDecl(root, start_sym_name)) @export(wasm_freestanding_start, .{ .name = start_sym_name });
|
||||
} else if (builtin.os.tag != .other and builtin.os.tag != .freestanding) {
|
||||
if (!@hasDecl(root, start_sym_name)) @export(_start, .{ .name = start_sym_name });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Simplified start code for stage2 until it supports more language features ///
|
||||
|
||||
fn main2() callconv(.C) c_int {
|
||||
root.main();
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn _start2() callconv(.Naked) noreturn {
|
||||
root.main();
|
||||
exit2(0);
|
||||
}
|
||||
|
||||
fn exit2(code: u8) noreturn {
|
||||
switch (builtin.arch) {
|
||||
.x86_64 => {
|
||||
asm volatile ("syscall"
|
||||
:
|
||||
: [number] "{rax}" (231),
|
||||
[arg1] "{rdi}" (code)
|
||||
: "rcx", "r11", "memory"
|
||||
);
|
||||
},
|
||||
.arm => {
|
||||
asm volatile ("svc #0"
|
||||
:
|
||||
: [number] "{r7}" (1),
|
||||
[arg1] "{r0}" (code)
|
||||
: "memory"
|
||||
);
|
||||
},
|
||||
.aarch64 => {
|
||||
asm volatile ("svc #0"
|
||||
:
|
||||
: [number] "{x8}" (93),
|
||||
[arg1] "{x0}" (code)
|
||||
: "memory", "cc"
|
||||
);
|
||||
},
|
||||
else => @compileError("TODO"),
|
||||
}
|
||||
unreachable;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn _DllMainCRTStartup(
|
||||
hinstDLL: std.os.windows.HINSTANCE,
|
||||
fdwReason: std.os.windows.DWORD,
|
||||
@ -135,6 +197,19 @@ fn _start() callconv(.Naked) noreturn {
|
||||
: [argc] "={sp}" (-> [*]usize)
|
||||
);
|
||||
},
|
||||
.powerpc => {
|
||||
// Setup the initial stack frame and clear the back chain pointer.
|
||||
argc_argv_ptr = asm volatile (
|
||||
\\ mr 4, 1
|
||||
\\ li 0, 0
|
||||
\\ stwu 1,-16(1)
|
||||
\\ stw 0, 0(1)
|
||||
\\ mtlr 0
|
||||
: [argc] "={r4}" (-> [*]usize)
|
||||
:
|
||||
: "r0"
|
||||
);
|
||||
},
|
||||
.powerpc64le => {
|
||||
// Setup the initial stack frame and clear the back chain pointer.
|
||||
// TODO: Support powerpc64 (big endian) on ELFv2.
|
||||
|
||||
@@ -92,7 +92,7 @@ pub const zig = @import("zig.zig");
pub const start = @import("start.zig");

// This forces the start.zig file to be imported, and the comptime logic inside that
// file decides whether to export any appropriate start symbols.
// file decides whether to export any appropriate start symbols, and call main.
comptime {
    _ = start;
}
@@ -1178,7 +1178,7 @@ pub const Target = struct {
    return switch (arch) {
        .arm, .armeb, .thumb, .thumbeb => &arm.cpu.generic,
        .aarch64, .aarch64_be, .aarch64_32 => &aarch64.cpu.generic,
        .avr => &avr.cpu.avr1,
        .avr => &avr.cpu.avr2,
        .bpfel, .bpfeb => &bpf.cpu.generic,
        .hexagon => &hexagon.cpu.generic,
        .mips, .mipsel => &mips.cpu.mips32,
@@ -431,7 +431,7 @@ fn printIndicatorLine(source: []const u8, indicator_index: usize) void {

fn printWithVisibleNewlines(source: []const u8) void {
    var i: usize = 0;
    while (std.mem.indexOf(u8, source[i..], "\n")) |nl| : (i += nl + 1) {
    while (std.mem.indexOfScalar(u8, source[i..], '\n')) |nl| : (i += nl + 1) {
        printLine(source[i .. i + nl]);
    }
    print("{s}␃\n", .{source[i..]}); // End of Text symbol (ETX)
@@ -439,7 +439,7 @@ fn printWithVisibleNewlines(source: []const u8) void {

fn printLine(line: []const u8) void {
    if (line.len != 0) switch (line[line.len - 1]) {
        ' ', '\t' => print("{s}⏎\n", .{line}), // Carriage return symbol,
        ' ', '\t' => return print("{s}⏎\n", .{line}), // Carriage return symbol,
        else => {},
    };
    print("{s}\n", .{line});
@@ -451,7 +451,7 @@ test {

/// Given a type, reference all the declarations inside, so that the semantic analyzer sees them.
pub fn refAllDecls(comptime T: type) void {
    if (!@import("builtin").is_test) return;
    if (!std.builtin.is_test) return;
    inline for (std.meta.declarations(T)) |decl| {
        _ = decl;
    }
@@ -271,7 +271,9 @@ test "timestamp" {
    sleep(ns_per_ms);
    const time_1 = milliTimestamp();
    const interval = time_1 - time_0;
    testing.expect(interval > 0 and interval < margin);
    testing.expect(interval > 0);
    // Tests should not depend on timings: skip test if outside margin.
    if (!(interval < margin)) return error.SkipZigTest;
}

test "Timer" {
@@ -280,7 +282,9 @@ test "Timer" {
    var timer = try Timer.start();
    sleep(10 * ns_per_ms);
    const time_0 = timer.read();
    testing.expect(time_0 > 0 and time_0 < margin);
    testing.expect(time_0 > 0);
    // Tests should not depend on timings: skip test if outside margin.
    if (!(time_0 < margin)) return error.SkipZigTest;

    const time_1 = timer.lap();
    testing.expect(time_1 >= time_0);
@@ -5,6 +5,8 @@
// and substantial portions of the software.
const testing = @import("std.zig").testing;

// TODO: Add support for multi-byte ops (e.g. table operations)

/// Wasm instruction opcodes
///
/// All instructions are defined as per spec:
@@ -175,7 +177,7 @@ pub const Opcode = enum(u8) {
    i32_reinterpret_f32 = 0xBC,
    i64_reinterpret_f64 = 0xBD,
    f32_reinterpret_i32 = 0xBE,
    i64_reinterpret_i64 = 0xBF,
    f64_reinterpret_i64 = 0xBF,
    i32_extend8_s = 0xC0,
    i32_extend16_s = 0xC1,
    i64_extend8_s = 0xC2,
@@ -278,3 +280,6 @@ pub const block_empty: u8 = 0x40;
// binary constants
pub const magic = [_]u8{ 0x00, 0x61, 0x73, 0x6D }; // \0asm
pub const version = [_]u8{ 0x01, 0x00, 0x00, 0x00 }; // version 1

// Each wasm page size is 64kB
pub const page_size = 64 * 1024;
@@ -18,34 +18,54 @@ pub const CrossTarget = @import("zig/cross_target.zig").CrossTarget;

pub const SrcHash = [16]u8;

/// If the source is small enough, it is used directly as the hash.
/// If it is long, blake3 hash is computed.
pub fn hashSrc(src: []const u8) SrcHash {
    var out: SrcHash = undefined;
    if (src.len <= @typeInfo(SrcHash).Array.len) {
        std.mem.copy(u8, &out, src);
        std.mem.set(u8, out[src.len..], 0);
    } else {
        std.crypto.hash.Blake3.hash(src, &out, .{});
    }
    std.crypto.hash.Blake3.hash(src, &out, .{});
    return out;
}

pub fn findLineColumn(source: []const u8, byte_offset: usize) struct { line: usize, column: usize } {
pub fn hashName(parent_hash: SrcHash, sep: []const u8, name: []const u8) SrcHash {
    var out: SrcHash = undefined;
    var hasher = std.crypto.hash.Blake3.init(.{});
    hasher.update(&parent_hash);
    hasher.update(sep);
    hasher.update(name);
    hasher.final(&out);
    return out;
}

pub const Loc = struct {
    line: usize,
    column: usize,
    /// Does not include the trailing newline.
    source_line: []const u8,
};

pub fn findLineColumn(source: []const u8, byte_offset: usize) Loc {
    var line: usize = 0;
    var column: usize = 0;
    for (source[0..byte_offset]) |byte| {
        switch (byte) {
    var line_start: usize = 0;
    var i: usize = 0;
    while (i < byte_offset) : (i += 1) {
        switch (source[i]) {
            '\n' => {
                line += 1;
                column = 0;
                line_start = i + 1;
            },
            else => {
                column += 1;
            },
        }
    }
    return .{ .line = line, .column = column };
    while (i < source.len and source[i] != '\n') {
        i += 1;
    }
    return .{
        .line = line,
        .column = column,
        .source_line = source[line_start..i],
    };
}

pub fn lineDelta(source: []const u8, start: usize, end: usize) isize {
@ -1801,17 +1801,21 @@ test "zig fmt: array literal with hint" {
|
||||
);
|
||||
}
|
||||
|
||||
test "zig fmt: array literal veritical column alignment" {
|
||||
test "zig fmt: array literal vertical column alignment" {
|
||||
try testTransform(
|
||||
\\const a = []u8{
|
||||
\\ 1000, 200,
|
||||
\\ 30, 4,
|
||||
\\ 50000, 60
|
||||
\\ 50000, 60,
|
||||
\\};
|
||||
\\const a = []u8{0, 1, 2, 3, 40,
|
||||
\\ 4,5,600,7,
|
||||
\\ 80,
|
||||
\\ 9, 10, 11, 0, 13, 14, 15};
|
||||
\\ 9, 10, 11, 0, 13, 14, 15,};
|
||||
\\const a = [12]u8{
|
||||
\\ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
|
||||
\\const a = [12]u8{
|
||||
\\ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, };
|
||||
\\
|
||||
,
|
||||
\\const a = []u8{
|
||||
@ -1825,6 +1829,21 @@ test "zig fmt: array literal veritical column alignment" {
|
||||
\\ 9, 10, 11, 0, 13,
|
||||
\\ 14, 15,
|
||||
\\};
|
||||
\\const a = [12]u8{ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 };
|
||||
\\const a = [12]u8{
|
||||
\\ 31,
|
||||
\\ 28,
|
||||
\\ 31,
|
||||
\\ 30,
|
||||
\\ 31,
|
||||
\\ 30,
|
||||
\\ 31,
|
||||
\\ 31,
|
||||
\\ 30,
|
||||
\\ 31,
|
||||
\\ 30,
|
||||
\\ 31,
|
||||
\\};
|
||||
\\
|
||||
);
|
||||
}
|
||||
@ -2026,10 +2045,7 @@ test "zig fmt: add trailing comma to array literal" {
|
||||
\\ return []u16{
|
||||
\\ 'm', 's', 'y', 's', '-', // hi
|
||||
\\ };
|
||||
\\ return []u16{
|
||||
\\ 'm', 's', 'y', 's',
|
||||
\\ '-',
|
||||
\\ };
|
||||
\\ return []u16{ 'm', 's', 'y', 's', '-' };
|
||||
\\ return []u16{ 'm', 's', 'y', 's', '-' };
|
||||
\\}
|
||||
\\
|
||||
@ -4661,6 +4677,90 @@ test "zig fmt: insert trailing comma if there are comments between switch values
|
||||
);
|
||||
}
|
||||
|
||||
test "zig fmt: insert trailing comma if comments in array init" {
|
||||
try testTransform(
|
||||
\\var a = .{
|
||||
\\ "foo", //
|
||||
\\ "bar"
|
||||
\\};
|
||||
\\var a = .{
|
||||
\\ "foo",
|
||||
\\ "bar" //
|
||||
\\};
|
||||
\\var a = .{
|
||||
\\ "foo",
|
||||
\\ "//"
|
||||
\\};
|
||||
\\var a = .{
|
||||
\\ "foo",
|
||||
\\ "//" //
|
||||
\\};
|
||||
\\
|
||||
,
|
||||
\\var a = .{
|
||||
\\ "foo", //
|
||||
\\ "bar",
|
||||
\\};
|
||||
\\var a = .{
|
||||
\\ "foo",
|
||||
\\ "bar", //
|
||||
\\};
|
||||
\\var a = .{ "foo", "//" };
|
||||
\\var a = .{
|
||||
\\ "foo",
|
||||
\\ "//", //
|
||||
\\};
|
||||
\\
|
||||
);
|
||||
}
|
||||
|
||||
test "zig fmt: make single-line if no trailing comma" {
|
||||
try testTransform(
|
||||
\\test "function call no trailing comma" {
|
||||
\\ foo(
|
||||
\\ 1,
|
||||
\\ 2
|
||||
\\ );
|
||||
\\}
|
||||
\\
|
||||
,
|
||||
\\test "function call no trailing comma" {
|
||||
\\ foo(1, 2);
|
||||
\\}
|
||||
\\
|
||||
);
|
||||
|
||||
try testTransform(
|
||||
\\test "struct no trailing comma" {
|
||||
\\ const a = .{
|
||||
\\ .foo = 1,
|
||||
\\ .bar = 2
|
||||
\\ };
|
||||
\\}
|
||||
\\
|
||||
,
|
||||
\\test "struct no trailing comma" {
|
||||
\\ const a = .{ .foo = 1, .bar = 2 };
|
||||
\\}
|
||||
\\
|
||||
);
|
||||
|
||||
try testTransform(
|
||||
\\test "array no trailing comma" {
|
||||
\\ var stream = multiOutStream(.{
|
||||
\\ fbs1.outStream(),
|
||||
\\ fbs2.outStream()
|
||||
\\ });
|
||||
\\}
|
||||
\\
|
||||
,
|
||||
\\test "array no trailing comma" {
|
||||
\\ var stream = multiOutStream(.{ fbs1.outStream(), fbs2.outStream() });
|
||||
\\}
|
||||
\\
|
||||
);
|
||||
}
|
||||
|
||||
test "zig fmt: error for invalid bit range" {
|
||||
try testError(
|
||||
\\var x: []align(0:0:0)u8 = bar;
|
||||
|
||||
@@ -9,6 +9,7 @@ const warn = std.debug.warn;
const Tokenizer = std.zig.Tokenizer;
const Parser = std.zig.Parser;
const io = std.io;
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;

const source = @embedFile("../os.zig");
var fixed_buffer_mem: [10 * 1024 * 1024]u8 = undefined;
@@ -25,12 +26,15 @@ pub fn main() !void {
    const end = timer.read();
    memory_used /= iterations;
    const elapsed_s = @intToFloat(f64, end - start) / std.time.ns_per_s;
    const bytes_per_sec = @intToFloat(f64, source.len * iterations) / elapsed_s;
    const mb_per_sec = bytes_per_sec / (1024 * 1024);
    const bytes_per_sec_float = @intToFloat(f64, source.len * iterations) / elapsed_s;
    const bytes_per_sec = @floatToInt(u64, @floor(bytes_per_sec_float));

    var stdout_file = std.io.getStdOut();
    const stdout = stdout_file.writer();
    try stdout.print("{:.3} MiB/s, {} KiB used \n", .{ mb_per_sec, memory_used / 1024 });
    try stdout.print("parsing speed: {:.2}/s, {:.2} used \n", .{
        fmtIntSizeBin(bytes_per_sec),
        fmtIntSizeBin(memory_used),
    });
}

fn testOnce() usize {
@ -1632,9 +1632,10 @@ fn renderArrayInit(
|
||||
}
|
||||
}
|
||||
|
||||
const contains_newlines = !tree.tokensOnSameLine(array_init.ast.lbrace, rbrace);
|
||||
const contains_comment = hasComment(tree, array_init.ast.lbrace, rbrace);
|
||||
const contains_multiline_string = hasMultilineString(tree, array_init.ast.lbrace, rbrace);
|
||||
|
||||
if (!trailing_comma and !contains_newlines) {
|
||||
if (!trailing_comma and !contains_comment and !contains_multiline_string) {
|
||||
// Render all on one line, no trailing comma.
|
||||
if (array_init.ast.elements.len == 1) {
|
||||
// If there is only one element, we don't use spaces
|
||||
@ -1653,7 +1654,8 @@ fn renderArrayInit(
|
||||
try renderToken(ais, tree, array_init.ast.lbrace, .newline);
|
||||
|
||||
var expr_index: usize = 0;
|
||||
while (rowSize(tree, array_init.ast.elements[expr_index..], rbrace)) |row_size| {
|
||||
while (true) {
|
||||
const row_size = rowSize(tree, array_init.ast.elements[expr_index..], rbrace);
|
||||
const row_exprs = array_init.ast.elements[expr_index..];
|
||||
// A place to store the width of each expression and its column's maximum
|
||||
const widths = try gpa.alloc(usize, row_exprs.len + row_size);
|
||||
@ -1686,7 +1688,7 @@ fn renderArrayInit(
|
||||
const maybe_comma = expr_last_token + 1;
|
||||
if (token_tags[maybe_comma] == .comma) {
|
||||
if (hasSameLineComment(tree, maybe_comma))
|
||||
break :sec_end i - this_line_size.? + 1;
|
||||
break :sec_end i - this_line_size + 1;
|
||||
}
|
||||
}
|
||||
break :sec_end row_exprs.len;
|
||||
@ -2238,17 +2240,36 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns true if there exists a comment between the start of token
|
||||
/// `start_token` and the start of token `end_token`. This is used to determine
|
||||
/// if e.g. a fn_proto should be wrapped and have a trailing comma inserted
|
||||
/// even if there is none in the source.
|
||||
/// Returns true if there exists a comment between any of the tokens from
|
||||
/// `start_token` to `end_token`. This is used to determine if e.g. a
|
||||
/// fn_proto should be wrapped and have a trailing comma inserted even if
|
||||
/// there is none in the source.
|
||||
fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const start = token_starts[start_token];
|
||||
const end = token_starts[end_token];
|
||||
var i = start_token;
|
||||
while (i < end_token) : (i += 1) {
|
||||
const start = token_starts[i] + tree.tokenSlice(i).len;
|
||||
const end = token_starts[i + 1];
|
||||
if (mem.indexOf(u8, tree.source[start..end], "//") != null) return true;
|
||||
}
|
||||
|
||||
return mem.indexOf(u8, tree.source[start..end], "//") != null;
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Returns true if there exists a multiline string literal between the start
|
||||
/// of token `start_token` and the start of token `end_token`.
|
||||
fn hasMultilineString(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
|
||||
for (token_tags[start_token..end_token]) |tag| {
|
||||
switch (tag) {
|
||||
.multiline_string_literal_line => return true,
|
||||
else => continue,
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Assumes that start is the first byte past the previous token and
|
||||
@ -2500,9 +2521,8 @@ fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool {
|
||||
};
|
||||
}
|
||||
|
||||
// Returns the number of nodes in `expr` that are on the same line as `rtoken`,
|
||||
// or null if they all are on the same line.
|
||||
fn rowSize(tree: ast.Tree, exprs: []const ast.Node.Index, rtoken: ast.TokenIndex) ?usize {
|
||||
// Returns the number of nodes in `expr` that are on the same line as `rtoken`.
|
||||
fn rowSize(tree: ast.Tree, exprs: []const ast.Node.Index, rtoken: ast.TokenIndex) usize {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
|
||||
const first_token = tree.firstToken(exprs[0]);
|
||||
@ -2510,7 +2530,7 @@ fn rowSize(tree: ast.Tree, exprs: []const ast.Node.Index, rtoken: ast.TokenIndex
|
||||
const maybe_comma = rtoken - 1;
|
||||
if (token_tags[maybe_comma] == .comma)
|
||||
return 1;
|
||||
return null; // no newlines
|
||||
return exprs.len; // no newlines
|
||||
}
|
||||
|
||||
var count: usize = 1;
|
||||
|
||||
411
src/AstGen.zig
@ -28,8 +28,6 @@ const BuiltinFn = @import("BuiltinFn.zig");
|
||||
instructions: std.MultiArrayList(zir.Inst) = .{},
|
||||
string_bytes: ArrayListUnmanaged(u8) = .{},
|
||||
extra: ArrayListUnmanaged(u32) = .{},
|
||||
decl_map: std.StringArrayHashMapUnmanaged(void) = .{},
|
||||
decls: ArrayListUnmanaged(*Decl) = .{},
|
||||
/// The end of special indexes. `zir.Inst.Ref` subtracts against this number to convert
|
||||
/// to `zir.Inst.Index`. The default here is correct if there are 0 parameters.
|
||||
ref_start_index: u32 = zir.Inst.Ref.typed_value_map.len,
|
||||
@ -110,8 +108,6 @@ pub fn deinit(astgen: *AstGen) void {
|
||||
astgen.instructions.deinit(gpa);
|
||||
astgen.extra.deinit(gpa);
|
||||
astgen.string_bytes.deinit(gpa);
|
||||
astgen.decl_map.deinit(gpa);
|
||||
astgen.decls.deinit(gpa);
|
||||
}
|
||||
|
||||
pub const ResultLoc = union(enum) {
|
||||
@ -124,6 +120,9 @@ pub const ResultLoc = union(enum) {
|
||||
/// The expression must generate a pointer rather than a value. For example, the left hand side
|
||||
/// of an assignment uses this kind of result location.
|
||||
ref,
|
||||
/// The callee will accept a ref, but it is not necessary, and the `ResultLoc`
|
||||
/// may be treated as `none` instead.
|
||||
none_or_ref,
|
||||
/// The expression will be coerced into this type, but it will be evaluated as an rvalue.
|
||||
ty: zir.Inst.Ref,
|
||||
/// The expression must store its result into this typed pointer. The result instruction
|
||||
@ -157,7 +156,7 @@ pub const ResultLoc = union(enum) {
|
||||
var elide_store_to_block_ptr_instructions = false;
|
||||
switch (rl) {
|
||||
// In this branch there will not be any store_to_block_ptr instructions.
|
||||
.discard, .none, .ty, .ref => return .{
|
||||
.discard, .none, .none_or_ref, .ty, .ref => return .{
|
||||
.tag = .break_operand,
|
||||
.elide_store_to_block_ptr_instructions = false,
|
||||
},
|
||||
@ -606,8 +605,13 @@ pub fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) Inn
|
||||
|
||||
.deref => {
|
||||
const lhs = try expr(gz, scope, .none, node_datas[node].lhs);
|
||||
const result = try gz.addUnNode(.load, lhs, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
switch (rl) {
|
||||
.ref, .none_or_ref => return lhs,
|
||||
else => {
|
||||
const result = try gz.addUnNode(.load, lhs, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
}
|
||||
},
|
||||
.address_of => {
|
||||
const result = try expr(gz, scope, .ref, node_datas[node].lhs);
|
||||
@ -816,10 +820,34 @@ pub fn structInitExpr(
|
||||
}
|
||||
switch (rl) {
|
||||
.discard => return mod.failNode(scope, node, "TODO implement structInitExpr discard", .{}),
|
||||
.none => return mod.failNode(scope, node, "TODO implement structInitExpr none", .{}),
|
||||
.none, .none_or_ref => return mod.failNode(scope, node, "TODO implement structInitExpr none", .{}),
|
||||
.ref => unreachable, // struct literal not valid as l-value
|
||||
.ty => |ty_inst| {
|
||||
return mod.failNode(scope, node, "TODO implement structInitExpr ty", .{});
|
||||
const fields_list = try gpa.alloc(zir.Inst.StructInit.Item, struct_init.ast.fields.len);
|
||||
defer gpa.free(fields_list);
|
||||
|
||||
for (struct_init.ast.fields) |field_init, i| {
|
||||
const name_token = tree.firstToken(field_init) - 2;
|
||||
const str_index = try gz.identAsString(name_token);
|
||||
|
||||
const field_ty_inst = try gz.addPlNode(.field_type, field_init, zir.Inst.FieldType{
|
||||
.container_type = ty_inst,
|
||||
.name_start = str_index,
|
||||
});
|
||||
fields_list[i] = .{
|
||||
.field_type = astgen.refToIndex(field_ty_inst).?,
|
||||
.init = try expr(gz, scope, .{ .ty = field_ty_inst }, field_init),
|
||||
};
|
||||
}
|
||||
const init_inst = try gz.addPlNode(.struct_init, node, zir.Inst.StructInit{
|
||||
.fields_len = @intCast(u32, fields_list.len),
|
||||
});
|
||||
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
|
||||
fields_list.len * @typeInfo(zir.Inst.StructInit.Item).Struct.fields.len);
|
||||
for (fields_list) |field| {
|
||||
_ = gz.astgen.addExtraAssumeCapacity(field);
|
||||
}
|
||||
return rvalue(gz, scope, rl, init_inst, node);
|
||||
},
|
||||
.ptr => |ptr_inst| {
|
||||
const field_ptr_list = try gpa.alloc(zir.Inst.Index, struct_init.ast.fields.len);
|
||||
@ -1175,13 +1203,6 @@ fn blockExprStmts(
|
||||
// in the above while loop.
|
||||
const zir_tags = gz.astgen.instructions.items(.tag);
|
||||
switch (zir_tags[inst]) {
|
||||
.@"const" => {
|
||||
const tv = gz.astgen.instructions.items(.data)[inst].@"const";
|
||||
break :b switch (tv.ty.zigTypeTag()) {
|
||||
.NoReturn, .Void => true,
|
||||
else => false,
|
||||
};
|
||||
},
|
||||
// For some instructions, swap in a slightly different ZIR tag
|
||||
// so we can avoid a separate ensure_result_used instruction.
|
||||
.call_none_chkused => unreachable,
|
||||
@ -1248,7 +1269,10 @@ fn blockExprStmts(
|
||||
.fn_type_var_args,
|
||||
.fn_type_cc,
|
||||
.fn_type_cc_var_args,
|
||||
.has_decl,
|
||||
.int,
|
||||
.float,
|
||||
.float128,
|
||||
.intcast,
|
||||
.int_type,
|
||||
.is_non_null,
|
||||
@ -1321,12 +1345,18 @@ fn blockExprStmts(
|
||||
.switch_capture_else,
|
||||
.switch_capture_else_ref,
|
||||
.struct_init_empty,
|
||||
.struct_init,
|
||||
.field_type,
|
||||
.struct_decl,
|
||||
.struct_decl_packed,
|
||||
.struct_decl_extern,
|
||||
.union_decl,
|
||||
.enum_decl,
|
||||
.enum_decl_nonexhaustive,
|
||||
.opaque_decl,
|
||||
.int_to_enum,
|
||||
.enum_to_int,
|
||||
.type_info,
|
||||
=> break :b false,
|
||||
|
||||
// ZIR instructions that are always either `noreturn` or `void`.
|
||||
@ -1334,6 +1364,7 @@ fn blockExprStmts(
|
||||
.dbg_stmt_node,
|
||||
.ensure_result_used,
|
||||
.ensure_result_non_error,
|
||||
.@"export",
|
||||
.set_eval_branch_quota,
|
||||
.compile_log,
|
||||
.ensure_err_payload_void,
|
||||
@ -1482,7 +1513,7 @@ fn varDecl(
|
||||
init_scope.rl_ptr = try init_scope.addUnNode(.alloc, type_inst, node);
|
||||
init_scope.rl_ty_inst = type_inst;
|
||||
} else {
|
||||
const alloc = try init_scope.addUnNode(.alloc_inferred, undefined, node);
|
||||
const alloc = try init_scope.addNode(.alloc_inferred, node);
|
||||
resolve_inferred_alloc = alloc;
|
||||
init_scope.rl_ptr = alloc;
|
||||
}
|
||||
@ -1557,7 +1588,7 @@ fn varDecl(
|
||||
const alloc = try gz.addUnNode(.alloc_mut, type_inst, node);
|
||||
break :a .{ .alloc = alloc, .result_loc = .{ .ptr = alloc } };
|
||||
} else a: {
|
||||
const alloc = try gz.addUnNode(.alloc_inferred_mut, undefined, node);
|
||||
const alloc = try gz.addNode(.alloc_inferred_mut, node);
|
||||
resolve_inferred_alloc = alloc;
|
||||
break :a .{ .alloc = alloc, .result_loc = .{ .inferred_ptr = alloc } };
|
||||
};
|
||||
@ -1815,15 +1846,18 @@ fn containerDecl(
|
||||
defer bit_bag.deinit(gpa);
|
||||
|
||||
var cur_bit_bag: u32 = 0;
|
||||
var member_index: usize = 0;
|
||||
while (true) {
|
||||
const member_node = container_decl.ast.members[member_index];
|
||||
var field_index: usize = 0;
|
||||
for (container_decl.ast.members) |member_node| {
|
||||
const member = switch (node_tags[member_node]) {
|
||||
.container_field_init => tree.containerFieldInit(member_node),
|
||||
.container_field_align => tree.containerFieldAlign(member_node),
|
||||
.container_field => tree.containerField(member_node),
|
||||
else => unreachable,
|
||||
else => continue,
|
||||
};
|
||||
if (field_index % 16 == 0 and field_index != 0) {
|
||||
try bit_bag.append(gpa, cur_bit_bag);
|
||||
cur_bit_bag = 0;
|
||||
}
|
||||
if (member.comptime_token) |comptime_token| {
|
||||
return mod.failTok(scope, comptime_token, "TODO implement comptime struct fields", .{});
|
||||
}
|
||||
@ -1850,17 +1884,9 @@ fn containerDecl(
|
||||
fields_data.appendAssumeCapacity(@enumToInt(default_inst));
|
||||
}
|
||||
|
||||
member_index += 1;
|
||||
if (member_index < container_decl.ast.members.len) {
|
||||
if (member_index % 16 == 0) {
|
||||
try bit_bag.append(gpa, cur_bit_bag);
|
||||
cur_bit_bag = 0;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
field_index += 1;
|
||||
}
|
||||
const empty_slot_count = 16 - ((member_index - 1) % 16);
|
||||
const empty_slot_count = 16 - (field_index % 16);
|
||||
cur_bit_bag >>= @intCast(u5, empty_slot_count * 2);
|
||||
|
||||
const result = try gz.addPlNode(tag, node, zir.Inst.StructDecl{
|
||||
@ -1877,7 +1903,172 @@ fn containerDecl(
|
||||
return mod.failTok(scope, container_decl.ast.main_token, "TODO AstGen for union decl", .{});
|
||||
},
|
||||
.keyword_enum => {
|
||||
return mod.failTok(scope, container_decl.ast.main_token, "TODO AstGen for enum decl", .{});
|
||||
if (container_decl.layout_token) |t| {
|
||||
return mod.failTok(scope, t, "enums do not support 'packed' or 'extern'; instead provide an explicit integer tag type", .{});
|
||||
}
|
||||
// Count total fields as well as how many have explicitly provided tag values.
|
||||
const counts = blk: {
|
||||
var values: usize = 0;
|
||||
var total_fields: usize = 0;
|
||||
var decls: usize = 0;
|
||||
var nonexhaustive_node: ast.Node.Index = 0;
|
||||
for (container_decl.ast.members) |member_node| {
|
||||
const member = switch (node_tags[member_node]) {
|
||||
.container_field_init => tree.containerFieldInit(member_node),
|
||||
.container_field_align => tree.containerFieldAlign(member_node),
|
||||
.container_field => tree.containerField(member_node),
|
||||
else => {
|
||||
decls += 1;
|
||||
continue;
|
||||
},
|
||||
};
|
||||
if (member.comptime_token) |comptime_token| {
|
||||
return mod.failTok(scope, comptime_token, "enum fields cannot be marked comptime", .{});
|
||||
}
|
||||
if (member.ast.type_expr != 0) {
|
||||
return mod.failNode(scope, member.ast.type_expr, "enum fields do not have types", .{});
|
||||
}
|
||||
// Alignment expressions in enums are caught by the parser.
|
||||
assert(member.ast.align_expr == 0);
|
||||
|
||||
const name_token = member.ast.name_token;
|
||||
if (mem.eql(u8, tree.tokenSlice(name_token), "_")) {
|
||||
if (nonexhaustive_node != 0) {
|
||||
const msg = msg: {
|
||||
const msg = try mod.errMsg(
|
||||
scope,
|
||||
gz.nodeSrcLoc(member_node),
|
||||
"redundant non-exhaustive enum mark",
|
||||
.{},
|
||||
);
|
||||
errdefer msg.destroy(gpa);
|
||||
const other_src = gz.nodeSrcLoc(nonexhaustive_node);
|
||||
try mod.errNote(scope, other_src, msg, "other mark here", .{});
|
||||
break :msg msg;
|
||||
};
|
||||
return mod.failWithOwnedErrorMsg(scope, msg);
|
||||
}
|
||||
nonexhaustive_node = member_node;
|
||||
if (member.ast.value_expr != 0) {
|
||||
return mod.failNode(scope, member.ast.value_expr, "'_' is used to mark an enum as non-exhaustive and cannot be assigned a value", .{});
|
||||
}
|
||||
continue;
|
||||
}
|
||||
total_fields += 1;
|
||||
if (member.ast.value_expr != 0) {
|
||||
values += 1;
|
||||
}
|
||||
}
|
||||
break :blk .{
|
||||
.total_fields = total_fields,
|
||||
.values = values,
|
||||
.decls = decls,
|
||||
.nonexhaustive_node = nonexhaustive_node,
|
||||
};
|
||||
};
|
||||
if (counts.total_fields == 0) {
|
||||
// One can construct an enum with no tags, and it functions the same as `noreturn`. But
|
||||
// this is only useful for generic code; when explicitly using `enum {}` syntax, there
|
||||
// must be at least one tag.
|
||||
return mod.failNode(scope, node, "enum declarations must have at least one tag", .{});
|
||||
}
|
||||
if (counts.nonexhaustive_node != 0 and arg_inst == .none) {
|
||||
const msg = msg: {
|
||||
const msg = try mod.errMsg(
|
||||
scope,
|
||||
gz.nodeSrcLoc(node),
|
||||
"non-exhaustive enum missing integer tag type",
|
||||
.{},
|
||||
);
|
||||
errdefer msg.destroy(gpa);
|
||||
const other_src = gz.nodeSrcLoc(counts.nonexhaustive_node);
|
||||
try mod.errNote(scope, other_src, msg, "marked non-exhaustive here", .{});
|
||||
break :msg msg;
|
||||
};
|
||||
return mod.failWithOwnedErrorMsg(scope, msg);
|
||||
}
|
||||
if (counts.values == 0 and counts.decls == 0 and arg_inst == .none) {
|
||||
// No explicitly provided tag values and no top level declarations! In this case,
|
||||
// we can construct the enum type in AstGen and it will be correctly shared by all
|
||||
// generic function instantiations and comptime function calls.
|
||||
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
errdefer new_decl_arena.deinit();
|
||||
const arena = &new_decl_arena.allocator;
|
||||
|
||||
var fields_map: std.StringArrayHashMapUnmanaged(void) = .{};
|
||||
try fields_map.ensureCapacity(arena, counts.total_fields);
|
||||
for (container_decl.ast.members) |member_node| {
|
||||
if (member_node == counts.nonexhaustive_node)
|
||||
continue;
|
||||
const member = switch (node_tags[member_node]) {
|
||||
.container_field_init => tree.containerFieldInit(member_node),
|
||||
.container_field_align => tree.containerFieldAlign(member_node),
|
||||
.container_field => tree.containerField(member_node),
|
||||
else => unreachable, // We checked earlier.
|
||||
};
|
||||
const name_token = member.ast.name_token;
|
||||
const tag_name = try mod.identifierTokenStringTreeArena(
|
||||
scope,
|
||||
name_token,
|
||||
tree,
|
||||
arena,
|
||||
);
|
||||
const gop = fields_map.getOrPutAssumeCapacity(tag_name);
|
||||
if (gop.found_existing) {
|
||||
const msg = msg: {
|
||||
const msg = try mod.errMsg(
|
||||
scope,
|
||||
gz.tokSrcLoc(name_token),
|
||||
"duplicate enum tag",
|
||||
.{},
|
||||
);
|
||||
errdefer msg.destroy(gpa);
|
||||
// Iterate to find the other tag. We don't eagerly store it in a hash
|
||||
// map because in the hot path there will be no compile error and we
|
||||
// don't need to waste time with a hash map.
|
||||
const bad_node = for (container_decl.ast.members) |other_member_node| {
|
||||
const other_member = switch (node_tags[other_member_node]) {
|
||||
.container_field_init => tree.containerFieldInit(other_member_node),
|
||||
.container_field_align => tree.containerFieldAlign(other_member_node),
|
||||
.container_field => tree.containerField(other_member_node),
|
||||
else => unreachable, // We checked earlier.
|
||||
};
|
||||
const other_tag_name = try mod.identifierTokenStringTreeArena(
|
||||
scope,
|
||||
other_member.ast.name_token,
|
||||
tree,
|
||||
arena,
|
||||
);
|
||||
if (mem.eql(u8, tag_name, other_tag_name))
|
||||
break other_member_node;
|
||||
} else unreachable;
|
||||
const other_src = gz.nodeSrcLoc(bad_node);
|
||||
try mod.errNote(scope, other_src, msg, "other tag here", .{});
|
||||
break :msg msg;
|
||||
};
|
||||
return mod.failWithOwnedErrorMsg(scope, msg);
|
||||
}
|
||||
}
|
||||
const enum_simple = try arena.create(Module.EnumSimple);
|
||||
enum_simple.* = .{
|
||||
.owner_decl = astgen.decl,
|
||||
.node_offset = astgen.decl.nodeIndexToRelative(node),
|
||||
.fields = fields_map,
|
||||
};
|
||||
const enum_ty = try Type.Tag.enum_simple.create(arena, enum_simple);
|
||||
const enum_val = try Value.Tag.ty.create(arena, enum_ty);
|
||||
const new_decl = try mod.createAnonymousDecl(scope, &new_decl_arena, .{
|
||||
.ty = Type.initTag(.type),
|
||||
.val = enum_val,
|
||||
});
|
||||
const decl_index = try mod.declareDeclDependency(astgen.decl, new_decl);
|
||||
const result = try gz.addDecl(.decl_val, decl_index, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
}
|
||||
// In this case we must generate ZIR code for the tag values, similar to
|
||||
// how structs are handled above. The new anonymous Decl will be created in
|
||||
// Sema, not AstGen.
|
||||
return mod.failNode(scope, node, "TODO AstGen for enum decl with decls or explicitly provided field values", .{});
|
||||
},
|
||||
.keyword_opaque => {
|
||||
const result = try gz.addNode(.opaque_decl, node);
|
||||
@ -1893,11 +2084,11 @@ fn errorSetDecl(
|
||||
rl: ResultLoc,
|
||||
node: ast.Node.Index,
|
||||
) InnerError!zir.Inst.Ref {
|
||||
const mod = gz.astgen.mod;
|
||||
const astgen = gz.astgen;
|
||||
const mod = astgen.mod;
|
||||
const tree = gz.tree();
|
||||
const main_tokens = tree.nodes.items(.main_token);
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
const arena = gz.astgen.arena;
|
||||
|
||||
// Count how many fields there are.
|
||||
const error_token = main_tokens[node];
|
||||
@ -1914,6 +2105,11 @@ fn errorSetDecl(
|
||||
} else unreachable; // TODO should not need else unreachable here
|
||||
};
|
||||
|
||||
const gpa = mod.gpa;
|
||||
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
|
||||
errdefer new_decl_arena.deinit();
|
||||
const arena = &new_decl_arena.allocator;
|
||||
|
||||
const fields = try arena.alloc([]const u8, count);
|
||||
{
|
||||
var tok_i = error_token + 2;
|
||||
@ -1922,7 +2118,7 @@ fn errorSetDecl(
|
||||
switch (token_tags[tok_i]) {
|
||||
.doc_comment, .comma => {},
|
||||
.identifier => {
|
||||
fields[field_i] = try mod.identifierTokenString(scope, tok_i);
|
||||
fields[field_i] = try mod.identifierTokenStringTreeArena(scope, tok_i, tree, arena);
|
||||
field_i += 1;
|
||||
},
|
||||
.r_brace => break,
|
||||
@ -1932,18 +2128,19 @@ fn errorSetDecl(
|
||||
}
|
||||
const error_set = try arena.create(Module.ErrorSet);
|
||||
error_set.* = .{
|
||||
.owner_decl = gz.astgen.decl,
|
||||
.node_offset = gz.astgen.decl.nodeIndexToRelative(node),
|
||||
.owner_decl = astgen.decl,
|
||||
.node_offset = astgen.decl.nodeIndexToRelative(node),
|
||||
.names_ptr = fields.ptr,
|
||||
.names_len = @intCast(u32, fields.len),
|
||||
};
|
||||
const error_set_ty = try Type.Tag.error_set.create(arena, error_set);
|
||||
const typed_value = try arena.create(TypedValue);
|
||||
typed_value.* = .{
|
||||
const error_set_val = try Value.Tag.ty.create(arena, error_set_ty);
|
||||
const new_decl = try mod.createAnonymousDecl(scope, &new_decl_arena, .{
|
||||
.ty = Type.initTag(.type),
|
||||
.val = try Value.Tag.ty.create(arena, error_set_ty),
|
||||
};
|
||||
const result = try gz.addConst(typed_value);
|
||||
.val = error_set_val,
|
||||
});
|
||||
const decl_index = try mod.declareDeclDependency(astgen.decl, new_decl);
|
||||
const result = try gz.addDecl(.decl_val, decl_index, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
}
|
||||
|
||||
@ -1980,7 +2177,7 @@ fn orelseCatchExpr(
|
||||
// TODO handle catch
|
||||
const operand_rl: ResultLoc = switch (block_scope.break_result_loc) {
|
||||
.ref => .ref,
|
||||
.discard, .none, .block_ptr, .inferred_ptr => .none,
|
||||
.discard, .none, .none_or_ref, .block_ptr, .inferred_ptr => .none,
|
||||
.ty => |elem_ty| blk: {
|
||||
const wrapped_ty = try block_scope.addUnNode(.optional_type, elem_ty, node);
|
||||
break :blk .{ .ty = wrapped_ty };
|
||||
@ -2156,7 +2353,7 @@ pub fn fieldAccess(
|
||||
.field_name_start = str_index,
|
||||
}),
|
||||
else => return rvalue(gz, scope, rl, try gz.addPlNode(.field_val, node, zir.Inst.Field{
|
||||
.lhs = try expr(gz, scope, .none, object_node),
|
||||
.lhs = try expr(gz, scope, .none_or_ref, object_node),
|
||||
.field_name_start = str_index,
|
||||
}), node),
|
||||
}
|
||||
@ -2179,7 +2376,7 @@ fn arrayAccess(
|
||||
),
|
||||
else => return rvalue(gz, scope, rl, try gz.addBin(
|
||||
.elem_val,
|
||||
try expr(gz, scope, .none, node_datas[node].lhs),
|
||||
try expr(gz, scope, .none_or_ref, node_datas[node].lhs),
|
||||
try expr(gz, scope, .{ .ty = .usize_type }, node_datas[node].rhs),
|
||||
), node),
|
||||
}
|
||||
@ -3188,8 +3385,13 @@ fn switchExpr(
|
||||
switch (strat.tag) {
|
||||
.break_operand => {
|
||||
// Switch expressions return `true` for `nodeMayNeedMemoryLocation` thus
|
||||
// this is always true.
|
||||
assert(strat.elide_store_to_block_ptr_instructions);
|
||||
// `elide_store_to_block_ptr_instructions` will either be true,
|
||||
// or all prongs are noreturn.
|
||||
if (!strat.elide_store_to_block_ptr_instructions) {
|
||||
astgen.extra.appendSliceAssumeCapacity(scalar_cases_payload.items);
|
||||
astgen.extra.appendSliceAssumeCapacity(multi_cases_payload.items);
|
||||
return astgen.indexToRef(switch_block);
|
||||
}
|
||||
|
||||
// There will necessarily be a store_to_block_ptr for
|
||||
// all prongs, except for prongs that ended with a noreturn instruction.
|
||||
@ -3418,7 +3620,8 @@ fn identifier(
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const mod = gz.astgen.mod;
|
||||
const astgen = gz.astgen;
|
||||
const mod = astgen.mod;
|
||||
const tree = gz.tree();
|
||||
const main_tokens = tree.nodes.items(.main_token);
|
||||
|
||||
@ -3451,7 +3654,7 @@ fn identifier(
|
||||
const result = try gz.add(.{
|
||||
.tag = .int_type,
|
||||
.data = .{ .int_type = .{
|
||||
.src_node = gz.astgen.decl.nodeIndexToRelative(ident),
|
||||
.src_node = astgen.decl.nodeIndexToRelative(ident),
|
||||
.signedness = signedness,
|
||||
.bit_count = bit_count,
|
||||
} },
|
||||
@ -3474,9 +3677,13 @@ fn identifier(
|
||||
.local_ptr => {
|
||||
const local_ptr = s.cast(Scope.LocalPtr).?;
|
||||
if (mem.eql(u8, local_ptr.name, ident_name)) {
|
||||
if (rl == .ref) return local_ptr.ptr;
|
||||
const loaded = try gz.addUnNode(.load, local_ptr.ptr, ident);
|
||||
return rvalue(gz, scope, rl, loaded, ident);
|
||||
switch (rl) {
|
||||
.ref, .none_or_ref => return local_ptr.ptr,
|
||||
else => {
|
||||
const loaded = try gz.addUnNode(.load, local_ptr.ptr, ident);
|
||||
return rvalue(gz, scope, rl, loaded, ident);
|
||||
},
|
||||
}
|
||||
}
|
||||
s = local_ptr.parent;
|
||||
},
|
||||
@ -3485,15 +3692,15 @@ fn identifier(
|
||||
};
|
||||
}
|
||||
|
||||
const gop = try gz.astgen.decl_map.getOrPut(mod.gpa, ident_name);
|
||||
if (!gop.found_existing) {
|
||||
const decl = mod.lookupDeclName(scope, ident_name) orelse
|
||||
return mod.failNode(scope, ident, "use of undeclared identifier '{s}'", .{ident_name});
|
||||
try gz.astgen.decls.append(mod.gpa, decl);
|
||||
}
|
||||
const decl_index = @intCast(u32, gop.index);
|
||||
const decl = mod.lookupDeclName(scope, ident_name) orelse {
|
||||
// TODO insert a "dependency on the non-existence of a decl" here to make this
|
||||
// compile error go away when the decl is introduced. This data should be in a global
|
||||
// sparse map since it is only relevant when a compile error occurs.
|
||||
return mod.failNode(scope, ident, "use of undeclared identifier '{s}'", .{ident_name});
|
||||
};
|
||||
const decl_index = try mod.declareDeclDependency(astgen.decl, decl);
|
||||
switch (rl) {
|
||||
.ref => return gz.addDecl(.decl_ref, decl_index, ident),
|
||||
.ref, .none_or_ref => return gz.addDecl(.decl_ref, decl_index, ident),
|
||||
else => return rvalue(gz, scope, rl, try gz.addDecl(.decl_val, decl_index, ident), ident),
|
||||
}
|
||||
}
|
||||
@ -3626,12 +3833,23 @@ fn floatLiteral(
|
||||
const float_number = std.fmt.parseFloat(f128, bytes) catch |e| switch (e) {
|
||||
error.InvalidCharacter => unreachable, // validated by tokenizer
|
||||
};
|
||||
const typed_value = try arena.create(TypedValue);
|
||||
typed_value.* = .{
|
||||
.ty = Type.initTag(.comptime_float),
|
||||
.val = try Value.Tag.float_128.create(arena, float_number),
|
||||
};
|
||||
const result = try gz.addConst(typed_value);
|
||||
// If the value fits into a f32 without losing any precision, store it that way.
|
||||
@setFloatMode(.Strict);
|
||||
const smaller_float = @floatCast(f32, float_number);
|
||||
const bigger_again: f128 = smaller_float;
|
||||
if (bigger_again == float_number) {
|
||||
const result = try gz.addFloat(smaller_float, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
}
|
||||
// We need to use 128 bits. Break the float into 4 u32 values so we can
|
||||
// put it into the `extra` array.
|
||||
const int_bits = @bitCast(u128, float_number);
|
||||
const result = try gz.addPlNode(.float128, node, zir.Inst.Float128{
|
||||
.piece0 = @truncate(u32, int_bits),
|
||||
.piece1 = @truncate(u32, int_bits >> 32),
|
||||
.piece2 = @truncate(u32, int_bits >> 64),
|
||||
.piece3 = @truncate(u32, int_bits >> 96),
|
||||
});
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
}
|
||||
|
||||
@ -3697,7 +3915,7 @@ fn as(
|
||||
) InnerError!zir.Inst.Ref {
|
||||
const dest_type = try typeExpr(gz, scope, lhs);
|
||||
switch (rl) {
|
||||
.none, .discard, .ref, .ty => {
|
||||
.none, .none_or_ref, .discard, .ref, .ty => {
|
||||
const result = try expr(gz, scope, .{ .ty = dest_type }, rhs);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
@ -3781,7 +3999,7 @@ fn bitCast(
|
||||
});
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
.ref => unreachable, // `@bitCast` is not allowed as an r-value.
|
||||
.ref, .none_or_ref => unreachable, // `@bitCast` is not allowed as an r-value.
|
||||
.ptr => |result_ptr| {
|
||||
const casted_result_ptr = try gz.addUnNode(.bitcast_result_ptr, result_ptr, node);
|
||||
return expr(gz, scope, .{ .ptr = casted_result_ptr }, rhs);
|
||||
@ -3882,11 +4100,11 @@ fn builtinCall(
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
.breakpoint => {
|
||||
const result = try gz.add(.{
|
||||
_ = try gz.add(.{
|
||||
.tag = .breakpoint,
|
||||
.data = .{ .node = gz.astgen.decl.nodeIndexToRelative(node) },
|
||||
});
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
return rvalue(gz, scope, rl, .void_value, node);
|
||||
},
|
||||
.import => {
|
||||
const target = try expr(gz, scope, .none, params[0]);
|
||||
@ -3943,6 +4161,50 @@ fn builtinCall(
|
||||
.bit_cast => return bitCast(gz, scope, rl, node, params[0], params[1]),
|
||||
.TypeOf => return typeOf(gz, scope, rl, node, params),
|
||||
|
||||
.int_to_enum => {
|
||||
const result = try gz.addPlNode(.int_to_enum, node, zir.Inst.Bin{
|
||||
.lhs = try typeExpr(gz, scope, params[0]),
|
||||
.rhs = try expr(gz, scope, .none, params[1]),
|
||||
});
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
|
||||
.enum_to_int => {
|
||||
const operand = try expr(gz, scope, .none, params[0]);
|
||||
const result = try gz.addUnNode(.enum_to_int, operand, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
|
||||
.@"export" => {
|
||||
// TODO: @export is supposed to be able to export things other than functions.
|
||||
// Instead of `comptimeExpr` here we need `decl_ref`.
|
||||
const fn_to_export = try comptimeExpr(gz, scope, .none, params[0]);
|
||||
// TODO: the second parameter here is supposed to be
|
||||
// `std.builtin.ExportOptions`, not a string.
|
||||
const export_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
|
||||
_ = try gz.addPlNode(.@"export", node, zir.Inst.Bin{
|
||||
.lhs = fn_to_export,
|
||||
.rhs = export_name,
|
||||
});
|
||||
return rvalue(gz, scope, rl, .void_value, node);
|
||||
},
|
||||
|
||||
.has_decl => {
|
||||
const container_type = try typeExpr(gz, scope, params[0]);
|
||||
const name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
|
||||
const result = try gz.addPlNode(.has_decl, node, zir.Inst.Bin{
|
||||
.lhs = container_type,
|
||||
.rhs = name,
|
||||
});
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
|
||||
.type_info => {
|
||||
const operand = try typeExpr(gz, scope, params[0]);
|
||||
const result = try gz.addUnNode(.type_info, operand, node);
|
||||
return rvalue(gz, scope, rl, result, node);
|
||||
},
|
||||
|
||||
.add_with_overflow,
|
||||
.align_cast,
|
||||
.align_of,
|
||||
@ -3969,17 +4231,13 @@ fn builtinCall(
|
||||
.div_floor,
|
||||
.div_trunc,
|
||||
.embed_file,
|
||||
.enum_to_int,
|
||||
.error_name,
|
||||
.error_return_trace,
|
||||
.err_set_cast,
|
||||
.@"export",
|
||||
.fence,
|
||||
.field_parent_ptr,
|
||||
.float_to_int,
|
||||
.has_decl,
|
||||
.has_field,
|
||||
.int_to_enum,
|
||||
.int_to_float,
|
||||
.int_to_ptr,
|
||||
.memcpy,
|
||||
@ -4023,7 +4281,6 @@ fn builtinCall(
|
||||
.This,
|
||||
.truncate,
|
||||
.Type,
|
||||
.type_info,
|
||||
.type_name,
|
||||
.union_init,
|
||||
=> return mod.failNode(scope, node, "TODO: implement builtin function {s}", .{
|
||||
@ -4354,7 +4611,7 @@ fn rvalue(
|
||||
src_node: ast.Node.Index,
|
||||
) InnerError!zir.Inst.Ref {
|
||||
switch (rl) {
|
||||
.none => return result,
|
||||
.none, .none_or_ref => return result,
|
||||
.discard => {
|
||||
// Emit a compile error for discarding error values.
|
||||
_ = try gz.addUnNode(.ensure_result_non_error, result, src_node);
|
||||
|
||||
@@ -484,7 +484,7 @@ pub const list = list: {
    "@intToEnum",
    .{
        .tag = .int_to_enum,
        .param_count = 1,
        .param_count = 2,
    },
},
.{
@ -272,32 +272,57 @@ pub const AllErrors = struct {
|
||||
line: u32,
|
||||
column: u32,
|
||||
byte_offset: u32,
|
||||
/// Does not include the trailing newline.
|
||||
source_line: ?[]const u8,
|
||||
notes: []Message = &.{},
|
||||
},
|
||||
plain: struct {
|
||||
msg: []const u8,
|
||||
},
|
||||
|
||||
pub fn renderToStdErr(msg: Message) void {
|
||||
return msg.renderToStdErrInner("error");
|
||||
pub fn renderToStdErr(msg: Message, ttyconf: std.debug.TTY.Config) void {
|
||||
const stderr_mutex = std.debug.getStderrMutex();
|
||||
const held = std.debug.getStderrMutex().acquire();
|
||||
defer held.release();
|
||||
const stderr = std.io.getStdErr();
|
||||
return msg.renderToStdErrInner(ttyconf, stderr, "error:", .Red) catch return;
|
||||
}
|
||||
|
||||
fn renderToStdErrInner(msg: Message, kind: []const u8) void {
|
||||
fn renderToStdErrInner(
|
||||
msg: Message,
|
||||
ttyconf: std.debug.TTY.Config,
|
||||
stderr_file: std.fs.File,
|
||||
kind: []const u8,
|
||||
color: std.debug.TTY.Color,
|
||||
) anyerror!void {
|
||||
const stderr = stderr_file.writer();
|
||||
switch (msg) {
|
||||
.src => |src| {
|
||||
std.debug.print("{s}:{d}:{d}: {s}: {s}\n", .{
|
||||
ttyconf.setColor(stderr, .Bold);
|
||||
try stderr.print("{s}:{d}:{d}: ", .{
|
||||
src.src_path,
|
||||
src.line + 1,
|
||||
src.column + 1,
|
||||
kind,
|
||||
src.msg,
|
||||
});
|
||||
ttyconf.setColor(stderr, color);
|
||||
try stderr.writeAll(kind);
|
||||
ttyconf.setColor(stderr, .Bold);
|
||||
try stderr.print(" {s}\n", .{src.msg});
|
||||
ttyconf.setColor(stderr, .Reset);
|
||||
if (src.source_line) |line| {
|
||||
try stderr.writeAll(line);
|
||||
try stderr.writeByte('\n');
|
||||
try stderr.writeByteNTimes(' ', src.column);
|
||||
ttyconf.setColor(stderr, .Green);
|
||||
try stderr.writeAll("^\n");
|
||||
ttyconf.setColor(stderr, .Reset);
|
||||
}
|
||||
for (src.notes) |note| {
|
||||
note.renderToStdErrInner("note");
|
||||
try note.renderToStdErrInner(ttyconf, stderr_file, "note:", .Cyan);
|
||||
}
|
||||
},
|
||||
.plain => |plain| {
|
||||
std.debug.print("{s}: {s}\n", .{ kind, plain.msg });
|
||||
try stderr.print("{s}: {s}\n", .{ kind, plain.msg });
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -327,6 +352,7 @@ pub const AllErrors = struct {
|
||||
.byte_offset = byte_offset,
|
||||
.line = @intCast(u32, loc.line),
|
||||
.column = @intCast(u32, loc.column),
|
||||
.source_line = try arena.allocator.dupe(u8, loc.source_line),
|
||||
},
|
||||
};
|
||||
}
|
||||
@ -342,6 +368,7 @@ pub const AllErrors = struct {
|
||||
.line = @intCast(u32, loc.line),
|
||||
.column = @intCast(u32, loc.column),
|
||||
.notes = notes,
|
||||
.source_line = try arena.allocator.dupe(u8, loc.source_line),
|
||||
},
|
||||
});
|
||||
}
|
||||
@ -906,38 +933,56 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
|
||||
artifact_sub_dir,
|
||||
};
|
||||
|
||||
// TODO when we implement serialization and deserialization of incremental compilation metadata,
|
||||
// this is where we would load it. We have opened a handle to the directory where
|
||||
// the output either already is, or will be.
|
||||
// If we rely on stage1, we must not redundantly add these packages.
|
||||
const use_stage1 = build_options.is_stage1 and use_llvm;
|
||||
if (!use_stage1) {
|
||||
const builtin_pkg = try Package.createWithDir(
|
||||
gpa,
|
||||
zig_cache_artifact_directory,
|
||||
null,
|
||||
"builtin.zig",
|
||||
);
|
||||
errdefer builtin_pkg.destroy(gpa);
|
||||
|
||||
const std_pkg = try Package.createWithDir(
|
||||
gpa,
|
||||
options.zig_lib_directory,
|
||||
"std",
|
||||
"std.zig",
|
||||
);
|
||||
errdefer std_pkg.destroy(gpa);
|
||||
|
||||
try root_pkg.addAndAdopt(gpa, "builtin", builtin_pkg);
|
||||
try root_pkg.add(gpa, "root", root_pkg);
|
||||
try root_pkg.addAndAdopt(gpa, "std", std_pkg);
|
||||
|
||||
try std_pkg.add(gpa, "builtin", builtin_pkg);
|
||||
try std_pkg.add(gpa, "root", root_pkg);
|
||||
}
|
||||
|
||||
// TODO when we implement serialization and deserialization of incremental
|
||||
// compilation metadata, this is where we would load it. We have opened a handle
|
||||
// to the directory where the output either already is, or will be.
|
||||
// However we currently do not have serialization of such metadata, so for now
|
||||
// we set up an empty Module that does the entire compilation fresh.
|
||||
|
||||
const root_scope = rs: {
|
||||
if (mem.endsWith(u8, root_pkg.root_src_path, ".zig")) {
|
||||
const root_scope = try gpa.create(Module.Scope.File);
|
||||
const struct_ty = try Type.Tag.empty_struct.create(
|
||||
gpa,
|
||||
&root_scope.root_container,
|
||||
);
|
||||
root_scope.* = .{
|
||||
// TODO this is duped so it can be freed in Container.deinit
|
||||
.sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
|
||||
.source = .{ .unloaded = {} },
|
||||
.tree = undefined,
|
||||
.status = .never_loaded,
|
||||
.pkg = root_pkg,
|
||||
.root_container = .{
|
||||
.file_scope = root_scope,
|
||||
.decls = .{},
|
||||
.ty = struct_ty,
|
||||
},
|
||||
};
|
||||
break :rs root_scope;
|
||||
} else if (mem.endsWith(u8, root_pkg.root_src_path, ".zir")) {
|
||||
return error.ZirFilesUnsupported;
|
||||
} else {
|
||||
unreachable;
|
||||
}
|
||||
const root_scope = try gpa.create(Module.Scope.File);
|
||||
errdefer gpa.destroy(root_scope);
|
||||
|
||||
const struct_ty = try Type.Tag.empty_struct.create(gpa, &root_scope.root_container);
|
||||
root_scope.* = .{
|
||||
// TODO this is duped so it can be freed in Container.deinit
|
||||
.sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
|
||||
.source = .{ .unloaded = {} },
|
||||
.tree = undefined,
|
||||
.status = .never_loaded,
|
||||
.pkg = root_pkg,
|
||||
.root_container = .{
|
||||
.file_scope = root_scope,
|
||||
.decls = .{},
|
||||
.ty = struct_ty,
|
||||
.parent_name_hash = root_pkg.namespace_hash,
|
||||
},
|
||||
};
|
||||
|
||||
const module = try arena.create(Module);
|
||||
@ -1339,16 +1384,17 @@ pub fn update(self: *Compilation) !void {
|
||||
self.c_object_work_queue.writeItemAssumeCapacity(entry.key);
|
||||
}
|
||||
|
||||
const use_stage1 = build_options.omit_stage2 or build_options.is_stage1 and self.bin_file.options.use_llvm;
|
||||
const use_stage1 = build_options.omit_stage2 or
|
||||
(build_options.is_stage1 and self.bin_file.options.use_llvm);
|
||||
if (!use_stage1) {
|
||||
if (self.bin_file.options.module) |module| {
|
||||
module.compile_log_text.shrinkAndFree(module.gpa, 0);
|
||||
module.generation += 1;
|
||||
|
||||
// TODO Detect which source files changed.
|
||||
// Until then we simulate a full cache miss. Source files could have been loaded for any reason;
|
||||
// to force a refresh we unload now.
|
||||
module.root_scope.unload(module.gpa);
|
||||
// Until then we simulate a full cache miss. Source files could have been loaded
|
||||
// for any reason; to force a refresh we unload now.
|
||||
module.unloadFile(module.root_scope);
|
||||
module.failed_root_src_file = null;
|
||||
module.analyzeContainer(&module.root_scope.root_container) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
@ -1362,7 +1408,7 @@ pub fn update(self: *Compilation) !void {
|
||||
|
||||
// TODO only analyze imports if they are still referenced
|
||||
for (module.import_table.items()) |entry| {
|
||||
entry.value.unload(module.gpa);
|
||||
module.unloadFile(entry.value);
|
||||
module.analyzeContainer(&entry.value.root_container) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
assert(self.totalErrorCount() != 0);
|
||||
@ -1377,14 +1423,17 @@ pub fn update(self: *Compilation) !void {
|
||||
|
||||
if (!use_stage1) {
|
||||
if (self.bin_file.options.module) |module| {
|
||||
// Process the deletion set.
|
||||
while (module.deletion_set.popOrNull()) |decl| {
|
||||
if (decl.dependants.items().len != 0) {
|
||||
decl.deletion_flag = false;
|
||||
continue;
|
||||
}
|
||||
try module.deleteDecl(decl);
|
||||
// Process the deletion set. We use a while loop here because the
|
||||
// deletion set may grow as we call `deleteDecl` within this loop,
|
||||
// and more unreferenced Decls are revealed.
|
||||
var entry_i: usize = 0;
|
||||
while (entry_i < module.deletion_set.entries.items.len) : (entry_i += 1) {
|
||||
const decl = module.deletion_set.entries.items[entry_i].key;
|
||||
assert(decl.deletion_flag);
|
||||
assert(decl.dependants.items().len == 0);
|
||||
try module.deleteDecl(decl, null);
|
||||
}
|
||||
module.deletion_set.shrinkRetainingCapacity(0);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1429,11 +1478,25 @@ pub fn totalErrorCount(self: *Compilation) usize {
|
||||
var total: usize = self.failed_c_objects.items().len;
|
||||
|
||||
if (self.bin_file.options.module) |module| {
|
||||
total += module.failed_decls.count() +
|
||||
module.emit_h_failed_decls.count() +
|
||||
module.failed_exports.items().len +
|
||||
total += module.failed_exports.items().len +
|
||||
module.failed_files.items().len +
|
||||
@boolToInt(module.failed_root_src_file != null);
|
||||
// Skip errors for Decls within files that failed parsing.
|
||||
// When a parse error is introduced, we keep all the semantic analysis for
|
||||
// the previous parse success, including compile errors, but we cannot
|
||||
// emit them until the file succeeds parsing.
|
||||
for (module.failed_decls.items()) |entry| {
|
||||
if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
|
||||
continue;
|
||||
}
|
||||
total += 1;
|
||||
}
|
||||
for (module.emit_h_failed_decls.items()) |entry| {
|
||||
if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
|
||||
continue;
|
||||
}
|
||||
total += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// The "no entry point found" error only counts if there are no other errors.
|
||||
@ -1472,6 +1535,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
|
||||
.byte_offset = 0,
|
||||
.line = err_msg.line,
|
||||
.column = err_msg.column,
|
||||
.source_line = null, // TODO
|
||||
},
|
||||
});
|
||||
}
|
||||
@ -1480,9 +1544,19 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
|
||||
try AllErrors.add(module, &arena, &errors, entry.value.*);
|
||||
}
|
||||
for (module.failed_decls.items()) |entry| {
|
||||
if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
|
||||
// Skip errors for Decls within files that had a parse failure.
|
||||
// We'll try again once parsing succeeds.
|
||||
continue;
|
||||
}
|
||||
try AllErrors.add(module, &arena, &errors, entry.value.*);
|
||||
}
|
||||
for (module.emit_h_failed_decls.items()) |entry| {
|
||||
if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
|
||||
// Skip errors for Decls within files that had a parse failure.
|
||||
// We'll try again once parsing succeeds.
|
||||
continue;
|
||||
}
|
||||
try AllErrors.add(module, &arena, &errors, entry.value.*);
|
||||
}
|
||||
for (module.failed_exports.items()) |entry| {
|
||||
@ -2437,7 +2511,7 @@ pub fn addCCArgs(
|
||||
try argv.append("-fPIC");
|
||||
}
|
||||
},
|
||||
.shared_library, .assembly, .ll, .bc, .unknown, .static_library, .object, .zig, .zir => {},
|
||||
.shared_library, .assembly, .ll, .bc, .unknown, .static_library, .object, .zig => {},
|
||||
}
|
||||
if (out_dep_path) |p| {
|
||||
try argv.appendSlice(&[_][]const u8{ "-MD", "-MV", "-MF", p });
|
||||
@ -2511,7 +2585,6 @@ pub const FileExt = enum {
|
||||
object,
|
||||
static_library,
|
||||
zig,
|
||||
zir,
|
||||
unknown,
|
||||
|
||||
pub fn clangSupportsDepFile(ext: FileExt) bool {
|
||||
@ -2525,7 +2598,6 @@ pub const FileExt = enum {
|
||||
.object,
|
||||
.static_library,
|
||||
.zig,
|
||||
.zir,
|
||||
.unknown,
|
||||
=> false,
|
||||
};
|
||||
@ -2597,8 +2669,6 @@ pub fn classifyFileExt(filename: []const u8) FileExt {
|
||||
return .h;
|
||||
} else if (mem.endsWith(u8, filename, ".zig")) {
|
||||
return .zig;
|
||||
} else if (mem.endsWith(u8, filename, ".zir")) {
|
||||
return .zir;
|
||||
} else if (hasSharedLibraryExt(filename)) {
|
||||
return .shared_library;
|
||||
} else if (hasStaticLibraryExt(filename)) {
|
||||
@ -2619,7 +2689,6 @@ test "classifyFileExt" {
|
||||
std.testing.expectEqual(FileExt.shared_library, classifyFileExt("foo.so.1.2.3"));
|
||||
std.testing.expectEqual(FileExt.unknown, classifyFileExt("foo.so.1.2.3~"));
|
||||
std.testing.expectEqual(FileExt.zig, classifyFileExt("foo.zig"));
|
||||
std.testing.expectEqual(FileExt.zir, classifyFileExt("foo.zir"));
|
||||
}
|
||||
|
||||
fn haveFramePointer(comp: *const Compilation) bool {
|
||||
@ -2814,6 +2883,8 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
|
||||
|
||||
const target = comp.getTarget();
|
||||
const generic_arch_name = target.cpu.arch.genericName();
|
||||
const use_stage1 = build_options.omit_stage2 or
|
||||
(build_options.is_stage1 and comp.bin_file.options.use_llvm);
|
||||
|
||||
@setEvalBranchQuota(4000);
|
||||
try buffer.writer().print(
|
||||
@ -2826,6 +2897,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
|
||||
\\/// Zig version. When writing code that supports multiple versions of Zig, prefer
|
||||
\\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks.
|
||||
\\pub const zig_version = try @import("std").SemanticVersion.parse("{s}");
|
||||
\\pub const zig_is_stage2 = {};
|
||||
\\
|
||||
\\pub const output_mode = OutputMode.{};
|
||||
\\pub const link_mode = LinkMode.{};
|
||||
@ -2839,6 +2911,7 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: *Allocator) ![]u8
|
||||
\\
|
||||
, .{
|
||||
build_options.version,
|
||||
!use_stage1,
|
||||
std.zig.fmtId(@tagName(comp.bin_file.options.output_mode)),
|
||||
std.zig.fmtId(@tagName(comp.bin_file.options.link_mode)),
|
||||
comp.bin_file.options.is_test,
|
||||
@ -3044,6 +3117,7 @@ fn buildOutputFromZig(
|
||||
.handle = special_dir,
|
||||
},
|
||||
.root_src_path = src_basename,
|
||||
.namespace_hash = Package.root_namespace_hash,
|
||||
};
|
||||
const root_name = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len];
|
||||
const target = comp.getTarget();
|
||||
|
||||
452
src/Module.zig
@ -65,8 +65,8 @@ emit_h_failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *ErrorMsg) = .{},
|
||||
/// Keep track of one `@compileLog` callsite per owner Decl.
|
||||
compile_log_decls: std.AutoArrayHashMapUnmanaged(*Decl, SrcLoc) = .{},
|
||||
/// Using a map here for consistency with the other fields here.
|
||||
/// The ErrorMsg memory is owned by the `Scope`, using Module's general purpose allocator.
|
||||
failed_files: std.AutoArrayHashMapUnmanaged(*Scope, *ErrorMsg) = .{},
|
||||
/// The ErrorMsg memory is owned by the `Scope.File`, using Module's general purpose allocator.
|
||||
failed_files: std.AutoArrayHashMapUnmanaged(*Scope.File, *ErrorMsg) = .{},
|
||||
/// Using a map here for consistency with the other fields here.
|
||||
/// The ErrorMsg memory is owned by the `Export`, using Module's general purpose allocator.
|
||||
failed_exports: std.AutoArrayHashMapUnmanaged(*Export, *ErrorMsg) = .{},
|
||||
@ -75,7 +75,7 @@ next_anon_name_index: usize = 0,
|
||||
|
||||
/// Candidates for deletion. After a semantic analysis update completes, this list
|
||||
/// contains Decls that need to be deleted if they end up having no references to them.
|
||||
deletion_set: ArrayListUnmanaged(*Decl) = .{},
|
||||
deletion_set: std.AutoArrayHashMapUnmanaged(*Decl, void) = .{},
|
||||
|
||||
/// Error tags and their values, tag names are duped with mod.gpa.
|
||||
/// Corresponds with `error_name_list`.
|
||||
@ -150,9 +150,15 @@ pub const Decl = struct {
|
||||
/// The direct parent container of the Decl.
|
||||
/// Reference to externally owned memory.
|
||||
container: *Scope.Container,
|
||||
/// The AST Node decl index or ZIR Inst index that contains this declaration.
|
||||
|
||||
/// An integer that can be checked against the corresponding incrementing
|
||||
/// generation field of Module. This is used to determine whether `complete` status
|
||||
/// represents pre- or post- re-analysis.
|
||||
generation: u32,
|
||||
/// The AST Node index or ZIR Inst index that contains this declaration.
|
||||
/// Must be recomputed when the corresponding source file is modified.
|
||||
src_index: usize,
|
||||
src_node: ast.Node.Index,
|
||||
|
||||
/// The most recent value of the Decl after a successful semantic analysis.
|
||||
typed_value: union(enum) {
|
||||
never_succeeded: void,
|
||||
@ -192,17 +198,12 @@ pub const Decl = struct {
|
||||
/// to require re-analysis.
|
||||
outdated,
|
||||
},
|
||||
/// This flag is set when this Decl is added to a check_for_deletion set, and cleared
|
||||
/// This flag is set when this Decl is added to `Module.deletion_set`, and cleared
|
||||
/// when removed.
|
||||
deletion_flag: bool,
|
||||
/// Whether the corresponding AST decl has a `pub` keyword.
|
||||
is_pub: bool,
|
||||
|
||||
/// An integer that can be checked against the corresponding incrementing
|
||||
/// generation field of Module. This is used to determine whether `complete` status
|
||||
/// represents pre- or post- re-analysis.
|
||||
generation: u32,
|
||||
|
||||
/// Represents the position of the code in the output file.
|
||||
/// This is populated regardless of semantic analysis and code generation.
|
||||
link: link.File.LinkBlock,
|
||||
@ -249,11 +250,11 @@ pub const Decl = struct {
|
||||
}
|
||||
|
||||
pub fn relativeToNodeIndex(decl: Decl, offset: i32) ast.Node.Index {
|
||||
return @bitCast(ast.Node.Index, offset + @bitCast(i32, decl.srcNode()));
|
||||
return @bitCast(ast.Node.Index, offset + @bitCast(i32, decl.src_node));
|
||||
}
|
||||
|
||||
pub fn nodeIndexToRelative(decl: Decl, node_index: ast.Node.Index) i32 {
|
||||
return @bitCast(i32, node_index) - @bitCast(i32, decl.srcNode());
|
||||
return @bitCast(i32, node_index) - @bitCast(i32, decl.src_node);
|
||||
}
|
||||
|
||||
pub fn tokSrcLoc(decl: Decl, token_index: ast.TokenIndex) LazySrcLoc {
|
||||
@ -271,14 +272,9 @@ pub const Decl = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn srcNode(decl: Decl) u32 {
|
||||
const tree = &decl.container.file_scope.tree;
|
||||
return tree.rootDecls()[decl.src_index];
|
||||
}
|
||||
|
||||
pub fn srcToken(decl: Decl) u32 {
|
||||
const tree = &decl.container.file_scope.tree;
|
||||
return tree.firstToken(decl.srcNode());
|
||||
return tree.firstToken(decl.src_node);
|
||||
}
|
||||
|
||||
pub fn srcByteOffset(decl: Decl) u32 {
|
||||
@ -290,6 +286,18 @@ pub const Decl = struct {
|
||||
return decl.container.fullyQualifiedNameHash(mem.spanZ(decl.name));
|
||||
}
|
||||
|
||||
pub fn renderFullyQualifiedName(decl: Decl, writer: anytype) !void {
|
||||
const unqualified_name = mem.spanZ(decl.name);
|
||||
return decl.container.renderFullyQualifiedName(unqualified_name, writer);
|
||||
}
|
||||
|
||||
pub fn getFullyQualifiedName(decl: Decl, gpa: *Allocator) ![]u8 {
|
||||
var buffer = std.ArrayList(u8).init(gpa);
|
||||
defer buffer.deinit();
|
||||
try decl.renderFullyQualifiedName(buffer.writer());
|
||||
return buffer.toOwnedSlice();
|
||||
}
|
||||
|
||||
pub fn typedValue(decl: *Decl) error{AnalysisFail}!TypedValue {
|
||||
const tvm = decl.typedValueManaged() orelse return error.AnalysisFail;
|
||||
return tvm.typed_value;
|
||||
@ -354,6 +362,13 @@ pub const ErrorSet = struct {
|
||||
/// The string bytes are stored in the owner Decl arena.
|
||||
/// They are in the same order they appear in the AST.
|
||||
names_ptr: [*]const []const u8,
|
||||
|
||||
pub fn srcLoc(self: ErrorSet) SrcLoc {
|
||||
return .{
|
||||
.container = .{ .decl = self.owner_decl },
|
||||
.lazy = .{ .node_offset = self.node_offset },
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/// Represents the data that a struct declaration provides.
|
||||
@ -364,7 +379,7 @@ pub const Struct = struct {
|
||||
/// Represents the declarations inside this struct.
|
||||
container: Scope.Container,
|
||||
|
||||
/// Offset from Decl node index, points to the struct AST node.
|
||||
/// Offset from `owner_decl`, points to the struct AST node.
|
||||
node_offset: i32,
|
||||
|
||||
pub const Field = struct {
|
||||
@ -373,6 +388,64 @@ pub const Struct = struct {
|
||||
/// Uses `unreachable_value` to indicate no default.
|
||||
default_val: Value,
|
||||
};
|
||||
|
||||
pub fn getFullyQualifiedName(s: *Struct, gpa: *Allocator) ![]u8 {
|
||||
return s.owner_decl.getFullyQualifiedName(gpa);
|
||||
}
|
||||
|
||||
pub fn srcLoc(s: Struct) SrcLoc {
|
||||
return .{
|
||||
.container = .{ .decl = s.owner_decl },
|
||||
.lazy = .{ .node_offset = s.node_offset },
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/// Represents the data that an enum declaration provides, when the fields
|
||||
/// are auto-numbered, and there are no declarations. The integer tag type
|
||||
/// is inferred to be the smallest power of two unsigned int that fits
|
||||
/// the number of fields.
|
||||
pub const EnumSimple = struct {
|
||||
owner_decl: *Decl,
|
||||
/// Set of field names in declaration order.
|
||||
fields: std.StringArrayHashMapUnmanaged(void),
|
||||
/// Offset from `owner_decl`, points to the enum decl AST node.
|
||||
node_offset: i32,
|
||||
|
||||
pub fn srcLoc(self: EnumSimple) SrcLoc {
|
||||
return .{
|
||||
.container = .{ .decl = self.owner_decl },
|
||||
.lazy = .{ .node_offset = self.node_offset },
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/// Represents the data that an enum declaration provides, when there is
|
||||
/// at least one tag value explicitly specified, or at least one declaration.
|
||||
pub const EnumFull = struct {
|
||||
owner_decl: *Decl,
|
||||
/// An integer type which is used for the numerical value of the enum.
|
||||
/// Whether zig chooses this type or the user specifies it, it is stored here.
|
||||
tag_ty: Type,
|
||||
/// Set of field names in declaration order.
|
||||
fields: std.StringArrayHashMapUnmanaged(void),
|
||||
/// Maps integer tag value to field index.
|
||||
/// Entries are in declaration order, same as `fields`.
|
||||
/// If this hash map is empty, it means the enum tags are auto-numbered.
|
||||
values: ValueMap,
|
||||
/// Represents the declarations inside this struct.
|
||||
container: Scope.Container,
|
||||
/// Offset from `owner_decl`, points to the enum decl AST node.
|
||||
node_offset: i32,
|
||||
|
||||
pub const ValueMap = std.ArrayHashMapUnmanaged(Value, void, Value.hash_u32, Value.eql, false);
|
||||
|
||||
pub fn srcLoc(self: EnumFull) SrcLoc {
|
||||
return .{
|
||||
.container = .{ .decl = self.owner_decl },
|
||||
.lazy = .{ .node_offset = self.node_offset },
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
/// Some Fn struct memory is owned by the Decl's TypedValue.Managed arena allocator.
|
||||
@ -601,6 +674,7 @@ pub const Scope = struct {
|
||||
base: Scope = Scope{ .tag = base_tag },
|
||||
|
||||
file_scope: *Scope.File,
|
||||
parent_name_hash: NameHash,
|
||||
|
||||
/// Direct children of the file.
|
||||
decls: std.AutoArrayHashMapUnmanaged(*Decl, void) = .{},
|
||||
@ -619,8 +693,12 @@ pub const Scope = struct {
|
||||
}
|
||||
|
||||
pub fn fullyQualifiedNameHash(cont: *Container, name: []const u8) NameHash {
|
||||
// TODO container scope qualified names.
|
||||
return std.zig.hashSrc(name);
|
||||
return std.zig.hashName(cont.parent_name_hash, ".", name);
|
||||
}
|
||||
|
||||
pub fn renderFullyQualifiedName(cont: Container, name: []const u8, writer: anytype) !void {
|
||||
// TODO this should render e.g. "std.fs.Dir.OpenOptions"
|
||||
return writer.writeAll(name);
|
||||
}
|
||||
};
|
||||
|
||||
@ -650,10 +728,12 @@ pub const Scope = struct {
|
||||
|
||||
pub fn unload(file: *File, gpa: *Allocator) void {
|
||||
switch (file.status) {
|
||||
.never_loaded,
|
||||
.unloaded_parse_failure,
|
||||
.never_loaded,
|
||||
.unloaded_success,
|
||||
=> {},
|
||||
=> {
|
||||
file.status = .unloaded_success;
|
||||
},
|
||||
|
||||
.loaded_success => {
|
||||
file.tree.deinit(gpa);
|
||||
@ -1018,7 +1098,6 @@ pub const Scope = struct {
|
||||
.instructions = gz.astgen.instructions.toOwnedSlice(),
|
||||
.string_bytes = gz.astgen.string_bytes.toOwnedSlice(gpa),
|
||||
.extra = gz.astgen.extra.toOwnedSlice(gpa),
|
||||
.decls = gz.astgen.decls.toOwnedSlice(gpa),
|
||||
};
|
||||
}
|
||||
|
||||
@ -1048,6 +1127,9 @@ pub const Scope = struct {
|
||||
gz.rl_ty_inst = ty_inst;
|
||||
gz.break_result_loc = parent_rl;
|
||||
},
|
||||
.none_or_ref => {
|
||||
gz.break_result_loc = .ref;
|
||||
},
|
||||
.discard, .none, .ptr, .ref => {
|
||||
gz.break_result_loc = parent_rl;
|
||||
},
|
||||
@ -1227,6 +1309,16 @@ pub const Scope = struct {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn addFloat(gz: *GenZir, number: f32, src_node: ast.Node.Index) !zir.Inst.Ref {
|
||||
return gz.add(.{
|
||||
.tag = .float,
|
||||
.data = .{ .float = .{
|
||||
.src_node = gz.astgen.decl.nodeIndexToRelative(src_node),
|
||||
.number = number,
|
||||
} },
|
||||
});
|
||||
}
|
||||
|
||||
pub fn addUnNode(
|
||||
gz: *GenZir,
|
||||
tag: zir.Inst.Tag,
|
||||
@ -1435,13 +1527,6 @@ pub const Scope = struct {
|
||||
return new_index;
|
||||
}
|
||||
|
||||
pub fn addConst(gz: *GenZir, typed_value: *TypedValue) !zir.Inst.Ref {
|
||||
return gz.add(.{
|
||||
.tag = .@"const",
|
||||
.data = .{ .@"const" = typed_value },
|
||||
});
|
||||
}
|
||||
|
||||
pub fn add(gz: *GenZir, inst: zir.Inst) !zir.Inst.Ref {
|
||||
return gz.astgen.indexToRef(try gz.addAsIndex(inst));
|
||||
}
|
||||
@ -1572,6 +1657,7 @@ pub const SrcLoc = struct {
|
||||
.byte_offset,
|
||||
.token_offset,
|
||||
.node_offset,
|
||||
.node_offset_back2tok,
|
||||
.node_offset_var_decl_ty,
|
||||
.node_offset_for_cond,
|
||||
.node_offset_builtin_call_arg0,
|
||||
@ -1633,6 +1719,14 @@ pub const SrcLoc = struct {
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
return token_starts[tok_index];
|
||||
},
|
||||
.node_offset_back2tok => |node_off| {
|
||||
const decl = src_loc.container.decl;
|
||||
const node = decl.relativeToNodeIndex(node_off);
|
||||
const tree = decl.container.file_scope.base.tree();
|
||||
const tok_index = tree.firstToken(node) - 2;
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
return token_starts[tok_index];
|
||||
},
|
||||
.node_offset_var_decl_ty => |node_off| {
|
||||
const decl = src_loc.container.decl;
|
||||
const node = decl.relativeToNodeIndex(node_off);
|
||||
@ -1747,7 +1841,10 @@ pub const SrcLoc = struct {
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
const node = decl.relativeToNodeIndex(node_off);
|
||||
const tok_index = node_datas[node].rhs;
|
||||
const tok_index = switch (node_tags[node]) {
|
||||
.field_access => node_datas[node].rhs,
|
||||
else => tree.firstToken(node) - 2,
|
||||
};
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
return token_starts[tok_index];
|
||||
},
|
||||
@ -1988,6 +2085,10 @@ pub const LazySrcLoc = union(enum) {
|
||||
/// from its containing Decl node AST index.
|
||||
/// The Decl is determined contextually.
|
||||
node_offset: i32,
|
||||
/// The source location points to two tokens left of the first token of an AST node,
|
||||
/// which is this value offset from its containing Decl node AST index.
|
||||
/// The Decl is determined contextually.
|
||||
node_offset_back2tok: i32,
|
||||
/// The source location points to a variable declaration type expression,
|
||||
/// found by taking this AST node index offset from the containing
|
||||
/// Decl AST node, which points to a variable declaration AST node. Next, navigate
|
||||
@ -2026,10 +2127,10 @@ pub const LazySrcLoc = union(enum) {
|
||||
/// to the callee expression.
|
||||
/// The Decl is determined contextually.
|
||||
node_offset_call_func: i32,
|
||||
/// The source location points to the field name of a field access expression,
|
||||
/// found by taking this AST node index offset from the containing
|
||||
/// Decl AST node, which points to a field access AST node. Next, navigate
|
||||
/// to the field name token.
|
||||
/// The payload is offset from the containing Decl AST node.
|
||||
/// The source location points to the field name of:
|
||||
/// * a field access expression (`a.b`), or
|
||||
/// * the operand ("b" node) of a field initialization expression (`.a = b`)
|
||||
/// The Decl is determined contextually.
|
||||
node_offset_field_name: i32,
|
||||
/// The source location points to the pointer of a pointer deref expression,
|
||||
@ -2114,6 +2215,7 @@ pub const LazySrcLoc = union(enum) {
|
||||
.byte_offset,
|
||||
.token_offset,
|
||||
.node_offset,
|
||||
.node_offset_back2tok,
|
||||
.node_offset_var_decl_ty,
|
||||
.node_offset_for_cond,
|
||||
.node_offset_builtin_call_arg0,
|
||||
@ -2156,6 +2258,7 @@ pub const LazySrcLoc = union(enum) {
|
||||
.byte_offset,
|
||||
.token_offset,
|
||||
.node_offset,
|
||||
.node_offset_back2tok,
|
||||
.node_offset_var_decl_ty,
|
||||
.node_offset_for_cond,
|
||||
.node_offset_builtin_call_arg0,
|
||||
@ -2189,6 +2292,20 @@ pub const InnerError = error{ OutOfMemory, AnalysisFail };
|
||||
pub fn deinit(mod: *Module) void {
|
||||
const gpa = mod.gpa;
|
||||
|
||||
// The callsite of `Compilation.create` owns the `root_pkg`, however
|
||||
// Module owns the builtin and std packages that it adds.
|
||||
if (mod.root_pkg.table.remove("builtin")) |entry| {
|
||||
gpa.free(entry.key);
|
||||
entry.value.destroy(gpa);
|
||||
}
|
||||
if (mod.root_pkg.table.remove("std")) |entry| {
|
||||
gpa.free(entry.key);
|
||||
entry.value.destroy(gpa);
|
||||
}
|
||||
if (mod.root_pkg.table.remove("root")) |entry| {
|
||||
gpa.free(entry.key);
|
||||
}
|
||||
|
||||
mod.compile_log_text.deinit(gpa);
|
||||
|
||||
mod.zig_cache_artifact_directory.handle.close();
|
||||
@ -2288,7 +2405,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void {
|
||||
// We don't perform a deletion here, because this Decl or another one
|
||||
// may end up referencing it before the update is complete.
|
||||
dep.deletion_flag = true;
|
||||
try mod.deletion_set.append(mod.gpa, dep);
|
||||
try mod.deletion_set.put(mod.gpa, dep, {});
|
||||
}
|
||||
}
|
||||
decl.dependencies.clearRetainingCapacity();
|
||||
@ -2351,7 +2468,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
const tree = try mod.getAstTree(decl.container.file_scope);
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const decl_node = tree.rootDecls()[decl.src_index];
|
||||
const decl_node = decl.src_node;
|
||||
switch (node_tags[decl_node]) {
|
||||
.fn_decl => {
|
||||
const fn_proto = node_datas[decl_node].lhs;
|
||||
@ -2406,6 +2523,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
|
||||
|
||||
const block_expr = node_datas[decl_node].lhs;
|
||||
_ = try AstGen.comptimeExpr(&gen_scope, &gen_scope.base, .none, block_expr);
|
||||
_ = try gen_scope.addBreak(.break_inline, 0, .void_value);
|
||||
|
||||
const code = try gen_scope.finish();
|
||||
if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
|
||||
@ -3087,12 +3205,19 @@ fn astgenAndSemaVarDecl(
|
||||
return type_changed;
|
||||
}
|
||||
|
||||
pub fn declareDeclDependency(mod: *Module, depender: *Decl, dependee: *Decl) !void {
|
||||
try depender.dependencies.ensureCapacity(mod.gpa, depender.dependencies.items().len + 1);
|
||||
try dependee.dependants.ensureCapacity(mod.gpa, dependee.dependants.items().len + 1);
|
||||
/// Returns the depender's index of the dependee.
|
||||
pub fn declareDeclDependency(mod: *Module, depender: *Decl, dependee: *Decl) !u32 {
|
||||
try depender.dependencies.ensureCapacity(mod.gpa, depender.dependencies.count() + 1);
|
||||
try dependee.dependants.ensureCapacity(mod.gpa, dependee.dependants.count() + 1);
|
||||
|
||||
if (dependee.deletion_flag) {
|
||||
dependee.deletion_flag = false;
|
||||
mod.deletion_set.removeAssertDiscard(dependee);
|
||||
}
|
||||
|
||||
depender.dependencies.putAssumeCapacity(dependee, {});
|
||||
dependee.dependants.putAssumeCapacity(depender, {});
|
||||
const gop = depender.dependencies.getOrPutAssumeCapacity(dependee);
|
||||
return @intCast(u32, gop.index);
|
||||
}
|
||||
|
||||
pub fn getAstTree(mod: *Module, root_scope: *Scope.File) !*const ast.Tree {
|
||||
@ -3117,17 +3242,19 @@ pub fn getAstTree(mod: *Module, root_scope: *Scope.File) !*const ast.Tree {
|
||||
var msg = std.ArrayList(u8).init(mod.gpa);
|
||||
defer msg.deinit();
|
||||
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
try tree.renderError(parse_err, msg.writer());
|
||||
const err_msg = try mod.gpa.create(ErrorMsg);
|
||||
err_msg.* = .{
|
||||
.src_loc = .{
|
||||
.container = .{ .file_scope = root_scope },
|
||||
.lazy = .{ .token_abs = parse_err.token },
|
||||
.lazy = .{ .byte_abs = token_starts[parse_err.token] },
|
||||
},
|
||||
.msg = msg.toOwnedSlice(),
|
||||
};
|
||||
|
||||
mod.failed_files.putAssumeCapacityNoClobber(&root_scope.base, err_msg);
|
||||
mod.failed_files.putAssumeCapacityNoClobber(root_scope, err_msg);
|
||||
root_scope.status = .unloaded_parse_failure;
|
||||
return error.AnalysisFail;
|
||||
}
|
||||
@ -3167,7 +3294,15 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
deleted_decls.putAssumeCapacityNoClobber(entry.key, {});
|
||||
}
|
||||
|
||||
for (decls) |decl_node, decl_i| switch (node_tags[decl_node]) {
|
||||
// Keep track of decls that are invalidated from the update. Ultimately,
|
||||
// the goal is to queue up `analyze_decl` tasks in the work queue for
|
||||
// the outdated decls, but we cannot queue up the tasks until after
|
||||
// we find out which ones have been deleted, otherwise there would be
|
||||
// deleted Decl pointers in the work queue.
|
||||
var outdated_decls = std.AutoArrayHashMap(*Decl, void).init(mod.gpa);
|
||||
defer outdated_decls.deinit();
|
||||
|
||||
for (decls) |decl_node| switch (node_tags[decl_node]) {
|
||||
.fn_decl => {
|
||||
const fn_proto = node_datas[decl_node].lhs;
|
||||
const body = node_datas[decl_node].rhs;
|
||||
@ -3177,8 +3312,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
body,
|
||||
tree.fnProtoSimple(¶ms, fn_proto),
|
||||
@ -3187,8 +3322,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
.fn_proto_multi => try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
body,
|
||||
tree.fnProtoMulti(fn_proto),
|
||||
@ -3198,8 +3333,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
body,
|
||||
tree.fnProtoOne(¶ms, fn_proto),
|
||||
@ -3208,8 +3343,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
.fn_proto => try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
body,
|
||||
tree.fnProto(fn_proto),
|
||||
@ -3222,8 +3357,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
0,
|
||||
tree.fnProtoSimple(¶ms, decl_node),
|
||||
@ -3232,8 +3367,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
.fn_proto_multi => try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
0,
|
||||
tree.fnProtoMulti(decl_node),
|
||||
@ -3243,8 +3378,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
0,
|
||||
tree.fnProtoOne(¶ms, decl_node),
|
||||
@ -3253,8 +3388,8 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
.fn_proto => try mod.semaContainerFn(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
0,
|
||||
tree.fnProto(decl_node),
|
||||
@ -3263,32 +3398,32 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
.global_var_decl => try mod.semaContainerVar(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.globalVarDecl(decl_node),
|
||||
),
|
||||
.local_var_decl => try mod.semaContainerVar(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.localVarDecl(decl_node),
|
||||
),
|
||||
.simple_var_decl => try mod.semaContainerVar(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.simpleVarDecl(decl_node),
|
||||
),
|
||||
.aligned_var_decl => try mod.semaContainerVar(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
&outdated_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.alignedVarDecl(decl_node),
|
||||
),
|
||||
@ -3301,49 +3436,48 @@ pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
|
||||
const name_hash = container_scope.fullyQualifiedNameHash(name);
|
||||
const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
|
||||
|
||||
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
|
||||
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_node, name_hash, contents_hash);
|
||||
container_scope.decls.putAssumeCapacity(new_decl, {});
|
||||
mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
|
||||
},
|
||||
|
||||
.container_field_init => try mod.semaContainerField(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.containerFieldInit(decl_node),
|
||||
),
|
||||
.container_field_align => try mod.semaContainerField(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.containerFieldAlign(decl_node),
|
||||
),
|
||||
.container_field => try mod.semaContainerField(
|
||||
container_scope,
|
||||
&deleted_decls,
|
||||
decl_node,
|
||||
decl_i,
|
||||
tree.*,
|
||||
tree.containerField(decl_node),
|
||||
),
|
||||
// Container fields are handled in AstGen.
|
||||
.container_field_init,
|
||||
.container_field_align,
|
||||
.container_field,
|
||||
=> continue,
|
||||
|
||||
.test_decl => {
|
||||
log.err("TODO: analyze test decl", .{});
|
||||
if (mod.comp.bin_file.options.is_test) {
|
||||
log.err("TODO: analyze test decl", .{});
|
||||
}
|
||||
},
|
||||
.@"usingnamespace" => {
|
||||
log.err("TODO: analyze usingnamespace decl", .{});
|
||||
},
|
||||
else => unreachable,
|
||||
};
|
||||
// Handle explicitly deleted decls from the source code. Not to be confused
|
||||
// with when we delete decls because they are no longer referenced.
|
||||
// Handle explicitly deleted decls from the source code. This is one of two
|
||||
// places that Decl deletions happen. The other is in `Compilation`, after
|
||||
// `performAllTheWork`, where we iterate over `Module.deletion_set` and
|
||||
// delete Decls which are no longer referenced.
|
||||
// If a Decl is explicitly deleted from source, and also no longer referenced,
|
||||
// it may be both in this `deleted_decls` set, as well as in the
|
||||
// `Module.deletion_set`. To avoid deleting it twice, we remove it from the
|
||||
// deletion set at this time.
|
||||
for (deleted_decls.items()) |entry| {
|
||||
log.debug("noticed '{s}' deleted from source", .{entry.key.name});
|
||||
try mod.deleteDecl(entry.key);
|
||||
const decl = entry.key;
|
||||
log.debug("'{s}' deleted from source", .{decl.name});
|
||||
if (decl.deletion_flag) {
|
||||
log.debug("'{s}' redundantly in deletion set; removing", .{decl.name});
|
||||
mod.deletion_set.removeAssertDiscard(decl);
|
||||
}
|
||||
try mod.deleteDecl(decl, &outdated_decls);
|
||||
}
|
||||
// Finally we can queue up re-analysis tasks after we have processed
|
||||
// the deleted decls.
|
||||
for (outdated_decls.items()) |entry| {
|
||||
try mod.markOutdatedDecl(entry.key);
|
||||
}
|
||||
}
|
||||
|
||||
@ -3351,8 +3485,8 @@ fn semaContainerFn(
|
||||
mod: *Module,
|
||||
container_scope: *Scope.Container,
|
||||
deleted_decls: *std.AutoArrayHashMap(*Decl, void),
|
||||
outdated_decls: *std.AutoArrayHashMap(*Decl, void),
|
||||
decl_node: ast.Node.Index,
|
||||
decl_i: usize,
|
||||
tree: ast.Tree,
|
||||
body_node: ast.Node.Index,
|
||||
fn_proto: ast.full.FnProto,
|
||||
@ -3361,28 +3495,34 @@ fn semaContainerFn(
|
||||
defer tracy.end();
|
||||
|
||||
// We will create a Decl for it regardless of analysis status.
|
||||
const name_tok = fn_proto.name_token orelse {
|
||||
const name_token = fn_proto.name_token orelse {
|
||||
// This problem will go away with #1717.
|
||||
@panic("TODO missing function name");
|
||||
};
|
||||
const name = tree.tokenSlice(name_tok); // TODO use identifierTokenString
|
||||
const name = tree.tokenSlice(name_token); // TODO use identifierTokenString
|
||||
const name_hash = container_scope.fullyQualifiedNameHash(name);
|
||||
const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
|
||||
if (mod.decl_table.get(name_hash)) |decl| {
|
||||
// Update the AST Node index of the decl, even if its contents are unchanged, it may
|
||||
// have been re-ordered.
|
||||
decl.src_index = decl_i;
|
||||
const prev_src_node = decl.src_node;
|
||||
decl.src_node = decl_node;
|
||||
if (deleted_decls.swapRemove(decl) == null) {
|
||||
decl.analysis = .sema_failure;
|
||||
const msg = try ErrorMsg.create(mod.gpa, .{
|
||||
.container = .{ .file_scope = container_scope.file_scope },
|
||||
.lazy = .{ .token_abs = name_tok },
|
||||
.lazy = .{ .token_abs = name_token },
|
||||
}, "redefinition of '{s}'", .{decl.name});
|
||||
errdefer msg.destroy(mod.gpa);
|
||||
const other_src_loc: SrcLoc = .{
|
||||
.container = .{ .file_scope = decl.container.file_scope },
|
||||
.lazy = .{ .node_abs = prev_src_node },
|
||||
};
|
||||
try mod.errNoteNonLazy(other_src_loc, msg, "previous definition here", .{});
|
||||
try mod.failed_decls.putNoClobber(mod.gpa, decl, msg);
|
||||
} else {
|
||||
if (!srcHashEql(decl.contents_hash, contents_hash)) {
|
||||
try mod.markOutdatedDecl(decl);
|
||||
try outdated_decls.put(decl, {});
|
||||
decl.contents_hash = contents_hash;
|
||||
} else switch (mod.comp.bin_file.tag) {
|
||||
.coff => {
|
||||
@ -3402,7 +3542,7 @@ fn semaContainerFn(
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
|
||||
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_node, name_hash, contents_hash);
|
||||
container_scope.decls.putAssumeCapacity(new_decl, {});
|
||||
if (fn_proto.extern_export_token) |maybe_export_token| {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
@ -3410,6 +3550,7 @@ fn semaContainerFn(
|
||||
mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
|
||||
}
|
||||
}
|
||||
new_decl.is_pub = fn_proto.visib_token != null;
|
||||
}
|
||||
}
|
||||
|
||||
@ -3417,8 +3558,8 @@ fn semaContainerVar(
|
||||
mod: *Module,
|
||||
container_scope: *Scope.Container,
|
||||
deleted_decls: *std.AutoArrayHashMap(*Decl, void),
|
||||
outdated_decls: *std.AutoArrayHashMap(*Decl, void),
|
||||
decl_node: ast.Node.Index,
|
||||
decl_i: usize,
|
||||
tree: ast.Tree,
|
||||
var_decl: ast.full.VarDecl,
|
||||
) !void {
|
||||
@ -3432,21 +3573,27 @@ fn semaContainerVar(
|
||||
if (mod.decl_table.get(name_hash)) |decl| {
|
||||
// Update the AST Node index of the decl, even if its contents are unchanged, it may
|
||||
// have been re-ordered.
|
||||
decl.src_index = decl_i;
|
||||
const prev_src_node = decl.src_node;
|
||||
decl.src_node = decl_node;
|
||||
if (deleted_decls.swapRemove(decl) == null) {
|
||||
decl.analysis = .sema_failure;
|
||||
const err_msg = try ErrorMsg.create(mod.gpa, .{
|
||||
const msg = try ErrorMsg.create(mod.gpa, .{
|
||||
.container = .{ .file_scope = container_scope.file_scope },
|
||||
.lazy = .{ .token_abs = name_token },
|
||||
}, "redefinition of '{s}'", .{decl.name});
|
||||
errdefer err_msg.destroy(mod.gpa);
|
||||
try mod.failed_decls.putNoClobber(mod.gpa, decl, err_msg);
|
||||
errdefer msg.destroy(mod.gpa);
|
||||
const other_src_loc: SrcLoc = .{
|
||||
.container = .{ .file_scope = decl.container.file_scope },
|
||||
.lazy = .{ .node_abs = prev_src_node },
|
||||
};
|
||||
try mod.errNoteNonLazy(other_src_loc, msg, "previous definition here", .{});
|
||||
try mod.failed_decls.putNoClobber(mod.gpa, decl, msg);
|
||||
} else if (!srcHashEql(decl.contents_hash, contents_hash)) {
|
||||
try mod.markOutdatedDecl(decl);
|
||||
try outdated_decls.put(decl, {});
|
||||
decl.contents_hash = contents_hash;
|
||||
}
|
||||
} else {
|
||||
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
|
||||
const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_node, name_hash, contents_hash);
|
||||
container_scope.decls.putAssumeCapacity(new_decl, {});
|
||||
if (var_decl.extern_export_token) |maybe_export_token| {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
@ -3454,35 +3601,31 @@ fn semaContainerVar(
|
||||
mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
|
||||
}
|
||||
}
|
||||
new_decl.is_pub = var_decl.visib_token != null;
|
||||
}
|
||||
}
|
||||
|
||||
fn semaContainerField(
|
||||
pub fn deleteDecl(
|
||||
mod: *Module,
|
||||
container_scope: *Scope.Container,
|
||||
deleted_decls: *std.AutoArrayHashMap(*Decl, void),
|
||||
decl_node: ast.Node.Index,
|
||||
decl_i: usize,
|
||||
tree: ast.Tree,
|
||||
field: ast.full.ContainerField,
|
||||
decl: *Decl,
|
||||
outdated_decls: ?*std.AutoArrayHashMap(*Decl, void),
|
||||
) !void {
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
log.err("TODO: analyze container field", .{});
|
||||
}
|
||||
log.debug("deleting decl '{s}'", .{decl.name});
|
||||
|
||||
pub fn deleteDecl(mod: *Module, decl: *Decl) !void {
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
try mod.deletion_set.ensureCapacity(mod.gpa, mod.deletion_set.items.len + decl.dependencies.items().len);
|
||||
if (outdated_decls) |map| {
|
||||
_ = map.swapRemove(decl);
|
||||
try map.ensureCapacity(map.count() + decl.dependants.count());
|
||||
}
|
||||
try mod.deletion_set.ensureCapacity(mod.gpa, mod.deletion_set.count() +
|
||||
decl.dependencies.count());
|
||||
|
||||
// Remove from the namespace it resides in. In the case of an anonymous Decl it will
|
||||
// not be present in the set, and this does nothing.
|
||||
decl.container.removeDecl(decl);
|
||||
|
||||
log.debug("deleting decl '{s}'", .{decl.name});
|
||||
const name_hash = decl.fullyQualifiedNameHash();
|
||||
mod.decl_table.removeAssertDiscard(name_hash);
|
||||
// Remove itself from its dependencies, because we are about to destroy the decl pointer.
|
||||
@ -3493,16 +3636,22 @@ pub fn deleteDecl(mod: *Module, decl: *Decl) !void {
|
||||
// We don't recursively perform a deletion here, because during the update,
|
||||
// another reference to it may turn up.
|
||||
dep.deletion_flag = true;
|
||||
mod.deletion_set.appendAssumeCapacity(dep);
|
||||
mod.deletion_set.putAssumeCapacity(dep, {});
|
||||
}
|
||||
}
|
||||
// Anything that depends on this deleted decl certainly needs to be re-analyzed.
|
||||
// Anything that depends on this deleted decl needs to be re-analyzed.
|
||||
for (decl.dependants.items()) |entry| {
|
||||
const dep = entry.key;
|
||||
dep.removeDependency(decl);
|
||||
if (dep.analysis != .outdated) {
|
||||
// TODO Move this failure possibility to the top of the function.
|
||||
try mod.markOutdatedDecl(dep);
|
||||
if (outdated_decls) |map| {
|
||||
map.putAssumeCapacity(dep, {});
|
||||
} else if (std.debug.runtime_safety) {
|
||||
// If `outdated_decls` is `null`, it means we're being called from
|
||||
// `Compilation` after `performAllTheWork` and we cannot queue up any
|
||||
// more work. `dep` must necessarily be another Decl that is no longer
|
||||
// being referenced, and will be in the `deletion_set`. Otherwise,
|
||||
// something has gone wrong.
|
||||
assert(mod.deletion_set.contains(dep));
|
||||
}
|
||||
}
|
||||
if (mod.failed_decls.swapRemove(decl)) |entry| {
|
||||
@ -3638,7 +3787,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
|
||||
fn allocateNewDecl(
|
||||
mod: *Module,
|
||||
scope: *Scope,
|
||||
src_index: usize,
|
||||
src_node: ast.Node.Index,
|
||||
contents_hash: std.zig.SrcHash,
|
||||
) !*Decl {
|
||||
// If we have emit-h then we must allocate a bigger structure to store the emit-h state.
|
||||
@ -3654,7 +3803,7 @@ fn allocateNewDecl(
|
||||
new_decl.* = .{
|
||||
.name = "",
|
||||
.container = scope.namespace(),
|
||||
.src_index = src_index,
|
||||
.src_node = src_node,
|
||||
.typed_value = .{ .never_succeeded = {} },
|
||||
.analysis = .unreferenced,
|
||||
.deletion_flag = false,
|
||||
@ -3664,7 +3813,7 @@ fn allocateNewDecl(
|
||||
.elf => .{ .elf = link.File.Elf.TextBlock.empty },
|
||||
.macho => .{ .macho = link.File.MachO.TextBlock.empty },
|
||||
.c => .{ .c = link.File.C.DeclBlock.empty },
|
||||
.wasm => .{ .wasm = {} },
|
||||
.wasm => .{ .wasm = link.File.Wasm.DeclBlock.empty },
|
||||
.spirv => .{ .spirv = {} },
|
||||
},
|
||||
.fn_link = switch (mod.comp.bin_file.tag) {
|
||||
@ -3672,7 +3821,7 @@ fn allocateNewDecl(
|
||||
.elf => .{ .elf = link.File.Elf.SrcFn.empty },
|
||||
.macho => .{ .macho = link.File.MachO.SrcFn.empty },
|
||||
.c => .{ .c = link.File.C.FnBlock.empty },
|
||||
.wasm => .{ .wasm = null },
|
||||
.wasm => .{ .wasm = link.File.Wasm.FnData.empty },
|
||||
.spirv => .{ .spirv = .{} },
|
||||
},
|
||||
.generation = 0,
|
||||
@ -3685,12 +3834,12 @@ fn createNewDecl(
|
||||
mod: *Module,
|
||||
scope: *Scope,
|
||||
decl_name: []const u8,
|
||||
src_index: usize,
|
||||
src_node: ast.Node.Index,
|
||||
name_hash: Scope.NameHash,
|
||||
contents_hash: std.zig.SrcHash,
|
||||
) !*Decl {
|
||||
try mod.decl_table.ensureCapacity(mod.gpa, mod.decl_table.items().len + 1);
|
||||
const new_decl = try mod.allocateNewDecl(scope, src_index, contents_hash);
|
||||
const new_decl = try mod.allocateNewDecl(scope, src_node, contents_hash);
|
||||
errdefer mod.gpa.destroy(new_decl);
|
||||
new_decl.name = try mem.dupeZ(mod.gpa, u8, decl_name);
|
||||
mod.decl_table.putAssumeCapacityNoClobber(name_hash, new_decl);
|
||||
@ -3903,7 +4052,7 @@ pub fn createAnonymousDecl(
|
||||
defer mod.gpa.free(name);
|
||||
const name_hash = scope.namespace().fullyQualifiedNameHash(name);
|
||||
const src_hash: std.zig.SrcHash = undefined;
|
||||
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_index, name_hash, src_hash);
|
||||
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_node, name_hash, src_hash);
|
||||
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
|
||||
|
||||
decl_arena_state.* = decl_arena.state;
|
||||
@ -3939,7 +4088,7 @@ pub fn createContainerDecl(
|
||||
defer mod.gpa.free(name);
|
||||
const name_hash = scope.namespace().fullyQualifiedNameHash(name);
|
||||
const src_hash: std.zig.SrcHash = undefined;
|
||||
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_index, name_hash, src_hash);
|
||||
const new_decl = try mod.createNewDecl(scope, name, scope_decl.src_node, name_hash, src_hash);
|
||||
const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
|
||||
|
||||
decl_arena_state.* = decl_arena.state;
|
||||
@ -4003,13 +4152,23 @@ pub fn errNote(
|
||||
parent: *ErrorMsg,
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) error{OutOfMemory}!void {
|
||||
return mod.errNoteNonLazy(src.toSrcLoc(scope), parent, format, args);
|
||||
}
|
||||
|
||||
pub fn errNoteNonLazy(
|
||||
mod: *Module,
|
||||
src_loc: SrcLoc,
|
||||
parent: *ErrorMsg,
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) error{OutOfMemory}!void {
|
||||
const msg = try std.fmt.allocPrint(mod.gpa, format, args);
|
||||
errdefer mod.gpa.free(msg);
|
||||
|
||||
parent.notes = try mod.gpa.realloc(parent.notes, parent.notes.len + 1);
|
||||
parent.notes[parent.notes.len - 1] = .{
|
||||
.src_loc = src.toSrcLoc(scope),
|
||||
.src_loc = src_loc,
|
||||
.msg = msg,
|
||||
};
|
||||
}
|
||||
@ -4412,7 +4571,29 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex)
|
||||
var buf: ArrayListUnmanaged(u8) = .{};
|
||||
defer buf.deinit(mod.gpa);
|
||||
try parseStrLit(mod, scope, token, &buf, ident_name, 1);
|
||||
return buf.toOwnedSlice(mod.gpa);
|
||||
const duped = try scope.arena().dupe(u8, buf.items);
|
||||
return duped;
|
||||
}
|
||||
|
||||
/// `scope` is only used for error reporting.
|
||||
/// The string is stored in `arena` regardless of whether it uses @"" syntax.
|
||||
pub fn identifierTokenStringTreeArena(
|
||||
mod: *Module,
|
||||
scope: *Scope,
|
||||
token: ast.TokenIndex,
|
||||
tree: *const ast.Tree,
|
||||
arena: *Allocator,
|
||||
) InnerError![]u8 {
|
||||
const token_tags = tree.tokens.items(.tag);
|
||||
assert(token_tags[token] == .identifier);
|
||||
const ident_name = tree.tokenSlice(token);
|
||||
if (!mem.startsWith(u8, ident_name, "@")) {
|
||||
return arena.dupe(u8, ident_name);
|
||||
}
|
||||
var buf: ArrayListUnmanaged(u8) = .{};
|
||||
defer buf.deinit(mod.gpa);
|
||||
try parseStrLit(mod, scope, token, &buf, ident_name, 1);
|
||||
return arena.dupe(u8, buf.items);
|
||||
}
|
||||
|
||||
/// Given an identifier token, obtain the string for it (possibly parsing as a string
|
||||
@ -4502,3 +4683,10 @@ pub fn parseStrLit(
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unloadFile(mod: *Module, file_scope: *Scope.File) void {
|
||||
if (file_scope.status == .unloaded_parse_failure) {
|
||||
mod.failed_files.swapRemove(file_scope).?.value.destroy(mod.gpa);
|
||||
}
|
||||
file_scope.unload(mod.gpa);
|
||||
}
|
||||
|
||||
@ -4,18 +4,29 @@ const std = @import("std");
|
||||
const fs = std.fs;
|
||||
const mem = std.mem;
|
||||
const Allocator = mem.Allocator;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
const Compilation = @import("Compilation.zig");
|
||||
const Module = @import("Module.zig");
|
||||
|
||||
pub const Table = std.StringHashMapUnmanaged(*Package);
|
||||
|
||||
pub const root_namespace_hash: Module.Scope.NameHash = .{
|
||||
0, 0, 6, 6, 6, 0, 0, 0,
|
||||
6, 9, 0, 0, 0, 4, 2, 0,
|
||||
};
|
||||
|
||||
root_src_directory: Compilation.Directory,
|
||||
/// Relative to `root_src_directory`. May contain path separators.
|
||||
root_src_path: []const u8,
|
||||
table: Table = .{},
|
||||
parent: ?*Package = null,
|
||||
namespace_hash: Module.Scope.NameHash,
|
||||
/// Whether to free `root_src_directory` on `destroy`.
|
||||
root_src_directory_owned: bool = false,
|
||||
|
||||
/// Allocate a Package. No references to the slices passed are kept.
|
||||
/// Don't forget to set `namespace_hash` later.
|
||||
pub fn create(
|
||||
gpa: *Allocator,
|
||||
/// Null indicates the current working directory
|
||||
@ -38,27 +49,69 @@ pub fn create(
|
||||
.handle = if (owned_dir_path) |p| try fs.cwd().openDir(p, .{}) else fs.cwd(),
|
||||
},
|
||||
.root_src_path = owned_src_path,
|
||||
.root_src_directory_owned = true,
|
||||
.namespace_hash = undefined,
|
||||
};
|
||||
|
||||
return ptr;
|
||||
}
|
||||
|
||||
/// Free all memory associated with this package and recursively call destroy
|
||||
/// on all packages in its table
|
||||
pub fn createWithDir(
|
||||
gpa: *Allocator,
|
||||
directory: Compilation.Directory,
|
||||
/// Relative to `directory`. If null, means `directory` is the root src dir
|
||||
/// and is owned externally.
|
||||
root_src_dir_path: ?[]const u8,
|
||||
/// Relative to root_src_dir_path
|
||||
root_src_path: []const u8,
|
||||
) !*Package {
|
||||
const ptr = try gpa.create(Package);
|
||||
errdefer gpa.destroy(ptr);
|
||||
|
||||
const owned_src_path = try gpa.dupe(u8, root_src_path);
|
||||
errdefer gpa.free(owned_src_path);
|
||||
|
||||
if (root_src_dir_path) |p| {
|
||||
const owned_dir_path = try directory.join(gpa, &[1][]const u8{p});
|
||||
errdefer gpa.free(owned_dir_path);
|
||||
|
||||
ptr.* = .{
|
||||
.root_src_directory = .{
|
||||
.path = owned_dir_path,
|
||||
.handle = try directory.handle.openDir(p, .{}),
|
||||
},
|
||||
.root_src_directory_owned = true,
|
||||
.root_src_path = owned_src_path,
|
||||
.namespace_hash = undefined,
|
||||
};
|
||||
} else {
|
||||
ptr.* = .{
|
||||
.root_src_directory = directory,
|
||||
.root_src_directory_owned = false,
|
||||
.root_src_path = owned_src_path,
|
||||
.namespace_hash = undefined,
|
||||
};
|
||||
}
|
||||
return ptr;
|
||||
}
|
||||
|
||||
/// Free all memory associated with this package. It does not destroy any packages
|
||||
/// inside its table; the caller is responsible for calling destroy() on them.
|
||||
pub fn destroy(pkg: *Package, gpa: *Allocator) void {
|
||||
gpa.free(pkg.root_src_path);
|
||||
|
||||
// If root_src_directory.path is null then the handle is the cwd()
|
||||
// which shouldn't be closed.
|
||||
if (pkg.root_src_directory.path) |p| {
|
||||
gpa.free(p);
|
||||
pkg.root_src_directory.handle.close();
|
||||
if (pkg.root_src_directory_owned) {
|
||||
// If root_src_directory.path is null then the handle is the cwd()
|
||||
// which shouldn't be closed.
|
||||
if (pkg.root_src_directory.path) |p| {
|
||||
gpa.free(p);
|
||||
pkg.root_src_directory.handle.close();
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
var it = pkg.table.iterator();
|
||||
while (it.next()) |kv| {
|
||||
kv.value.destroy(gpa);
|
||||
gpa.free(kv.key);
|
||||
}
|
||||
}
|
||||
@ -72,3 +125,10 @@ pub fn add(pkg: *Package, gpa: *Allocator, name: []const u8, package: *Package)
|
||||
const name_dupe = try mem.dupe(gpa, u8, name);
|
||||
pkg.table.putAssumeCapacityNoClobber(name_dupe, package);
|
||||
}
|
||||
|
||||
pub fn addAndAdopt(parent: *Package, gpa: *Allocator, name: []const u8, child: *Package) !void {
|
||||
assert(child.parent == null); // make up your mind, who is the parent??
|
||||
child.parent = parent;
|
||||
child.namespace_hash = std.zig.hashName(parent.namespace_hash, ":", name);
|
||||
return parent.add(gpa, name, child);
|
||||
}
|
||||
|
||||
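// Illustrative sketch only (not part of this commit): how a root package and a
// dependency might be wired together with the new namespace_hash plumbing. The
// names `gpa`, `cwd_dir`, and the paths below are hypothetical placeholders;
// `Package` refers to this file.
const root_pkg = try Package.createWithDir(gpa, cwd_dir, null, "main.zig");
defer root_pkg.destroy(gpa);
root_pkg.namespace_hash = Package.root_namespace_hash;

const foo_pkg = try Package.createWithDir(gpa, cwd_dir, "deps/foo", "foo.zig");
defer foo_pkg.destroy(gpa);
// addAndAdopt sets foo_pkg.parent and derives foo_pkg.namespace_hash from the
// parent hash and the package name before inserting it into root_pkg.table.
try root_pkg.addAndAdopt(gpa, "foo", foo_pkg);
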
src/Sema.zig: 867 changes (diff suppressed because it is too large)
@ -300,6 +300,14 @@ pub const ConstantExpr = opaque {};

pub const ContinueStmt = opaque {};

pub const ConvertVectorExpr = opaque {
    pub const getSrcExpr = ZigClangConvertVectorExpr_getSrcExpr;
    extern fn ZigClangConvertVectorExpr_getSrcExpr(*const ConvertVectorExpr) *const Expr;

    pub const getTypeSourceInfo_getType = ZigClangConvertVectorExpr_getTypeSourceInfo_getType;
    extern fn ZigClangConvertVectorExpr_getTypeSourceInfo_getType(*const ConvertVectorExpr) QualType;
};

pub const DecayedType = opaque {
    pub const getDecayedType = ZigClangDecayedType_getDecayedType;
    extern fn ZigClangDecayedType_getDecayedType(*const DecayedType) QualType;
@ -748,6 +756,14 @@ pub const ReturnStmt = opaque {
    extern fn ZigClangReturnStmt_getRetValue(*const ReturnStmt) ?*const Expr;
};

pub const ShuffleVectorExpr = opaque {
    pub const getNumSubExprs = ZigClangShuffleVectorExpr_getNumSubExprs;
    extern fn ZigClangShuffleVectorExpr_getNumSubExprs(*const ShuffleVectorExpr) c_uint;

    pub const getExpr = ZigClangShuffleVectorExpr_getExpr;
    extern fn ZigClangShuffleVectorExpr_getExpr(*const ShuffleVectorExpr, c_uint) *const Expr;
};

pub const SourceManager = opaque {
    pub const getSpellingLoc = ZigClangSourceManager_getSpellingLoc;
    extern fn ZigClangSourceManager_getSpellingLoc(*const SourceManager, Loc: SourceLocation) SourceLocation;
@ -837,6 +853,9 @@ pub const Type = opaque {
    pub const isRecordType = ZigClangType_isRecordType;
    extern fn ZigClangType_isRecordType(*const Type) bool;

    pub const isVectorType = ZigClangType_isVectorType;
    extern fn ZigClangType_isVectorType(*const Type) bool;

    pub const isIncompleteOrZeroLengthArrayType = ZigClangType_isIncompleteOrZeroLengthArrayType;
    extern fn ZigClangType_isIncompleteOrZeroLengthArrayType(*const Type, *const ASTContext) bool;

@ -937,6 +956,14 @@ pub const VarDecl = opaque {
    extern fn ZigClangVarDecl_getTypeSourceInfo_getType(*const VarDecl) QualType;
};

pub const VectorType = opaque {
    pub const getElementType = ZigClangVectorType_getElementType;
    extern fn ZigClangVectorType_getElementType(*const VectorType) QualType;

    pub const getNumElements = ZigClangVectorType_getNumElements;
    extern fn ZigClangVectorType_getNumElements(*const VectorType) c_uint;
};

pub const WhileStmt = opaque {
    pub const getCond = ZigClangWhileStmt_getCond;
    extern fn ZigClangWhileStmt_getCond(*const WhileStmt) *const Expr;

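// Hypothetical usage sketch (not from this commit): how translate-c might read a
// __builtin_shufflevector call through the new ShuffleVectorExpr binding. The
// parameter name `expr` and the surrounding helper are illustrative only.
fn shuffleOperandCount(expr: *const ShuffleVectorExpr) c_uint {
    // In Clang, the sub-expressions are the two input vectors followed by one
    // index expression per result element.
    return expr.getNumSubExprs();
}
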
@ -417,7 +417,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const fn_decl = tree.rootDecls()[module_fn.owner_decl.src_index];
|
||||
const fn_decl = module_fn.owner_decl.src_node;
|
||||
assert(node_tags[fn_decl] == .fn_decl);
|
||||
const block = node_datas[fn_decl].rhs;
|
||||
const lbrace_src = token_starts[tree.firstToken(block)];
|
||||
@ -855,6 +855,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
.not => return self.genNot(inst.castTag(.not).?),
|
||||
.mul => return self.genMul(inst.castTag(.mul).?),
|
||||
.mulwrap => return self.genMulWrap(inst.castTag(.mulwrap).?),
|
||||
.div => return self.genDiv(inst.castTag(.div).?),
|
||||
.ptrtoint => return self.genPtrToInt(inst.castTag(.ptrtoint).?),
|
||||
.ref => return self.genRef(inst.castTag(.ref).?),
|
||||
.ret => return self.genRet(inst.castTag(.ret).?),
|
||||
@ -1092,6 +1093,15 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
}
|
||||
}
|
||||
|
||||
fn genDiv(self: *Self, inst: *ir.Inst.BinOp) !MCValue {
|
||||
// No side effects, so if it's unreferenced, do nothing.
|
||||
if (inst.base.isUnused())
|
||||
return MCValue.dead;
|
||||
switch (arch) {
|
||||
else => return self.fail(inst.base.src, "TODO implement div for {}", .{self.target.cpu.arch}),
|
||||
}
|
||||
}
|
||||
|
||||
fn genBitAnd(self: *Self, inst: *ir.Inst.BinOp) !MCValue {
|
||||
// No side effects, so if it's unreferenced, do nothing.
|
||||
if (inst.base.isUnused())
|
||||
@ -1735,7 +1745,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
|
||||
switch (result) {
|
||||
.register => |reg| {
|
||||
try self.register_manager.getRegAssumeFree(toCanonicalReg(reg), &inst.base);
|
||||
try self.register_manager.registers.ensureCapacity(self.gpa, self.register_manager.registers.count() + 1);
|
||||
self.register_manager.getRegAssumeFree(toCanonicalReg(reg), &inst.base);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@ -1783,8 +1794,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
switch (mc_arg) {
|
||||
.none => continue,
|
||||
.register => |reg| {
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(arg.src, arg.ty, reg, arg_mcv);
|
||||
// TODO interact with the register allocator to mark the instruction as moved.
|
||||
},
|
||||
.stack_offset => {
|
||||
// Here we need to emit instructions like this:
|
||||
@ -1925,8 +1936,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
.compare_flags_signed => unreachable,
|
||||
.compare_flags_unsigned => unreachable,
|
||||
.register => |reg| {
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(arg.src, arg.ty, reg, arg_mcv);
|
||||
// TODO interact with the register allocator to mark the instruction as moved.
|
||||
},
|
||||
.stack_offset => {
|
||||
return self.fail(inst.base.src, "TODO implement calling with parameters in memory", .{});
|
||||
@ -1988,8 +1999,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
.compare_flags_signed => unreachable,
|
||||
.compare_flags_unsigned => unreachable,
|
||||
.register => |reg| {
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(arg.src, arg.ty, reg, arg_mcv);
|
||||
// TODO interact with the register allocator to mark the instruction as moved.
|
||||
},
|
||||
.stack_offset => {
|
||||
return self.fail(inst.base.src, "TODO implement calling with parameters in memory", .{});
|
||||
@ -2039,8 +2050,8 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
switch (mc_arg) {
|
||||
.none => continue,
|
||||
.register => |reg| {
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(arg.src, arg.ty, reg, arg_mcv);
|
||||
// TODO interact with the register allocator to mark the instruction as moved.
|
||||
},
|
||||
.stack_offset => {
|
||||
// Here we need to emit instructions like this:
|
||||
@ -2704,8 +2715,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
const reg_name = input[1 .. input.len - 1];
|
||||
const reg = parseRegName(reg_name) orelse
|
||||
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
|
||||
const arg = try self.resolveInst(inst.args[i]);
|
||||
try self.genSetReg(inst.base.src, inst.args[i].ty, reg, arg);
|
||||
|
||||
const arg = inst.args[i];
|
||||
const arg_mcv = try self.resolveInst(arg);
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(inst.base.src, arg.ty, reg, arg_mcv);
|
||||
}
|
||||
|
||||
if (mem.eql(u8, inst.asm_source, "svc #0")) {
|
||||
@ -2734,8 +2748,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
const reg_name = input[1 .. input.len - 1];
|
||||
const reg = parseRegName(reg_name) orelse
|
||||
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
|
||||
const arg = try self.resolveInst(inst.args[i]);
|
||||
try self.genSetReg(inst.base.src, inst.args[i].ty, reg, arg);
|
||||
|
||||
const arg = inst.args[i];
|
||||
const arg_mcv = try self.resolveInst(arg);
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(inst.base.src, arg.ty, reg, arg_mcv);
|
||||
}
|
||||
|
||||
if (mem.eql(u8, inst.asm_source, "svc #0")) {
|
||||
@ -2766,8 +2783,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
const reg_name = input[1 .. input.len - 1];
|
||||
const reg = parseRegName(reg_name) orelse
|
||||
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
|
||||
const arg = try self.resolveInst(inst.args[i]);
|
||||
try self.genSetReg(inst.base.src, inst.args[i].ty, reg, arg);
|
||||
|
||||
const arg = inst.args[i];
|
||||
const arg_mcv = try self.resolveInst(arg);
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(inst.base.src, arg.ty, reg, arg_mcv);
|
||||
}
|
||||
|
||||
if (mem.eql(u8, inst.asm_source, "ecall")) {
|
||||
@ -2796,8 +2816,11 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
const reg_name = input[1 .. input.len - 1];
|
||||
const reg = parseRegName(reg_name) orelse
|
||||
return self.fail(inst.base.src, "unrecognized register: '{s}'", .{reg_name});
|
||||
const arg = try self.resolveInst(inst.args[i]);
|
||||
try self.genSetReg(inst.base.src, inst.args[i].ty, reg, arg);
|
||||
|
||||
const arg = inst.args[i];
|
||||
const arg_mcv = try self.resolveInst(arg);
|
||||
try self.register_manager.getRegWithoutTracking(reg);
|
||||
try self.genSetReg(inst.base.src, arg.ty, reg, arg_mcv);
|
||||
}
|
||||
|
||||
if (mem.eql(u8, inst.asm_source, "syscall")) {
|
||||
@ -3302,6 +3325,43 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
|
||||
mem.writeIntLittle(u32, try self.code.addManyAsArray(4), Instruction.ldr(reg, .{ .register = .{ .rn = reg } }).toU32());
|
||||
}
|
||||
},
|
||||
.stack_offset => |unadjusted_off| {
|
||||
// TODO: maybe addressing from sp instead of fp
|
||||
const abi_size = ty.abiSize(self.target.*);
|
||||
const adj_off = unadjusted_off + abi_size;
|
||||
|
||||
const rn: Register = switch (arch) {
|
||||
.aarch64, .aarch64_be => .x29,
|
||||
.aarch64_32 => .w29,
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
const offset = if (math.cast(i9, adj_off)) |imm|
|
||||
Instruction.LoadStoreOffset.imm_post_index(-imm)
|
||||
else |_|
|
||||
Instruction.LoadStoreOffset.reg(try self.copyToTmpRegister(src, Type.initTag(.u64), MCValue{ .immediate = adj_off }));
|
||||
|
||||
switch (abi_size) {
|
||||
1, 2 => {
|
||||
const ldr = switch (abi_size) {
|
||||
1 => Instruction.ldrb,
|
||||
2 => Instruction.ldrh,
|
||||
else => unreachable, // unexpected abi size
|
||||
};
|
||||
|
||||
writeInt(u32, try self.code.addManyAsArray(4), ldr(reg, rn, .{
|
||||
.offset = offset,
|
||||
}).toU32());
|
||||
},
|
||||
4, 8 => {
|
||||
writeInt(u32, try self.code.addManyAsArray(4), Instruction.ldr(reg, .{ .register = .{
|
||||
.rn = rn,
|
||||
.offset = offset,
|
||||
} }).toU32());
|
||||
},
|
||||
else => return self.fail(src, "TODO implement genSetReg other types abi_size={}", .{abi_size}),
|
||||
}
|
||||
},
|
||||
else => return self.fail(src, "TODO implement genSetReg for aarch64 {}", .{mcv}),
|
||||
},
|
||||
.riscv64 => switch (mcv) {
|
||||
|
||||
@ -200,7 +200,7 @@ test "FloatingPointRegister.toX" {
|
||||
|
||||
/// Represents an instruction in the AArch64 instruction set
|
||||
pub const Instruction = union(enum) {
|
||||
MoveWideImmediate: packed struct {
|
||||
move_wide_immediate: packed struct {
|
||||
rd: u5,
|
||||
imm16: u16,
|
||||
hw: u2,
|
||||
@ -208,14 +208,14 @@ pub const Instruction = union(enum) {
|
||||
opc: u2,
|
||||
sf: u1,
|
||||
},
|
||||
PCRelativeAddress: packed struct {
|
||||
pc_relative_address: packed struct {
|
||||
rd: u5,
|
||||
immhi: u19,
|
||||
fixed: u5 = 0b10000,
|
||||
immlo: u2,
|
||||
op: u1,
|
||||
},
|
||||
LoadStoreRegister: packed struct {
|
||||
load_store_register: packed struct {
|
||||
rt: u5,
|
||||
rn: u5,
|
||||
offset: u12,
|
||||
@ -225,7 +225,7 @@ pub const Instruction = union(enum) {
|
||||
fixed: u3 = 0b111,
|
||||
size: u2,
|
||||
},
|
||||
LoadStorePairOfRegisters: packed struct {
|
||||
load_store_register_pair: packed struct {
|
||||
rt1: u5,
|
||||
rn: u5,
|
||||
rt2: u5,
|
||||
@ -235,20 +235,20 @@ pub const Instruction = union(enum) {
|
||||
fixed: u5 = 0b101_0_0,
|
||||
opc: u2,
|
||||
},
|
||||
LoadLiteral: packed struct {
|
||||
load_literal: packed struct {
|
||||
rt: u5,
|
||||
imm19: u19,
|
||||
fixed: u6 = 0b011_0_00,
|
||||
opc: u2,
|
||||
},
|
||||
ExceptionGeneration: packed struct {
|
||||
exception_generation: packed struct {
|
||||
ll: u2,
|
||||
op2: u3,
|
||||
imm16: u16,
|
||||
opc: u3,
|
||||
fixed: u8 = 0b1101_0100,
|
||||
},
|
||||
UnconditionalBranchRegister: packed struct {
|
||||
unconditional_branch_register: packed struct {
|
||||
op4: u5,
|
||||
rn: u5,
|
||||
op3: u6,
|
||||
@ -256,15 +256,15 @@ pub const Instruction = union(enum) {
|
||||
opc: u4,
|
||||
fixed: u7 = 0b1101_011,
|
||||
},
|
||||
UnconditionalBranchImmediate: packed struct {
|
||||
unconditional_branch_immediate: packed struct {
|
||||
imm26: u26,
|
||||
fixed: u5 = 0b00101,
|
||||
op: u1,
|
||||
},
|
||||
NoOperation: packed struct {
|
||||
no_operation: packed struct {
|
||||
fixed: u32 = 0b1101010100_0_00_011_0010_0000_000_11111,
|
||||
},
|
||||
LogicalShiftedRegister: packed struct {
|
||||
logical_shifted_register: packed struct {
|
||||
rd: u5,
|
||||
rn: u5,
|
||||
imm6: u6,
|
||||
@ -275,7 +275,7 @@ pub const Instruction = union(enum) {
|
||||
opc: u2,
|
||||
sf: u1,
|
||||
},
|
||||
AddSubtractImmediate: packed struct {
|
||||
add_subtract_immediate: packed struct {
|
||||
rd: u5,
|
||||
rn: u5,
|
||||
imm12: u12,
|
||||
@ -285,6 +285,20 @@ pub const Instruction = union(enum) {
|
||||
op: u1,
|
||||
sf: u1,
|
||||
},
|
||||
conditional_branch: struct {
|
||||
cond: u4,
|
||||
o0: u1,
|
||||
imm19: u19,
|
||||
o1: u1,
|
||||
fixed: u7 = 0b0101010,
|
||||
},
|
||||
compare_and_branch: struct {
|
||||
rt: u5,
|
||||
imm19: u19,
|
||||
op: u1,
|
||||
fixed: u6 = 0b011010,
|
||||
sf: u1,
|
||||
},
|
||||
|
||||
pub const Shift = struct {
|
||||
shift: Type = .lsl,
|
||||
@ -303,19 +317,73 @@ pub const Instruction = union(enum) {
|
||||
};
|
||||
};
|
||||
|
||||
pub const Condition = enum(u4) {
|
||||
/// Integer: Equal
|
||||
/// Floating point: Equal
|
||||
eq,
|
||||
/// Integer: Not equal
|
||||
/// Floating point: Not equal or unordered
|
||||
ne,
|
||||
/// Integer: Carry set
|
||||
/// Floating point: Greater than, equal, or unordered
|
||||
cs,
|
||||
/// Integer: Carry clear
|
||||
/// Floating point: Less than
|
||||
cc,
|
||||
/// Integer: Minus, negative
|
||||
/// Floating point: Less than
|
||||
mi,
|
||||
/// Integer: Plus, positive or zero
|
||||
/// Floating point: Greater than, equal, or unordered
|
||||
pl,
|
||||
/// Integer: Overflow
|
||||
/// Floating point: Unordered
|
||||
vs,
|
||||
/// Integer: No overflow
|
||||
/// Floating point: Ordered
|
||||
vc,
|
||||
/// Integer: Unsigned higher
|
||||
/// Floating point: Greater than, or unordered
|
||||
hi,
|
||||
/// Integer: Unsigned lower or same
|
||||
/// Floating point: Less than or equal
|
||||
ls,
|
||||
/// Integer: Signed greater than or equal
|
||||
/// Floating point: Greater than or equal
|
||||
ge,
|
||||
/// Integer: Signed less than
|
||||
/// Floating point: Less than, or unordered
|
||||
lt,
|
||||
/// Integer: Signed greater than
|
||||
/// Floating point: Greater than
|
||||
gt,
|
||||
/// Integer: Signed less than or equal
|
||||
/// Floating point: Less than, equal, or unordered
|
||||
le,
|
||||
/// Integer: Always
|
||||
/// Floating point: Always
|
||||
al,
|
||||
/// Integer: Always
|
||||
/// Floating point: Always
|
||||
nv,
|
||||
};
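// Hypothetical helper (not part of this commit): one way a backend could map a
// Zig integer comparison onto the condition codes above. Signedness matters:
// signed compares use the lt/le/gt/ge conditions, unsigned compares use the
// carry/unsigned conditions (cc, ls, hi, cs).
fn intCondition(op: std.math.CompareOperator, signedness: std.builtin.Signedness) Condition {
    return switch (op) {
        .eq => .eq,
        .neq => .ne,
        .lt => if (signedness == .signed) Condition.lt else .cc,
        .lte => if (signedness == .signed) Condition.le else .ls,
        .gt => if (signedness == .signed) Condition.gt else .hi,
        .gte => if (signedness == .signed) Condition.ge else .cs,
    };
}
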
|
||||
|
||||
pub fn toU32(self: Instruction) u32 {
|
||||
return switch (self) {
|
||||
.MoveWideImmediate => |v| @bitCast(u32, v),
|
||||
.PCRelativeAddress => |v| @bitCast(u32, v),
|
||||
.LoadStoreRegister => |v| @bitCast(u32, v),
|
||||
.LoadStorePairOfRegisters => |v| @bitCast(u32, v),
|
||||
.LoadLiteral => |v| @bitCast(u32, v),
|
||||
.ExceptionGeneration => |v| @bitCast(u32, v),
|
||||
.UnconditionalBranchRegister => |v| @bitCast(u32, v),
|
||||
.UnconditionalBranchImmediate => |v| @bitCast(u32, v),
|
||||
.NoOperation => |v| @bitCast(u32, v),
|
||||
.LogicalShiftedRegister => |v| @bitCast(u32, v),
|
||||
.AddSubtractImmediate => |v| @bitCast(u32, v),
|
||||
.move_wide_immediate => |v| @bitCast(u32, v),
|
||||
.pc_relative_address => |v| @bitCast(u32, v),
|
||||
.load_store_register => |v| @bitCast(u32, v),
|
||||
.load_store_register_pair => |v| @bitCast(u32, v),
|
||||
.load_literal => |v| @bitCast(u32, v),
|
||||
.exception_generation => |v| @bitCast(u32, v),
|
||||
.unconditional_branch_register => |v| @bitCast(u32, v),
|
||||
.unconditional_branch_immediate => |v| @bitCast(u32, v),
|
||||
.no_operation => |v| @bitCast(u32, v),
|
||||
.logical_shifted_register => |v| @bitCast(u32, v),
|
||||
.add_subtract_immediate => |v| @bitCast(u32, v),
|
||||
// TODO once packed structs work, this can be refactored
|
||||
.conditional_branch => |v| @as(u32, v.cond) | (@as(u32, v.o0) << 4) | (@as(u32, v.imm19) << 5) | (@as(u32, v.o1) << 24) | (@as(u32, v.fixed) << 25),
|
||||
.compare_and_branch => |v| @as(u32, v.rt) | (@as(u32, v.imm19) << 5) | (@as(u32, v.op) << 24) | (@as(u32, v.fixed) << 25) | (@as(u32, v.sf) << 31),
|
||||
};
|
||||
}
|
||||
|
||||
@ -329,7 +397,7 @@ pub const Instruction = union(enum) {
|
||||
32 => {
|
||||
assert(shift % 16 == 0 and shift <= 16);
|
||||
return Instruction{
|
||||
.MoveWideImmediate = .{
|
||||
.move_wide_immediate = .{
|
||||
.rd = rd.id(),
|
||||
.imm16 = imm16,
|
||||
.hw = @intCast(u2, shift / 16),
|
||||
@ -341,7 +409,7 @@ pub const Instruction = union(enum) {
|
||||
64 => {
|
||||
assert(shift % 16 == 0 and shift <= 48);
|
||||
return Instruction{
|
||||
.MoveWideImmediate = .{
|
||||
.move_wide_immediate = .{
|
||||
.rd = rd.id(),
|
||||
.imm16 = imm16,
|
||||
.hw = @intCast(u2, shift / 16),
|
||||
@ -358,7 +426,7 @@ pub const Instruction = union(enum) {
|
||||
assert(rd.size() == 64);
|
||||
const imm21_u = @bitCast(u21, imm21);
|
||||
return Instruction{
|
||||
.PCRelativeAddress = .{
|
||||
.pc_relative_address = .{
|
||||
.rd = rd.id(),
|
||||
.immlo = @truncate(u2, imm21_u),
|
||||
.immhi = @truncate(u19, imm21_u >> 2),
|
||||
@ -487,11 +555,17 @@ pub const Instruction = union(enum) {
|
||||
/// Which kind of load/store to perform
|
||||
const LoadStoreVariant = enum {
|
||||
/// 32-bit or 64-bit
|
||||
normal,
|
||||
/// 16-bit
|
||||
half,
|
||||
/// 8-bit
|
||||
byte,
|
||||
str,
|
||||
/// 16-bit, zero-extended
|
||||
strh,
|
||||
/// 8-bit, zero-extended
|
||||
strb,
|
||||
/// 32-bit or 64-bit
|
||||
ldr,
|
||||
/// 16-bit, zero-extended
|
||||
ldrh,
|
||||
/// 8-bit, zero-extended
|
||||
ldrb,
|
||||
};
|
||||
|
||||
fn loadStoreRegister(
|
||||
@ -499,7 +573,6 @@ pub const Instruction = union(enum) {
|
||||
rn: Register,
|
||||
offset: LoadStoreOffset,
|
||||
variant: LoadStoreVariant,
|
||||
load: bool,
|
||||
) Instruction {
|
||||
const off = offset.toU12();
|
||||
const op1: u2 = blk: {
|
||||
@ -512,9 +585,12 @@ pub const Instruction = union(enum) {
|
||||
}
|
||||
break :blk 0b00;
|
||||
};
|
||||
const opc: u2 = if (load) 0b01 else 0b00;
|
||||
const opc: u2 = switch (variant) {
|
||||
.ldr, .ldrh, .ldrb => 0b01,
|
||||
.str, .strh, .strb => 0b00,
|
||||
};
|
||||
return Instruction{
|
||||
.LoadStoreRegister = .{
|
||||
.load_store_register = .{
|
||||
.rt = rt.id(),
|
||||
.rn = rn.id(),
|
||||
.offset = off,
|
||||
@ -523,20 +599,20 @@ pub const Instruction = union(enum) {
|
||||
.v = 0,
|
||||
.size = blk: {
|
||||
switch (variant) {
|
||||
.normal => switch (rt.size()) {
|
||||
.ldr, .str => switch (rt.size()) {
|
||||
32 => break :blk 0b10,
|
||||
64 => break :blk 0b11,
|
||||
else => unreachable, // unexpected register size
|
||||
},
|
||||
.half => break :blk 0b01,
|
||||
.byte => break :blk 0b00,
|
||||
.ldrh, .strh => break :blk 0b01,
|
||||
.ldrb, .strb => break :blk 0b00,
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn loadStorePairOfRegisters(
|
||||
fn loadStoreRegisterPair(
|
||||
rt1: Register,
|
||||
rt2: Register,
|
||||
rn: Register,
|
||||
@ -549,7 +625,7 @@ pub const Instruction = union(enum) {
|
||||
assert(-256 <= offset and offset <= 252);
|
||||
const imm7 = @truncate(u7, @bitCast(u9, offset >> 2));
|
||||
return Instruction{
|
||||
.LoadStorePairOfRegisters = .{
|
||||
.load_store_register_pair = .{
|
||||
.rt1 = rt1.id(),
|
||||
.rn = rn.id(),
|
||||
.rt2 = rt2.id(),
|
||||
@ -564,7 +640,7 @@ pub const Instruction = union(enum) {
|
||||
assert(-512 <= offset and offset <= 504);
|
||||
const imm7 = @truncate(u7, @bitCast(u9, offset >> 3));
|
||||
return Instruction{
|
||||
.LoadStorePairOfRegisters = .{
|
||||
.load_store_register_pair = .{
|
||||
.rt1 = rt1.id(),
|
||||
.rn = rn.id(),
|
||||
.rt2 = rt2.id(),
|
||||
@ -583,7 +659,7 @@ pub const Instruction = union(enum) {
|
||||
switch (rt.size()) {
|
||||
32 => {
|
||||
return Instruction{
|
||||
.LoadLiteral = .{
|
||||
.load_literal = .{
|
||||
.rt = rt.id(),
|
||||
.imm19 = imm19,
|
||||
.opc = 0b00,
|
||||
@ -592,7 +668,7 @@ pub const Instruction = union(enum) {
|
||||
},
|
||||
64 => {
|
||||
return Instruction{
|
||||
.LoadLiteral = .{
|
||||
.load_literal = .{
|
||||
.rt = rt.id(),
|
||||
.imm19 = imm19,
|
||||
.opc = 0b01,
|
||||
@ -610,7 +686,7 @@ pub const Instruction = union(enum) {
|
||||
imm16: u16,
|
||||
) Instruction {
|
||||
return Instruction{
|
||||
.ExceptionGeneration = .{
|
||||
.exception_generation = .{
|
||||
.ll = ll,
|
||||
.op2 = op2,
|
||||
.imm16 = imm16,
|
||||
@ -629,7 +705,7 @@ pub const Instruction = union(enum) {
|
||||
assert(rn.size() == 64);
|
||||
|
||||
return Instruction{
|
||||
.UnconditionalBranchRegister = .{
|
||||
.unconditional_branch_register = .{
|
||||
.op4 = op4,
|
||||
.rn = rn.id(),
|
||||
.op3 = op3,
|
||||
@ -644,7 +720,7 @@ pub const Instruction = union(enum) {
|
||||
offset: i28,
|
||||
) Instruction {
|
||||
return Instruction{
|
||||
.UnconditionalBranchImmediate = .{
|
||||
.unconditional_branch_immediate = .{
|
||||
.imm26 = @bitCast(u26, @intCast(i26, offset >> 2)),
|
||||
.op = op,
|
||||
},
|
||||
@ -663,7 +739,7 @@ pub const Instruction = union(enum) {
|
||||
32 => {
|
||||
assert(shift.amount < 32);
|
||||
return Instruction{
|
||||
.LogicalShiftedRegister = .{
|
||||
.logical_shifted_register = .{
|
||||
.rd = rd.id(),
|
||||
.rn = rn.id(),
|
||||
.imm6 = shift.amount,
|
||||
@ -677,7 +753,7 @@ pub const Instruction = union(enum) {
|
||||
},
|
||||
64 => {
|
||||
return Instruction{
|
||||
.LogicalShiftedRegister = .{
|
||||
.logical_shifted_register = .{
|
||||
.rd = rd.id(),
|
||||
.rn = rn.id(),
|
||||
.imm6 = shift.amount,
|
||||
@ -702,7 +778,7 @@ pub const Instruction = union(enum) {
|
||||
shift: bool,
|
||||
) Instruction {
|
||||
return Instruction{
|
||||
.AddSubtractImmediate = .{
|
||||
.add_subtract_immediate = .{
|
||||
.rd = rd.id(),
|
||||
.rn = rn.id(),
|
||||
.imm12 = imm12,
|
||||
@ -718,6 +794,43 @@ pub const Instruction = union(enum) {
|
||||
};
|
||||
}
|
||||
|
||||
fn conditionalBranch(
|
||||
o0: u1,
|
||||
o1: u1,
|
||||
cond: Condition,
|
||||
offset: i21,
|
||||
) Instruction {
|
||||
assert(offset & 0b11 == 0b00);
|
||||
return Instruction{
|
||||
.conditional_branch = .{
|
||||
.cond = @enumToInt(cond),
|
||||
.o0 = o0,
|
||||
.imm19 = @bitCast(u19, @intCast(i19, offset >> 2)),
|
||||
.o1 = o1,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn compareAndBranch(
|
||||
op: u1,
|
||||
rt: Register,
|
||||
offset: i21,
|
||||
) Instruction {
|
||||
assert(offset & 0b11 == 0b00);
|
||||
return Instruction{
|
||||
.compare_and_branch = .{
|
||||
.rt = rt.id(),
|
||||
.imm19 = @bitCast(u19, @intCast(i19, offset >> 2)),
|
||||
.op = op,
|
||||
.sf = switch (rt.size()) {
|
||||
32 => 0b0,
|
||||
64 => 0b1,
|
||||
else => unreachable, // unexpected register size
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// Helper functions for assembly syntax functions
|
||||
|
||||
// Move wide (immediate)
|
||||
@ -756,25 +869,33 @@ pub const Instruction = union(enum) {
|
||||
|
||||
pub fn ldr(rt: Register, args: LdrArgs) Instruction {
|
||||
switch (args) {
|
||||
.register => |info| return loadStoreRegister(rt, info.rn, info.offset, .normal, true),
|
||||
.register => |info| return loadStoreRegister(rt, info.rn, info.offset, .ldr),
|
||||
.literal => |literal| return loadLiteral(rt, literal),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ldrh(rt: Register, rn: Register, args: StrArgs) Instruction {
|
||||
return loadStoreRegister(rt, rn, args.offset, .ldrh);
|
||||
}
|
||||
|
||||
pub fn ldrb(rt: Register, rn: Register, args: StrArgs) Instruction {
|
||||
return loadStoreRegister(rt, rn, args.offset, .ldrb);
|
||||
}
|
||||
|
||||
pub const StrArgs = struct {
|
||||
offset: LoadStoreOffset = LoadStoreOffset.none,
|
||||
};
|
||||
|
||||
pub fn str(rt: Register, rn: Register, args: StrArgs) Instruction {
|
||||
return loadStoreRegister(rt, rn, args.offset, .normal, false);
|
||||
return loadStoreRegister(rt, rn, args.offset, .str);
|
||||
}
|
||||
|
||||
pub fn strh(rt: Register, rn: Register, args: StrArgs) Instruction {
|
||||
return loadStoreRegister(rt, rn, args.offset, .half, false);
|
||||
return loadStoreRegister(rt, rn, args.offset, .strh);
|
||||
}
|
||||
|
||||
pub fn strb(rt: Register, rn: Register, args: StrArgs) Instruction {
|
||||
return loadStoreRegister(rt, rn, args.offset, .byte, false);
|
||||
return loadStoreRegister(rt, rn, args.offset, .strb);
|
||||
}
|
||||
|
||||
// Load or store pair of registers
|
||||
@ -805,19 +926,19 @@ pub const Instruction = union(enum) {
|
||||
};
|
||||
|
||||
pub fn ldp(rt1: Register, rt2: Register, rn: Register, offset: LoadStorePairOffset) Instruction {
|
||||
return loadStorePairOfRegisters(rt1, rt2, rn, offset.offset, @enumToInt(offset.encoding), true);
|
||||
return loadStoreRegisterPair(rt1, rt2, rn, offset.offset, @enumToInt(offset.encoding), true);
|
||||
}
|
||||
|
||||
pub fn ldnp(rt1: Register, rt2: Register, rn: Register, offset: i9) Instruction {
|
||||
return loadStorePairOfRegisters(rt1, rt2, rn, offset, 0, true);
|
||||
return loadStoreRegisterPair(rt1, rt2, rn, offset, 0, true);
|
||||
}
|
||||
|
||||
pub fn stp(rt1: Register, rt2: Register, rn: Register, offset: LoadStorePairOffset) Instruction {
|
||||
return loadStorePairOfRegisters(rt1, rt2, rn, offset.offset, @enumToInt(offset.encoding), false);
|
||||
return loadStoreRegisterPair(rt1, rt2, rn, offset.offset, @enumToInt(offset.encoding), false);
|
||||
}
|
||||
|
||||
pub fn stnp(rt1: Register, rt2: Register, rn: Register, offset: i9) Instruction {
|
||||
return loadStorePairOfRegisters(rt1, rt2, rn, offset, 0, false);
|
||||
return loadStoreRegisterPair(rt1, rt2, rn, offset, 0, false);
|
||||
}
|
||||
|
||||
// Exception generation
|
||||
@ -869,7 +990,7 @@ pub const Instruction = union(enum) {
|
||||
// Nop
|
||||
|
||||
pub fn nop() Instruction {
|
||||
return Instruction{ .NoOperation = .{} };
|
||||
return Instruction{ .no_operation = .{} };
|
||||
}
|
||||
|
||||
// Logical (shifted register)
|
||||
@ -923,6 +1044,22 @@ pub const Instruction = union(enum) {
|
||||
pub fn subs(rd: Register, rn: Register, imm: u12, shift: bool) Instruction {
|
||||
return addSubtractImmediate(0b1, 0b1, rd, rn, imm, shift);
|
||||
}
|
||||
|
||||
// Conditional branch
|
||||
|
||||
pub fn bCond(cond: Condition, offset: i21) Instruction {
|
||||
return conditionalBranch(0b0, 0b0, cond, offset);
|
||||
}
|
||||
|
||||
// Compare and branch
|
||||
|
||||
pub fn cbz(rt: Register, offset: i21) Instruction {
|
||||
return compareAndBranch(0b0, rt, offset);
|
||||
}
|
||||
|
||||
pub fn cbnz(rt: Register, offset: i21) Instruction {
|
||||
return compareAndBranch(0b1, rt, offset);
|
||||
}
|
||||
};
|
||||
|
||||
test {
|
||||
@ -1004,6 +1141,14 @@ test "serialize instructions" {
|
||||
.inst = Instruction.ldr(.x2, .{ .literal = 0x1 }),
|
||||
.expected = 0b01_011_0_00_0000000000000000001_00010,
|
||||
},
|
||||
.{ // ldrh x7, [x4], #0xaa
|
||||
.inst = Instruction.ldrh(.x7, .x4, .{ .offset = Instruction.LoadStoreOffset.imm_post_index(0xaa) }),
|
||||
.expected = 0b01_111_0_00_01_0_010101010_01_00100_00111,
|
||||
},
|
||||
.{ // ldrb x9, [x15, #0xff]!
|
||||
.inst = Instruction.ldrb(.x9, .x15, .{ .offset = Instruction.LoadStoreOffset.imm_pre_index(0xff) }),
|
||||
.expected = 0b00_111_0_00_01_0_011111111_11_01111_01001,
|
||||
},
|
||||
.{ // str x2, [x1]
|
||||
.inst = Instruction.str(.x2, .x1, .{}),
|
||||
.expected = 0b11_111_0_01_00_000000000000_00001_00010,
|
||||
@ -1068,6 +1213,14 @@ test "serialize instructions" {
|
||||
.inst = Instruction.subs(.x0, .x5, 11, true),
|
||||
.expected = 0b1_1_1_100010_1_0000_0000_1011_00101_00000,
|
||||
},
|
||||
.{ // b.hi #-4
|
||||
.inst = Instruction.bCond(.hi, -4),
|
||||
.expected = 0b0101010_0_1111111111111111111_0_1000,
|
||||
},
|
||||
.{ // cbz x10, #40
|
||||
.inst = Instruction.cbz(.x10, 40),
|
||||
.expected = 0b1_011010_0_0000000000000001010_01010,
|
||||
},
|
||||
};
|
||||
|
||||
for (testcases) |case| {
|
||||
|
||||
@ -44,24 +44,36 @@ fn formatTypeAsCIdentifier(
|
||||
var buffer = [1]u8{0} ** 128;
|
||||
// We don't care if it gets cut off, it's still more unique than a number
|
||||
var buf = std.fmt.bufPrint(&buffer, "{}", .{data}) catch &buffer;
|
||||
|
||||
for (buf) |c, i| {
|
||||
switch (c) {
|
||||
0 => return writer.writeAll(buf[0..i]),
|
||||
'a'...'z', 'A'...'Z', '_', '$' => {},
|
||||
'0'...'9' => if (i == 0) {
|
||||
buf[i] = '_';
|
||||
},
|
||||
else => buf[i] = '_',
|
||||
}
|
||||
}
|
||||
return writer.writeAll(buf);
|
||||
return formatIdent(buf, "", .{}, writer);
|
||||
}
|
||||
|
||||
pub fn typeToCIdentifier(t: Type) std.fmt.Formatter(formatTypeAsCIdentifier) {
|
||||
return .{ .data = t };
|
||||
}
|
||||
|
||||
fn formatIdent(
|
||||
ident: []const u8,
|
||||
comptime fmt: []const u8,
|
||||
options: std.fmt.FormatOptions,
|
||||
writer: anytype,
|
||||
) !void {
|
||||
for (ident) |c, i| {
|
||||
switch (c) {
|
||||
'a'...'z', 'A'...'Z', '_' => try writer.writeByte(c),
|
||||
'0'...'9' => if (i == 0) {
|
||||
try writer.print("${x:2}", .{c});
|
||||
} else {
|
||||
try writer.writeByte(c);
|
||||
},
|
||||
else => try writer.print("${x:2}", .{c}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fmtIdent(ident: []const u8) std.fmt.Formatter(formatIdent) {
|
||||
return .{ .data = ident };
|
||||
}
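// Illustrative example (not part of this commit): formatIdent escapes bytes that
// are not valid in a C identifier as '$' plus two hex digits, and also escapes a
// leading digit. The expected string below is derived by hand from the rules above.
test "fmtIdent escapes non-C-identifier bytes" {
    var buf: [32]u8 = undefined;
    const out = try std.fmt.bufPrint(&buf, "{}", .{fmtIdent("3rd-field")});
    std.testing.expectEqualStrings("$33rd$2dfield", out);
}
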
|
||||
|
||||
/// This data is available when outputting .c code for a Module.
|
||||
/// It is not available when generating .h file.
|
||||
pub const Object = struct {
|
||||
@ -160,7 +172,10 @@ pub const DeclGen = struct {
|
||||
val: Value,
|
||||
) error{ OutOfMemory, AnalysisFail }!void {
|
||||
if (val.isUndef()) {
|
||||
return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: properly handle undefined in all cases (with debug safety?)", .{});
|
||||
// This should lower to 0xaa bytes in safe modes, and for unsafe modes should
|
||||
// lower to leaving variables uninitialized (that might need to be implemented
|
||||
// outside of this function).
|
||||
return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement renderValue undef", .{});
|
||||
}
|
||||
switch (t.zigTypeTag()) {
|
||||
.Int => {
|
||||
@ -276,6 +291,31 @@ pub const DeclGen = struct {
|
||||
try writer.writeAll(", .error = 0 }");
|
||||
}
|
||||
},
|
||||
.Enum => {
|
||||
switch (val.tag()) {
|
||||
.enum_field_index => {
|
||||
const field_index = val.castTag(.enum_field_index).?.data;
|
||||
switch (t.tag()) {
|
||||
.enum_simple => return writer.print("{d}", .{field_index}),
|
||||
.enum_full, .enum_nonexhaustive => {
|
||||
const enum_full = t.cast(Type.Payload.EnumFull).?.data;
|
||||
if (enum_full.values.count() != 0) {
|
||||
const tag_val = enum_full.values.entries.items[field_index].key;
|
||||
return dg.renderValue(writer, enum_full.tag_ty, tag_val);
|
||||
} else {
|
||||
return writer.print("{d}", .{field_index});
|
||||
}
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
},
|
||||
else => {
|
||||
var int_tag_ty_buffer: Type.Payload.Bits = undefined;
|
||||
const int_tag_ty = t.intTagType(&int_tag_ty_buffer);
|
||||
return dg.renderValue(writer, int_tag_ty, val);
|
||||
},
|
||||
}
|
||||
},
|
||||
else => |e| return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement value {s}", .{
|
||||
@tagName(e),
|
||||
}),
|
||||
@ -356,6 +396,9 @@ pub const DeclGen = struct {
|
||||
else => unreachable,
|
||||
}
|
||||
},
|
||||
|
||||
.Float => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Float", .{}),
|
||||
|
||||
.Pointer => {
|
||||
if (t.isSlice()) {
|
||||
return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement slices", .{});
|
||||
@ -430,10 +473,59 @@ pub const DeclGen = struct {
|
||||
try w.writeAll(name);
|
||||
dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
|
||||
},
|
||||
.Null, .Undefined => unreachable, // must be const or comptime
|
||||
else => |e| return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type {s}", .{
|
||||
@tagName(e),
|
||||
}),
|
||||
.Struct => {
|
||||
if (dg.typedefs.get(t)) |some| {
|
||||
return w.writeAll(some.name);
|
||||
}
|
||||
const struct_obj = t.castTag(.@"struct").?.data; // Handle 0 bit types elsewhere.
|
||||
const fqn = try struct_obj.getFullyQualifiedName(dg.typedefs.allocator);
|
||||
defer dg.typedefs.allocator.free(fqn);
|
||||
|
||||
var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
|
||||
defer buffer.deinit();
|
||||
|
||||
try buffer.appendSlice("typedef struct {\n");
|
||||
for (struct_obj.fields.entries.items) |entry| {
|
||||
try buffer.append(' ');
|
||||
try dg.renderType(buffer.writer(), entry.value.ty);
|
||||
try buffer.writer().print(" {s};\n", .{fmtIdent(entry.key)});
|
||||
}
|
||||
try buffer.appendSlice("} ");
|
||||
|
||||
const name_start = buffer.items.len;
|
||||
try buffer.writer().print("zig_S_{s};\n", .{fmtIdent(fqn)});
|
||||
|
||||
const rendered = buffer.toOwnedSlice();
|
||||
errdefer dg.typedefs.allocator.free(rendered);
|
||||
const name = rendered[name_start .. rendered.len - 2];
|
||||
|
||||
try dg.typedefs.ensureCapacity(dg.typedefs.capacity() + 1);
|
||||
try w.writeAll(name);
|
||||
dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
|
||||
},
|
||||
.Enum => {
|
||||
// For enums, we simply use the integer tag type.
|
||||
var int_tag_ty_buffer: Type.Payload.Bits = undefined;
|
||||
const int_tag_ty = t.intTagType(&int_tag_ty_buffer);
|
||||
|
||||
try dg.renderType(w, int_tag_ty);
|
||||
},
|
||||
.Union => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Union", .{}),
|
||||
.Fn => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Fn", .{}),
|
||||
.Opaque => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Opaque", .{}),
|
||||
.Frame => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Frame", .{}),
|
||||
.AnyFrame => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type AnyFrame", .{}),
|
||||
.Vector => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Vector", .{}),
|
||||
|
||||
.Null,
|
||||
.Undefined,
|
||||
.EnumLiteral,
|
||||
.ComptimeFloat,
|
||||
.ComptimeInt,
|
||||
.Type,
|
||||
=> unreachable, // must be const or comptime
|
||||
|
||||
.BoundFn => unreachable, // this type will be deleted from the language
|
||||
}
|
||||
}
|
||||
|
||||
@ -525,8 +617,26 @@ pub fn genBody(o: *Object, body: ir.Body) error{ AnalysisFail, OutOfMemory }!voi
|
||||
|
||||
for (body.instructions) |inst| {
|
||||
const result_value = switch (inst.tag) {
|
||||
.constant => unreachable, // excluded from function bodies
|
||||
// TODO use a different strategy for add that communicates to the optimizer
|
||||
// that wrapping is UB.
|
||||
.add => try genBinOp(o, inst.castTag(.add).?, " + "),
|
||||
// TODO make this do wrapping arithmetic for signed ints
|
||||
.addwrap => try genBinOp(o, inst.castTag(.add).?, " + "),
|
||||
// TODO use a different strategy for sub that communicates to the optimizer
|
||||
// that wrapping is UB.
|
||||
.sub => try genBinOp(o, inst.castTag(.sub).?, " - "),
|
||||
// TODO make this do wrapping arithmetic for signed ints
|
||||
.subwrap => try genBinOp(o, inst.castTag(.sub).?, " - "),
|
||||
// TODO use a different strategy for mul that communicates to the optimizer
|
||||
// that wrapping is UB.
|
||||
.mul => try genBinOp(o, inst.castTag(.sub).?, " * "),
|
||||
// TODO make this do wrapping multiplication for signed ints
|
||||
.mulwrap => try genBinOp(o, inst.castTag(.sub).?, " * "),
|
||||
// TODO use a different strategy for div that communicates to the optimizer
|
||||
// that wrapping is UB.
|
||||
.div => try genBinOp(o, inst.castTag(.div).?, " / "),
|
||||
|
||||
.constant => unreachable, // excluded from function bodies
|
||||
.alloc => try genAlloc(o, inst.castTag(.alloc).?),
|
||||
.arg => genArg(o),
|
||||
.assembly => try genAsm(o, inst.castTag(.assembly).?),
|
||||
@ -546,7 +656,6 @@ pub fn genBody(o: *Object, body: ir.Body) error{ AnalysisFail, OutOfMemory }!voi
|
||||
.ret => try genRet(o, inst.castTag(.ret).?),
|
||||
.retvoid => try genRetVoid(o),
|
||||
.store => try genStore(o, inst.castTag(.store).?),
|
||||
.sub => try genBinOp(o, inst.castTag(.sub).?, " - "),
|
||||
.unreach => try genUnreach(o, inst.castTag(.unreach).?),
|
||||
.loop => try genLoop(o, inst.castTag(.loop).?),
|
||||
.condbr => try genCondBr(o, inst.castTag(.condbr).?),
|
||||
@ -567,17 +676,24 @@ pub fn genBody(o: *Object, body: ir.Body) error{ AnalysisFail, OutOfMemory }!voi
|
||||
.wrap_optional => try genWrapOptional(o, inst.castTag(.wrap_optional).?),
|
||||
.optional_payload => try genOptionalPayload(o, inst.castTag(.optional_payload).?),
|
||||
.optional_payload_ptr => try genOptionalPayload(o, inst.castTag(.optional_payload_ptr).?),
|
||||
.ref => try genRef(o, inst.castTag(.ref).?),
|
||||
.struct_field_ptr => try genStructFieldPtr(o, inst.castTag(.struct_field_ptr).?),
|
||||
|
||||
.is_err => try genIsErr(o, inst.castTag(.is_err).?),
|
||||
.is_err_ptr => try genIsErr(o, inst.castTag(.is_err_ptr).?),
|
||||
.error_to_int => try genErrorToInt(o, inst.castTag(.error_to_int).?),
|
||||
.int_to_error => try genIntToError(o, inst.castTag(.int_to_error).?),
|
||||
|
||||
.unwrap_errunion_payload => try genUnwrapErrUnionPay(o, inst.castTag(.unwrap_errunion_payload).?),
|
||||
.unwrap_errunion_err => try genUnwrapErrUnionErr(o, inst.castTag(.unwrap_errunion_err).?),
|
||||
.unwrap_errunion_payload_ptr => try genUnwrapErrUnionPay(o, inst.castTag(.unwrap_errunion_payload_ptr).?),
|
||||
.unwrap_errunion_err_ptr => try genUnwrapErrUnionErr(o, inst.castTag(.unwrap_errunion_err_ptr).?),
|
||||
.wrap_errunion_payload => try genWrapErrUnionPay(o, inst.castTag(.wrap_errunion_payload).?),
|
||||
.wrap_errunion_err => try genWrapErrUnionErr(o, inst.castTag(.wrap_errunion_err).?),
|
||||
else => |e| return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement codegen for {}", .{e}),
|
||||
.br_block_flat => return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement codegen for br_block_flat", .{}),
|
||||
.ptrtoint => return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement codegen for ptrtoint", .{}),
|
||||
.varptr => return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement codegen for varptr", .{}),
|
||||
.floatcast => return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement codegen for floatcast", .{}),
|
||||
};
|
||||
switch (result_value) {
|
||||
.none => {},
|
||||
@ -996,6 +1112,37 @@ fn genOptionalPayload(o: *Object, inst: *Inst.UnOp) !CValue {
|
||||
return local;
|
||||
}
|
||||
|
||||
fn genRef(o: *Object, inst: *Inst.UnOp) !CValue {
|
||||
const writer = o.writer();
|
||||
const operand = try o.resolveInst(inst.operand);
|
||||
|
||||
const local = try o.allocLocal(inst.base.ty, .Const);
|
||||
try writer.writeAll(" = ");
|
||||
try o.writeCValue(writer, operand);
|
||||
try writer.writeAll(";\n");
|
||||
return local;
|
||||
}
|
||||
|
||||
fn genStructFieldPtr(o: *Object, inst: *Inst.StructFieldPtr) !CValue {
|
||||
const writer = o.writer();
|
||||
const struct_ptr = try o.resolveInst(inst.struct_ptr);
|
||||
const struct_obj = inst.struct_ptr.ty.elemType().castTag(.@"struct").?.data;
|
||||
const field_name = struct_obj.fields.entries.items[inst.field_index].key;
|
||||
|
||||
const local = try o.allocLocal(inst.base.ty, .Const);
|
||||
switch (struct_ptr) {
|
||||
.local_ref => |i| {
|
||||
try writer.print(" = &t{d}.{};\n", .{ i, fmtIdent(field_name) });
|
||||
},
|
||||
else => {
|
||||
try writer.writeAll(" = &");
|
||||
try o.writeCValue(writer, struct_ptr);
|
||||
try writer.print("->{};\n", .{fmtIdent(field_name)});
|
||||
},
|
||||
}
|
||||
return local;
|
||||
}
|
||||
|
||||
// *(E!T) -> E NOT *E
|
||||
fn genUnwrapErrUnionErr(o: *Object, inst: *Inst.UnOp) !CValue {
|
||||
const writer = o.writer();
|
||||
@ -1088,7 +1235,7 @@ fn IndentWriter(comptime UnderlyingWriter: type) type {
|
||||
pub const Error = UnderlyingWriter.Error;
|
||||
pub const Writer = std.io.Writer(*Self, Error, write);
|
||||
|
||||
pub const indent_delta = 4;
|
||||
pub const indent_delta = 1;
|
||||
|
||||
underlying_writer: UnderlyingWriter,
|
||||
indent_count: usize = 0,
|
||||
|
||||
@ -76,7 +76,6 @@ pub fn targetTriple(allocator: *Allocator, target: std.Target) ![:0]u8 {
|
||||
.spirv32 => return error.LLVMBackendDoesNotSupportSPIRV,
|
||||
.spirv64 => return error.LLVMBackendDoesNotSupportSPIRV,
|
||||
};
|
||||
// TODO Add a sub-arch for some architectures depending on CPU features.
|
||||
|
||||
const llvm_os = switch (target.os.tag) {
|
||||
.freestanding => "unknown",
|
||||
|
||||
@ -2,6 +2,7 @@ const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ArrayList = std.ArrayList;
|
||||
const assert = std.debug.assert;
|
||||
const testing = std.testing;
|
||||
const leb = std.leb;
|
||||
const mem = std.mem;
|
||||
const wasm = std.wasm;
|
||||
@ -15,9 +16,12 @@ const Value = @import("../value.zig").Value;
|
||||
const Compilation = @import("../Compilation.zig");
|
||||
const AnyMCValue = @import("../codegen.zig").AnyMCValue;
|
||||
const LazySrcLoc = Module.LazySrcLoc;
|
||||
const link = @import("../link.zig");
|
||||
const TypedValue = @import("../TypedValue.zig");
|
||||
|
||||
/// Wasm Value, created when generating an instruction
|
||||
const WValue = union(enum) {
|
||||
/// May be referenced but is unused
|
||||
none: void,
|
||||
/// Index of the local variable
|
||||
local: u32,
|
||||
@ -29,6 +33,450 @@ const WValue = union(enum) {
|
||||
block_idx: u32,
|
||||
};
|
||||
|
||||
/// Wasm ops, but without input/output/signedness information
|
||||
/// Used for `buildOpcode`
|
||||
const Op = enum {
|
||||
@"unreachable",
|
||||
nop,
|
||||
block,
|
||||
loop,
|
||||
@"if",
|
||||
@"else",
|
||||
end,
|
||||
br,
|
||||
br_if,
|
||||
br_table,
|
||||
@"return",
|
||||
call,
|
||||
call_indirect,
|
||||
drop,
|
||||
select,
|
||||
local_get,
|
||||
local_set,
|
||||
local_tee,
|
||||
global_get,
|
||||
global_set,
|
||||
load,
|
||||
store,
|
||||
memory_size,
|
||||
memory_grow,
|
||||
@"const",
|
||||
eqz,
|
||||
eq,
|
||||
ne,
|
||||
lt,
|
||||
gt,
|
||||
le,
|
||||
ge,
|
||||
clz,
|
||||
ctz,
|
||||
popcnt,
|
||||
add,
|
||||
sub,
|
||||
mul,
|
||||
div,
|
||||
rem,
|
||||
@"and",
|
||||
@"or",
|
||||
xor,
|
||||
shl,
|
||||
shr,
|
||||
rotl,
|
||||
rotr,
|
||||
abs,
|
||||
neg,
|
||||
ceil,
|
||||
floor,
|
||||
trunc,
|
||||
nearest,
|
||||
sqrt,
|
||||
min,
|
||||
max,
|
||||
copysign,
|
||||
wrap,
|
||||
convert,
|
||||
demote,
|
||||
promote,
|
||||
reinterpret,
|
||||
extend,
|
||||
};
|
||||
|
||||
/// Contains the settings needed to create an `Opcode` using `buildOpcode`.
|
||||
///
|
||||
/// The fields correspond to the opcode name. Here is an example
|
||||
/// i32_trunc_f32_s
|
||||
/// ^ ^ ^ ^
|
||||
/// | | | |
|
||||
/// valtype1 | | |
|
||||
/// = .i32 | | |
|
||||
/// | | |
|
||||
/// op | |
|
||||
/// = .trunc | |
|
||||
/// | |
|
||||
/// valtype2 |
|
||||
/// = .f32 |
|
||||
/// |
|
||||
/// width |
|
||||
/// = null |
|
||||
/// |
|
||||
/// signed
|
||||
/// = true
|
||||
///
|
||||
/// There can be missing fields, here are some more examples:
|
||||
/// i64_load8_u
|
||||
/// --> .{ .valtype1 = .i64, .op = .load, .width = 8, signed = false }
|
||||
/// i32_mul
|
||||
/// --> .{ .valtype1 = .i32, .op = .trunc }
|
||||
/// nop
|
||||
/// --> .{ .op = .nop }
|
||||
const OpcodeBuildArguments = struct {
|
||||
/// First valtype in the opcode (usually represents the type of the output)
|
||||
valtype1: ?wasm.Valtype = null,
|
||||
/// The operation (e.g. call, unreachable, div, min, sqrt, etc.)
|
||||
op: Op,
|
||||
/// Width of the operation (e.g. 8 for i32_load8_s, 16 for i64_extend16_i32_s)
|
||||
width: ?u8 = null,
|
||||
/// Second valtype in the opcode name (usually represents the type of the input)
|
||||
valtype2: ?wasm.Valtype = null,
|
||||
/// Signedness of the op
|
||||
signedness: ?std.builtin.Signedness = null,
|
||||
};
|
||||
|
||||
/// Helper function that builds an Opcode given the arguments needed
|
||||
fn buildOpcode(args: OpcodeBuildArguments) wasm.Opcode {
|
||||
switch (args.op) {
|
||||
.@"unreachable" => return .@"unreachable",
|
||||
.nop => return .nop,
|
||||
.block => return .block,
|
||||
.loop => return .loop,
|
||||
.@"if" => return .@"if",
|
||||
.@"else" => return .@"else",
|
||||
.end => return .end,
|
||||
.br => return .br,
|
||||
.br_if => return .br_if,
|
||||
.br_table => return .br_table,
|
||||
.@"return" => return .@"return",
|
||||
.call => return .call,
|
||||
.call_indirect => return .call_indirect,
|
||||
.drop => return .drop,
|
||||
.select => return .select,
|
||||
.local_get => return .local_get,
|
||||
.local_set => return .local_set,
|
||||
.local_tee => return .local_tee,
|
||||
.global_get => return .global_get,
|
||||
.global_set => return .global_set,
|
||||
|
||||
.load => if (args.width) |width| switch (width) {
|
||||
8 => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_load8_s else return .i32_load8_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_load8_s else return .i64_load8_u,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
16 => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_load16_s else return .i32_load16_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_load16_s else return .i64_load16_u,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
32 => switch (args.valtype1.?) {
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_load32_s else return .i64_load32_u,
|
||||
.i32, .f32, .f64 => unreachable,
|
||||
},
|
||||
else => unreachable,
|
||||
} else switch (args.valtype1.?) {
|
||||
.i32 => return .i32_load,
|
||||
.i64 => return .i64_load,
|
||||
.f32 => return .f32_load,
|
||||
.f64 => return .f64_load,
|
||||
},
|
||||
.store => if (args.width) |width| {
|
||||
switch (width) {
|
||||
8 => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_store8,
|
||||
.i64 => return .i64_store8,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
16 => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_store16,
|
||||
.i64 => return .i64_store16,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
32 => switch (args.valtype1.?) {
|
||||
.i64 => return .i64_store32,
|
||||
.i32, .f32, .f64 => unreachable,
|
||||
},
|
||||
else => unreachable,
|
||||
}
|
||||
} else {
|
||||
switch (args.valtype1.?) {
|
||||
.i32 => return .i32_store,
|
||||
.i64 => return .i64_store,
|
||||
.f32 => return .f32_store,
|
||||
.f64 => return .f64_store,
|
||||
}
|
||||
},
|
||||
|
||||
.memory_size => return .memory_size,
|
||||
.memory_grow => return .memory_grow,
|
||||
|
||||
.@"const" => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_const,
|
||||
.i64 => return .i64_const,
|
||||
.f32 => return .f32_const,
|
||||
.f64 => return .f64_const,
|
||||
},
|
||||
|
||||
.eqz => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_eqz,
|
||||
.i64 => return .i64_eqz,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.eq => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_eq,
|
||||
.i64 => return .i64_eq,
|
||||
.f32 => return .f32_eq,
|
||||
.f64 => return .f64_eq,
|
||||
},
|
||||
.ne => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_ne,
|
||||
.i64 => return .i64_ne,
|
||||
.f32 => return .f32_ne,
|
||||
.f64 => return .f64_ne,
|
||||
},
|
||||
|
||||
.lt => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_lt_s else return .i32_lt_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_lt_s else return .i64_lt_u,
|
||||
.f32 => return .f32_lt,
|
||||
.f64 => return .f64_lt,
|
||||
},
|
||||
.gt => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_gt_s else return .i32_gt_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_gt_s else return .i64_gt_u,
|
||||
.f32 => return .f32_gt,
|
||||
.f64 => return .f64_gt,
|
||||
},
|
||||
.le => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_le_s else return .i32_le_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_le_s else return .i64_le_u,
|
||||
.f32 => return .f32_le,
|
||||
.f64 => return .f64_le,
|
||||
},
|
||||
.ge => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_ge_s else return .i32_ge_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_ge_s else return .i64_ge_u,
|
||||
.f32 => return .f32_ge,
|
||||
.f64 => return .f64_ge,
|
||||
},
|
||||
|
||||
.clz => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_clz,
|
||||
.i64 => return .i64_clz,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.ctz => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_ctz,
|
||||
.i64 => return .i64_ctz,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.popcnt => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_popcnt,
|
||||
.i64 => return .i64_popcnt,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
|
||||
.add => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_add,
|
||||
.i64 => return .i64_add,
|
||||
.f32 => return .f32_add,
|
||||
.f64 => return .f64_add,
|
||||
},
|
||||
.sub => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_sub,
|
||||
.i64 => return .i64_sub,
|
||||
.f32 => return .f32_sub,
|
||||
.f64 => return .f64_sub,
|
||||
},
|
||||
.mul => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_mul,
|
||||
.i64 => return .i64_mul,
|
||||
.f32 => return .f32_mul,
|
||||
.f64 => return .f64_mul,
|
||||
},
|
||||
|
||||
.div => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_div_s else return .i32_div_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_div_s else return .i64_div_u,
|
||||
.f32 => return .f32_div,
|
||||
.f64 => return .f64_div,
|
||||
},
|
||||
.rem => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_rem_s else return .i32_rem_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_rem_s else return .i64_rem_u,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
|
||||
.@"and" => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_and,
|
||||
.i64 => return .i64_and,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.@"or" => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_or,
|
||||
.i64 => return .i64_or,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.xor => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_xor,
|
||||
.i64 => return .i64_xor,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
|
||||
.shl => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_shl,
|
||||
.i64 => return .i64_shl,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.shr => switch (args.valtype1.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .i32_shr_s else return .i32_shr_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .i64_shr_s else return .i64_shr_u,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.rotl => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_rotl,
|
||||
.i64 => return .i64_rotl,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.rotr => switch (args.valtype1.?) {
|
||||
.i32 => return .i32_rotr,
|
||||
.i64 => return .i64_rotr,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
|
||||
.abs => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_abs,
|
||||
.f64 => return .f64_abs,
|
||||
},
|
||||
.neg => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_neg,
|
||||
.f64 => return .f64_neg,
|
||||
},
|
||||
.ceil => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_ceil,
|
||||
.f64 => return .f64_ceil,
|
||||
},
|
||||
.floor => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_floor,
|
||||
.f64 => return .f64_floor,
|
||||
},
|
||||
.trunc => switch (args.valtype1.?) {
|
||||
.i32 => switch (args.valtype2.?) {
|
||||
.i32 => unreachable,
|
||||
.i64 => unreachable,
|
||||
.f32 => if (args.signedness.? == .signed) return .i32_trunc_f32_s else return .i32_trunc_f32_u,
|
||||
.f64 => if (args.signedness.? == .signed) return .i32_trunc_f64_s else return .i32_trunc_f64_u,
|
||||
},
|
||||
.i64 => unreachable,
|
||||
.f32 => return .f32_trunc,
|
||||
.f64 => return .f64_trunc,
|
||||
},
|
||||
.nearest => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_nearest,
|
||||
.f64 => return .f64_nearest,
|
||||
},
|
||||
.sqrt => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_sqrt,
|
||||
.f64 => return .f64_sqrt,
|
||||
},
|
||||
.min => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_min,
|
||||
.f64 => return .f64_min,
|
||||
},
|
||||
.max => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_max,
|
||||
.f64 => return .f64_max,
|
||||
},
|
||||
.copysign => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => return .f32_copysign,
|
||||
.f64 => return .f64_copysign,
|
||||
},
|
||||
|
||||
.wrap => switch (args.valtype1.?) {
|
||||
.i32 => switch (args.valtype2.?) {
|
||||
.i32 => unreachable,
|
||||
.i64 => return .i32_wrap_i64,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.i64, .f32, .f64 => unreachable,
|
||||
},
|
||||
.convert => switch (args.valtype1.?) {
|
||||
.i32, .i64 => unreachable,
|
||||
.f32 => switch (args.valtype2.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .f32_convert_i32_s else return .f32_convert_i32_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .f32_convert_i64_s else return .f32_convert_i64_u,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
.f64 => switch (args.valtype2.?) {
|
||||
.i32 => if (args.signedness.? == .signed) return .f64_convert_i32_s else return .f64_convert_i32_u,
|
||||
.i64 => if (args.signedness.? == .signed) return .f64_convert_i64_s else return .f64_convert_i64_u,
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
},
|
||||
.demote => if (args.valtype1.? == .f32 and args.valtype2.? == .f64) return .f32_demote_f64 else unreachable,
|
||||
.promote => if (args.valtype1.? == .f64 and args.valtype2.? == .f32) return .f64_promote_f32 else unreachable,
|
||||
.reinterpret => switch (args.valtype1.?) {
|
||||
.i32 => if (args.valtype2.? == .f32) return .i32_reinterpret_f32 else unreachable,
|
||||
.i64 => if (args.valtype2.? == .f64) return .i64_reinterpret_f64 else unreachable,
|
||||
.f32 => if (args.valtype2.? == .i32) return .f32_reinterpret_i32 else unreachable,
|
||||
.f64 => if (args.valtype2.? == .i64) return .f64_reinterpret_i64 else unreachable,
|
||||
},
|
||||
.extend => switch (args.valtype1.?) {
|
||||
.i32 => switch (args.width.?) {
|
||||
8 => if (args.signedness.? == .signed) return .i32_extend8_s else unreachable,
|
||||
16 => if (args.signedness.? == .signed) return .i32_extend16_s else unreachable,
|
||||
else => unreachable,
|
||||
},
|
||||
.i64 => switch (args.width.?) {
|
||||
8 => if (args.signedness.? == .signed) return .i64_extend8_s else unreachable,
|
||||
16 => if (args.signedness.? == .signed) return .i64_extend16_s else unreachable,
|
||||
32 => if (args.signedness.? == .signed) return .i64_extend32_s else unreachable,
|
||||
else => unreachable,
|
||||
},
|
||||
.f32, .f64 => unreachable,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
test "Wasm - buildOpcode" {
// Make sure buildOpcode is referenced, and test some examples
const i32_const = buildOpcode(.{ .op = .@"const", .valtype1 = .i32 });
const end = buildOpcode(.{ .op = .end });
const local_get = buildOpcode(.{ .op = .local_get });
const i64_extend32_s = buildOpcode(.{ .op = .extend, .valtype1 = .i64, .width = 32, .signedness = .signed });
const f64_reinterpret_i64 = buildOpcode(.{ .op = .reinterpret, .valtype1 = .f64, .valtype2 = .i64 });

testing.expectEqual(@as(wasm.Opcode, .i32_const), i32_const);
testing.expectEqual(@as(wasm.Opcode, .end), end);
testing.expectEqual(@as(wasm.Opcode, .local_get), local_get);
testing.expectEqual(@as(wasm.Opcode, .i64_extend32_s), i64_extend32_s);
testing.expectEqual(@as(wasm.Opcode, .f64_reinterpret_i64), f64_reinterpret_i64);
}
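An editorial aside, not part of the commit: the `.extend` mapping above only exists for signed widths because the wasm sign-extension operators reinterpret the low bits of the operand as signed. A minimal sketch, in Zig terms, of what i32.extend8_s computes:

const std = @import("std");

test "what i32.extend8_s computes" {
    const x: i32 = 0xff; // only the low 8 bits matter to the operator
    const extended: i32 = @truncate(i8, x); // reinterpret the low byte as signed, then widen
    std.testing.expectEqual(@as(i32, -1), extended);
}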
|
||||
|
||||
pub const Result = union(enum) {
/// The codegen bytes have been appended to `Context.code`
appended: void,
/// The data is managed externally and is part of the `Result`
externally_managed: []const u8,
};
|
||||
|
||||
/// Hashmap to store generated `WValue` for each `Inst`
|
||||
pub const ValueTable = std.AutoHashMapUnmanaged(*Inst, WValue);
|
||||
|
||||
@ -58,10 +506,14 @@ pub const Context = struct {
|
||||
/// List of all locals' types generated throughout this declaration
|
||||
/// used to emit locals count at start of 'code' section.
|
||||
locals: std.ArrayListUnmanaged(u8),
|
||||
/// The Target we're emitting (used to call intInfo)
|
||||
target: std.Target,
|
||||
|
||||
const InnerError = error{
|
||||
OutOfMemory,
|
||||
CodegenFail,
|
||||
/// Can occur when dereferencing a pointer that points to a `Decl` whose analysis has failed
|
||||
AnalysisFail,
|
||||
};
|
||||
|
||||
pub fn deinit(self: *Context) void {
|
||||
@ -89,17 +541,31 @@ pub const Context = struct {
|
||||
return self.values.get(inst).?; // Instruction does not dominate all uses!
|
||||
}
|
||||
|
||||
/// Using a given `Type`, returns the corresponding wasm value type
|
||||
fn genValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!u8 {
|
||||
return switch (ty.tag()) {
|
||||
.f32 => wasm.valtype(.f32),
|
||||
.f64 => wasm.valtype(.f64),
|
||||
.u32, .i32, .bool => wasm.valtype(.i32),
|
||||
.u64, .i64 => wasm.valtype(.i64),
|
||||
else => self.fail(src, "TODO - Wasm genValtype for type '{s}'", .{ty.tag()}),
|
||||
/// Using a given `Type`, returns the corresponding wasm Valtype
|
||||
fn typeToValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!wasm.Valtype {
|
||||
return switch (ty.zigTypeTag()) {
|
||||
.Float => blk: {
|
||||
const bits = ty.floatBits(self.target);
|
||||
if (bits == 16 or bits == 32) break :blk wasm.Valtype.f32;
|
||||
if (bits == 64) break :blk wasm.Valtype.f64;
|
||||
return self.fail(src, "Float bit size not supported by wasm: '{d}'", .{bits});
|
||||
},
|
||||
.Int => blk: {
|
||||
const info = ty.intInfo(self.target);
|
||||
if (info.bits <= 32) break :blk wasm.Valtype.i32;
|
||||
if (info.bits > 32 and info.bits <= 64) break :blk wasm.Valtype.i64;
|
||||
return self.fail(src, "Integer bit size not supported by wasm: '{d}'", .{info.bits});
|
||||
},
|
||||
.Bool, .Pointer => wasm.Valtype.i32,
|
||||
else => self.fail(src, "TODO - Wasm valtype for type '{s}'", .{ty.tag()}),
|
||||
};
|
||||
}
|
||||
|
||||
/// Using a given `Type`, returns the byte representation of its wasm value type
|
||||
fn genValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!u8 {
|
||||
return wasm.valtype(try self.typeToValtype(src, ty));
|
||||
}
|
||||
|
||||
/// Using a given `Type`, returns the corresponding wasm value type
|
||||
/// Unlike `genValtype`, this also allows `void` to create a block
|
||||
/// with no return type
|
||||
@ -157,59 +623,97 @@ pub const Context = struct {
|
||||
}
|
||||
|
||||
/// Generates the wasm bytecode for the function declaration belonging to `Context`
|
||||
pub fn gen(self: *Context) InnerError!void {
|
||||
assert(self.code.items.len == 0);
|
||||
try self.genFunctype();
|
||||
pub fn gen(self: *Context, typed_value: TypedValue) InnerError!Result {
|
||||
switch (typed_value.ty.zigTypeTag()) {
|
||||
.Fn => {
|
||||
try self.genFunctype();
|
||||
|
||||
// Write instructions
|
||||
// TODO: check for and handle death of instructions
|
||||
const tv = self.decl.typed_value.most_recent.typed_value;
|
||||
const mod_fn = blk: {
|
||||
if (tv.val.castTag(.function)) |func| break :blk func.data;
|
||||
if (tv.val.castTag(.extern_fn)) |ext_fn| return; // don't need codegen for extern functions
|
||||
return self.fail(.{ .node_offset = 0 }, "TODO: Wasm codegen for decl type '{s}'", .{tv.ty.tag()});
|
||||
};
|
||||
// Write instructions
|
||||
// TODO: check for and handle death of instructions
|
||||
const mod_fn = blk: {
|
||||
if (typed_value.val.castTag(.function)) |func| break :blk func.data;
|
||||
if (typed_value.val.castTag(.extern_fn)) |ext_fn| return Result.appended; // don't need code body for extern functions
|
||||
unreachable;
|
||||
};
|
||||
|
||||
// Reserve space to write the size after generating the code as well as space for locals count
|
||||
try self.code.resize(10);
|
||||
// Reserve space to write the size after generating the code as well as space for locals count
|
||||
try self.code.resize(10);
|
||||
|
||||
try self.genBody(mod_fn.body);
|
||||
try self.genBody(mod_fn.body);
|
||||
|
||||
// finally, write our local types at the 'offset' position
|
||||
{
|
||||
leb.writeUnsignedFixed(5, self.code.items[5..10], @intCast(u32, self.locals.items.len));
|
||||
// finally, write our local types at the 'offset' position
|
||||
{
|
||||
leb.writeUnsignedFixed(5, self.code.items[5..10], @intCast(u32, self.locals.items.len));
|
||||
|
||||
// offset into 'code' section where we will put our locals types
|
||||
var local_offset: usize = 10;
|
||||
// offset into 'code' section where we will put our locals types
|
||||
var local_offset: usize = 10;
|
||||
|
||||
// emit the actual locals amount
|
||||
for (self.locals.items) |local| {
|
||||
var buf: [6]u8 = undefined;
|
||||
leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
|
||||
buf[5] = local;
|
||||
try self.code.insertSlice(local_offset, &buf);
|
||||
local_offset += 6;
|
||||
}
|
||||
// emit the actual locals amount
|
||||
for (self.locals.items) |local| {
|
||||
var buf: [6]u8 = undefined;
|
||||
leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
|
||||
buf[5] = local;
|
||||
try self.code.insertSlice(local_offset, &buf);
|
||||
local_offset += 6;
|
||||
}
|
||||
}
|
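Editorial aside, not part of the commit: a small sketch of the fixed-width LEB128 layout this bookkeeping assumes. The count 1 padded to five bytes plus the one-byte valtype is why each local entry, and therefore each later function-index fixup, shifts by exactly six bytes.

const std = @import("std");

test "a local entry occupies six bytes" {
    var buf: [6]u8 = undefined;
    std.leb.writeUnsignedFixed(5, buf[0..5], @as(u32, 1));
    buf[5] = 0x7f; // wasm valtype byte for i32
    std.testing.expectEqualSlices(u8, &[_]u8{ 0x81, 0x80, 0x80, 0x80, 0x00, 0x7f }, &buf);
}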
||||
|
||||
const writer = self.code.writer();
|
||||
try writer.writeByte(wasm.opcode(.end));
|
||||
|
||||
// Fill in the size of the generated code to the reserved space at the
|
||||
// beginning of the buffer.
|
||||
const size = self.code.items.len - 5 + self.decl.fn_link.wasm.idx_refs.items.len * 5;
|
||||
leb.writeUnsignedFixed(5, self.code.items[0..5], @intCast(u32, size));
|
||||
|
||||
// codegen data has been appended to `code`
|
||||
return Result.appended;
|
||||
},
|
||||
.Array => {
|
||||
if (typed_value.val.castTag(.bytes)) |payload| {
|
||||
if (typed_value.ty.sentinel()) |sentinel| {
|
||||
try self.code.appendSlice(payload.data);
|
||||
|
||||
switch (try self.gen(.{
|
||||
.ty = typed_value.ty.elemType(),
|
||||
.val = sentinel,
|
||||
})) {
|
||||
.appended => return Result.appended,
|
||||
.externally_managed => |data| {
|
||||
try self.code.appendSlice(data);
|
||||
return Result.appended;
|
||||
},
|
||||
}
|
||||
}
|
||||
return Result{ .externally_managed = payload.data };
|
||||
} else return self.fail(.{ .node_offset = 0 }, "TODO implement gen for more kinds of arrays", .{});
|
||||
},
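An illustrative aside, not from the commit itself: the sentinel branch above emits the element bytes first and the sentinel value after them, since a sentinel-terminated array stores one extra trailing element:

const std = @import("std");

test "sentinel follows the array payload" {
    const bytes: [2:0]u8 = "hi".*;
    std.testing.expectEqual(@as(usize, 2), bytes.len);
    std.testing.expectEqual(@as(u8, 0), bytes[2]); // the sentinel lives just past len
}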
|
||||
.Int => {
|
||||
const info = typed_value.ty.intInfo(self.target);
|
||||
if (info.bits == 8 and info.signedness == .unsigned) {
|
||||
const int_byte = typed_value.val.toUnsignedInt();
|
||||
try self.code.append(@intCast(u8, int_byte));
|
||||
return Result.appended;
|
||||
}
|
||||
return self.fail(.{ .node_offset = 0 }, "TODO: Implement codegen for int type: '{}'", .{typed_value.ty});
|
||||
},
|
||||
else => |tag| return self.fail(.{ .node_offset = 0 }, "TODO: Implement zig type codegen for type: '{s}'", .{tag}),
|
||||
}
|
||||
|
||||
const writer = self.code.writer();
|
||||
try writer.writeByte(wasm.opcode(.end));
|
||||
|
||||
// Fill in the size of the generated code to the reserved space at the
|
||||
// beginning of the buffer.
|
||||
const size = self.code.items.len - 5 + self.decl.fn_link.wasm.?.idx_refs.items.len * 5;
|
||||
leb.writeUnsignedFixed(5, self.code.items[0..5], @intCast(u32, size));
|
||||
}
|
||||
|
||||
fn genInst(self: *Context, inst: *Inst) InnerError!WValue {
|
||||
return switch (inst.tag) {
|
||||
.add => self.genAdd(inst.castTag(.add).?),
|
||||
.add => self.genBinOp(inst.castTag(.add).?, .add),
|
||||
.alloc => self.genAlloc(inst.castTag(.alloc).?),
|
||||
.arg => self.genArg(inst.castTag(.arg).?),
|
||||
.block => self.genBlock(inst.castTag(.block).?),
|
||||
.breakpoint => self.genBreakpoint(inst.castTag(.breakpoint).?),
|
||||
.br => self.genBr(inst.castTag(.br).?),
|
||||
.call => self.genCall(inst.castTag(.call).?),
|
||||
.bit_or => self.genBinOp(inst.castTag(.bit_or).?, .@"or"),
|
||||
.bit_and => self.genBinOp(inst.castTag(.bit_and).?, .@"and"),
|
||||
.bool_or => self.genBinOp(inst.castTag(.bool_or).?, .@"or"),
|
||||
.bool_and => self.genBinOp(inst.castTag(.bool_and).?, .@"and"),
|
||||
.cmp_eq => self.genCmp(inst.castTag(.cmp_eq).?, .eq),
|
||||
.cmp_gte => self.genCmp(inst.castTag(.cmp_gte).?, .gte),
|
||||
.cmp_gt => self.genCmp(inst.castTag(.cmp_gt).?, .gt),
|
||||
@ -221,10 +725,14 @@ pub const Context = struct {
|
||||
.dbg_stmt => WValue.none,
|
||||
.load => self.genLoad(inst.castTag(.load).?),
|
||||
.loop => self.genLoop(inst.castTag(.loop).?),
|
||||
.mul => self.genBinOp(inst.castTag(.mul).?, .mul),
|
||||
.div => self.genBinOp(inst.castTag(.div).?, .div),
|
||||
.xor => self.genBinOp(inst.castTag(.xor).?, .xor),
|
||||
.not => self.genNot(inst.castTag(.not).?),
|
||||
.ret => self.genRet(inst.castTag(.ret).?),
|
||||
.retvoid => WValue.none,
|
||||
.store => self.genStore(inst.castTag(.store).?),
|
||||
.sub => self.genBinOp(inst.castTag(.sub).?, .sub),
|
||||
.unreach => self.genUnreachable(inst.castTag(.unreach).?),
|
||||
else => self.fail(inst.src, "TODO: Implement wasm inst: {s}", .{inst.tag}),
|
||||
};
|
||||
@ -266,7 +774,7 @@ pub const Context = struct {
|
||||
|
||||
// The function index immediate argument will be filled in using this data
|
||||
// in link.Wasm.flush().
|
||||
try self.decl.fn_link.wasm.?.idx_refs.append(self.gpa, .{
|
||||
try self.decl.fn_link.wasm.idx_refs.append(self.gpa, .{
|
||||
.offset = @intCast(u32, self.code.items.len),
|
||||
.decl = target,
|
||||
});
|
||||
@ -305,56 +813,76 @@ pub const Context = struct {
|
||||
return WValue{ .local = self.local_index };
|
||||
}
|
||||
|
||||
fn genAdd(self: *Context, inst: *Inst.BinOp) InnerError!WValue {
|
||||
fn genBinOp(self: *Context, inst: *Inst.BinOp, op: Op) InnerError!WValue {
|
||||
const lhs = self.resolveInst(inst.lhs);
|
||||
const rhs = self.resolveInst(inst.rhs);
|
||||
|
||||
try self.emitWValue(lhs);
|
||||
try self.emitWValue(rhs);
|
||||
|
||||
const opcode: wasm.Opcode = switch (inst.base.ty.tag()) {
|
||||
.u32, .i32 => .i32_add,
|
||||
.u64, .i64 => .i64_add,
|
||||
.f32 => .f32_add,
|
||||
.f64 => .f64_add,
|
||||
else => return self.fail(inst.base.src, "TODO - Implement wasm genAdd for type '{s}'", .{inst.base.ty.tag()}),
|
||||
};
|
||||
|
||||
const opcode: wasm.Opcode = buildOpcode(.{
|
||||
.op = op,
|
||||
.valtype1 = try self.typeToValtype(inst.base.src, inst.base.ty),
|
||||
.signedness = if (inst.base.ty.isSignedInt()) .signed else .unsigned,
|
||||
});
|
||||
try self.code.append(wasm.opcode(opcode));
|
||||
return .none;
|
||||
}
|
||||
|
||||
fn emitConstant(self: *Context, inst: *Inst.Constant) InnerError!void {
|
||||
const writer = self.code.writer();
|
||||
switch (inst.base.ty.tag()) {
|
||||
.u32 => {
|
||||
try writer.writeByte(wasm.opcode(.i32_const));
|
||||
try leb.writeILEB128(writer, inst.val.toUnsignedInt());
|
||||
switch (inst.base.ty.zigTypeTag()) {
|
||||
.Int => {
|
||||
// write opcode
|
||||
const opcode: wasm.Opcode = buildOpcode(.{
|
||||
.op = .@"const",
|
||||
.valtype1 = try self.typeToValtype(inst.base.src, inst.base.ty),
|
||||
});
|
||||
try writer.writeByte(wasm.opcode(opcode));
|
||||
// write constant
|
||||
switch (inst.base.ty.intInfo(self.target).signedness) {
|
||||
.signed => try leb.writeILEB128(writer, inst.val.toSignedInt()),
|
||||
.unsigned => try leb.writeILEB128(writer, inst.val.toUnsignedInt()),
|
||||
}
|
||||
},
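Editorial aside, not part of the commit: wasm const immediates are signed LEB128 on the wire, which is why both branches above go through writeILEB128. A short sketch of that encoding:

const std = @import("std");

test "signed LEB128 immediates" {
    var buf = std.ArrayList(u8).init(std.testing.allocator);
    defer buf.deinit();
    try std.leb.writeILEB128(buf.writer(), @as(i32, -1));
    std.testing.expectEqualSlices(u8, &[_]u8{0x7f}, buf.items);

    buf.items.len = 0;
    try std.leb.writeILEB128(buf.writer(), @as(i32, 64)); // needs a second byte to keep the sign bit clear
    std.testing.expectEqualSlices(u8, &[_]u8{ 0xc0, 0x00 }, buf.items);
}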
|
||||
.i32, .bool => {
|
||||
.Bool => {
|
||||
// write opcode
|
||||
try writer.writeByte(wasm.opcode(.i32_const));
|
||||
// write constant
|
||||
try leb.writeILEB128(writer, inst.val.toSignedInt());
|
||||
},
|
||||
.u64 => {
|
||||
try writer.writeByte(wasm.opcode(.i64_const));
|
||||
try leb.writeILEB128(writer, inst.val.toUnsignedInt());
|
||||
.Float => {
|
||||
// write opcode
|
||||
const opcode: wasm.Opcode = buildOpcode(.{
|
||||
.op = .@"const",
|
||||
.valtype1 = try self.typeToValtype(inst.base.src, inst.base.ty),
|
||||
});
|
||||
try writer.writeByte(wasm.opcode(opcode));
|
||||
// write constant
|
||||
switch (inst.base.ty.floatBits(self.target)) {
|
||||
0...32 => try writer.writeIntLittle(u32, @bitCast(u32, inst.val.toFloat(f32))),
|
||||
64 => try writer.writeIntLittle(u64, @bitCast(u64, inst.val.toFloat(f64))),
|
||||
else => |bits| return self.fail(inst.base.src, "Wasm TODO: emitConstant for float with {d} bits", .{bits}),
|
||||
}
|
||||
},
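Editorial aside, not part of the commit: the float payload is the raw IEEE-754 bit pattern written little-endian, which is exactly what writeIntLittle of the @bitCast value guarantees (the byte order the removed TODO comment asked for). For example:

const std = @import("std");

test "f32 constant payload bytes" {
    var buf: [4]u8 = undefined;
    std.mem.writeIntLittle(u32, &buf, @bitCast(u32, @as(f32, 1.0)));
    std.testing.expectEqualSlices(u8, &[_]u8{ 0x00, 0x00, 0x80, 0x3f }, &buf);
}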
|
||||
.i64 => {
|
||||
try writer.writeByte(wasm.opcode(.i64_const));
|
||||
try leb.writeILEB128(writer, inst.val.toSignedInt());
|
||||
.Pointer => {
|
||||
if (inst.val.castTag(.decl_ref)) |payload| {
|
||||
const decl = payload.data;
|
||||
|
||||
// offset into the offset table within the 'data' section
|
||||
const ptr_width = self.target.cpu.arch.ptrBitWidth() / 8;
|
||||
try writer.writeByte(wasm.opcode(.i32_const));
|
||||
try leb.writeULEB128(writer, decl.link.wasm.offset_index * ptr_width);
|
||||
|
||||
// memory instruction followed by their memarg immediate
|
||||
// memarg ::== x:u32, y:u32 => {align x, offset y}
|
||||
try writer.writeByte(wasm.opcode(.i32_load));
|
||||
try leb.writeULEB128(writer, @as(u32, 0));
|
||||
try leb.writeULEB128(writer, @as(u32, 0));
|
||||
} else return self.fail(inst.base.src, "Wasm TODO: emitConstant for other const pointer tag {s}", .{inst.val.tag()});
|
||||
},
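Aside for the reader, not part of the commit: the two ULEB128 zeros above form the load's memarg immediate, the alignment exponent followed by a constant offset. With a hypothetical offset table slot of 12, the emitted bytes would look like this:

const std = @import("std");

test "decl_ref lowers to i32.const + i32.load" {
    var code = std.ArrayList(u8).init(std.testing.allocator);
    defer code.deinit();
    const writer = code.writer();
    try writer.writeByte(0x41); // i32.const
    try std.leb.writeULEB128(writer, @as(u32, 12)); // hypothetical offset_index * ptr_width
    try writer.writeByte(0x28); // i32.load
    try std.leb.writeULEB128(writer, @as(u32, 0)); // memarg: alignment exponent
    try std.leb.writeULEB128(writer, @as(u32, 0)); // memarg: offset
    std.testing.expectEqualSlices(u8, &[_]u8{ 0x41, 12, 0x28, 0, 0 }, code.items);
}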
|
||||
.f32 => {
|
||||
try writer.writeByte(wasm.opcode(.f32_const));
|
||||
// TODO: enforce LE byte order
|
||||
try writer.writeAll(mem.asBytes(&inst.val.toFloat(f32)));
|
||||
},
|
||||
.f64 => {
|
||||
try writer.writeByte(wasm.opcode(.f64_const));
|
||||
// TODO: enforce LE byte order
|
||||
try writer.writeAll(mem.asBytes(&inst.val.toFloat(f64)));
|
||||
},
|
||||
.void => {},
|
||||
else => |ty| return self.fail(inst.base.src, "Wasm TODO: emitConstant for type {s}", .{ty}),
|
||||
.Void => {},
|
||||
else => |ty| return self.fail(inst.base.src, "Wasm TODO: emitConstant for zigTypeTag {s}", .{ty}),
|
||||
}
|
||||
}
|
||||
|
||||
@ -455,62 +983,18 @@ pub const Context = struct {
|
||||
try self.emitWValue(lhs);
|
||||
try self.emitWValue(rhs);
|
||||
|
||||
const opcode_maybe: ?wasm.Opcode = switch (op) {
|
||||
.lt => @as(?wasm.Opcode, switch (ty) {
|
||||
.i32 => .i32_lt_s,
|
||||
.u32 => .i32_lt_u,
|
||||
.i64 => .i64_lt_s,
|
||||
.u64 => .i64_lt_u,
|
||||
.f32 => .f32_lt,
|
||||
.f64 => .f64_lt,
|
||||
else => null,
|
||||
}),
|
||||
.lte => @as(?wasm.Opcode, switch (ty) {
|
||||
.i32 => .i32_le_s,
|
||||
.u32 => .i32_le_u,
|
||||
.i64 => .i64_le_s,
|
||||
.u64 => .i64_le_u,
|
||||
.f32 => .f32_le,
|
||||
.f64 => .f64_le,
|
||||
else => null,
|
||||
}),
|
||||
.eq => @as(?wasm.Opcode, switch (ty) {
|
||||
.i32, .u32 => .i32_eq,
|
||||
.i64, .u64 => .i64_eq,
|
||||
.f32 => .f32_eq,
|
||||
.f64 => .f64_eq,
|
||||
else => null,
|
||||
}),
|
||||
.gte => @as(?wasm.Opcode, switch (ty) {
|
||||
.i32 => .i32_ge_s,
|
||||
.u32 => .i32_ge_u,
|
||||
.i64 => .i64_ge_s,
|
||||
.u64 => .i64_ge_u,
|
||||
.f32 => .f32_ge,
|
||||
.f64 => .f64_ge,
|
||||
else => null,
|
||||
}),
|
||||
.gt => @as(?wasm.Opcode, switch (ty) {
|
||||
.i32 => .i32_gt_s,
|
||||
.u32 => .i32_gt_u,
|
||||
.i64 => .i64_gt_s,
|
||||
.u64 => .i64_gt_u,
|
||||
.f32 => .f32_gt,
|
||||
.f64 => .f64_gt,
|
||||
else => null,
|
||||
}),
|
||||
.neq => @as(?wasm.Opcode, switch (ty) {
|
||||
.i32, .u32 => .i32_ne,
|
||||
.i64, .u64 => .i64_ne,
|
||||
.f32 => .f32_ne,
|
||||
.f64 => .f64_ne,
|
||||
else => null,
|
||||
}),
|
||||
};
|
||||
|
||||
const opcode = opcode_maybe orelse
|
||||
return self.fail(inst.base.src, "TODO - Wasm genCmp for type '{s}' and operator '{s}'", .{ ty, @tagName(op) });
|
||||
|
||||
const opcode: wasm.Opcode = buildOpcode(.{
|
||||
.valtype1 = try self.typeToValtype(inst.base.src, inst.lhs.ty),
|
||||
.op = switch (op) {
|
||||
.lt => .lt,
|
||||
.lte => .le,
|
||||
.eq => .eq,
|
||||
.neq => .ne,
|
||||
.gte => .ge,
|
||||
.gt => .gt,
|
||||
},
|
||||
.signedness = inst.lhs.ty.intInfo(self.target).signedness,
|
||||
});
|
||||
try self.code.append(wasm.opcode(opcode));
|
||||
return WValue{ .code_offset = offset };
|
||||
}
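Editorial aside, not part of the commit: the comparison opcode has to carry the operand signedness because the same 32-bit pattern orders differently when read unsigned versus signed:

const std = @import("std");

test "signedness changes the comparison result" {
    const bits: u32 = 0xFFFF_FFFF;
    std.testing.expect(bits > 1); // i32.gt_u sees the largest unsigned value
    std.testing.expect(@bitCast(i32, bits) < 1); // i32.gt_s sees -1
}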
|
||||
|
||||
@ -115,6 +115,7 @@ pub const Inst = struct {
|
||||
unreach,
|
||||
mul,
|
||||
mulwrap,
|
||||
div,
|
||||
not,
|
||||
floatcast,
|
||||
intcast,
|
||||
@ -181,6 +182,7 @@ pub const Inst = struct {
|
||||
.subwrap,
|
||||
.mul,
|
||||
.mulwrap,
|
||||
.div,
|
||||
.cmp_lt,
|
||||
.cmp_lte,
|
||||
.cmp_eq,
|
||||
@ -752,6 +754,7 @@ const DumpTzir = struct {
|
||||
.subwrap,
|
||||
.mul,
|
||||
.mulwrap,
|
||||
.div,
|
||||
.cmp_lt,
|
||||
.cmp_lte,
|
||||
.cmp_eq,
|
||||
@ -891,6 +894,7 @@ const DumpTzir = struct {
|
||||
.subwrap,
|
||||
.mul,
|
||||
.mulwrap,
|
||||
.div,
|
||||
.cmp_lt,
|
||||
.cmp_lte,
|
||||
.cmp_eq,
|
||||
|
||||
@ -138,7 +138,7 @@ pub const File = struct {
|
||||
coff: Coff.TextBlock,
|
||||
macho: MachO.TextBlock,
|
||||
c: C.DeclBlock,
|
||||
wasm: void,
|
||||
wasm: Wasm.DeclBlock,
|
||||
spirv: void,
|
||||
};
|
||||
|
||||
@ -147,7 +147,7 @@ pub const File = struct {
|
||||
coff: Coff.SrcFn,
|
||||
macho: MachO.SrcFn,
|
||||
c: C.FnBlock,
|
||||
wasm: ?Wasm.FnData,
|
||||
wasm: Wasm.FnData,
|
||||
spirv: SpirV.FnData,
|
||||
};
|
||||
|
||||
@ -328,7 +328,8 @@ pub const File = struct {
|
||||
.elf => return @fieldParentPtr(Elf, "base", base).allocateDeclIndexes(decl),
|
||||
.macho => return @fieldParentPtr(MachO, "base", base).allocateDeclIndexes(decl),
|
||||
.c => return @fieldParentPtr(C, "base", base).allocateDeclIndexes(decl),
|
||||
.wasm, .spirv => {},
|
||||
.wasm => return @fieldParentPtr(Wasm, "base", base).allocateDeclIndexes(decl),
|
||||
.spirv => {},
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -2228,10 +2228,9 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const file_ast_decls = tree.rootDecls();
|
||||
// TODO Look into improving the performance here by adding a token-index-to-line
|
||||
// lookup table. Currently this involves scanning over the source code for newlines.
|
||||
const fn_decl = file_ast_decls[decl.src_index];
|
||||
const fn_decl = decl.src_node;
|
||||
assert(node_tags[fn_decl] == .fn_decl);
|
||||
const block = node_datas[fn_decl].rhs;
|
||||
const lbrace = tree.firstToken(block);
|
||||
@ -2755,10 +2754,9 @@ pub fn updateDeclLineNumber(self: *Elf, module: *Module, decl: *const Module.Dec
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const file_ast_decls = tree.rootDecls();
|
||||
// TODO Look into improving the performance here by adding a token-index-to-line
|
||||
// lookup table. Currently this involves scanning over the source code for newlines.
|
||||
const fn_decl = file_ast_decls[decl.src_index];
|
||||
const fn_decl = decl.src_node;
|
||||
assert(node_tags[fn_decl] == .fn_decl);
|
||||
const block = node_datas[fn_decl].rhs;
|
||||
const lbrace = tree.firstToken(block);
|
||||
|
||||
@ -1256,7 +1256,7 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
|
||||
const inst = code_buffer.items[fixup.offset..][0..4];
|
||||
var parsed = mem.bytesAsValue(meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.PCRelativeAddress,
|
||||
aarch64.Instruction.pc_relative_address,
|
||||
), inst);
|
||||
const this_page = @intCast(i32, this_addr >> 12);
|
||||
const target_page = @intCast(i32, target_addr >> 12);
|
||||
@ -1268,7 +1268,7 @@ pub fn updateDecl(self: *MachO, module: *Module, decl: *Module.Decl) !void {
|
||||
const inst = code_buffer.items[fixup.offset + 4 ..][0..4];
|
||||
var parsed = mem.bytesAsValue(meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.LoadStoreRegister,
|
||||
aarch64.Instruction.load_store_register,
|
||||
), inst);
|
||||
const narrowed = @truncate(u12, target_addr);
|
||||
const offset = try math.divExact(u12, narrowed, 8);
|
||||
|
||||
@ -909,10 +909,9 @@ pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const M
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const file_ast_decls = tree.rootDecls();
|
||||
// TODO Look into improving the performance here by adding a token-index-to-line
|
||||
// lookup table. Currently this involves scanning over the source code for newlines.
|
||||
const fn_decl = file_ast_decls[decl.src_index];
|
||||
const fn_decl = decl.src_node;
|
||||
assert(node_tags[fn_decl] == .fn_decl);
|
||||
const block = node_datas[fn_decl].rhs;
|
||||
const lbrace = tree.firstToken(block);
|
||||
@ -959,10 +958,9 @@ pub fn initDeclDebugBuffers(
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const token_starts = tree.tokens.items(.start);
|
||||
|
||||
const file_ast_decls = tree.rootDecls();
|
||||
// TODO Look into improving the performance here by adding a token-index-to-line
|
||||
// lookup table. Currently this involves scanning over the source code for newlines.
|
||||
const fn_decl = file_ast_decls[decl.src_index];
|
||||
const fn_decl = decl.src_node;
|
||||
assert(node_tags[fn_decl] == .fn_decl);
|
||||
const block = node_datas[fn_decl].rhs;
|
||||
const lbrace = tree.firstToken(block);
|
||||
|
||||
@ -1668,7 +1668,7 @@ fn doRelocs(self: *Zld) !void {
|
||||
var parsed = mem.bytesAsValue(
|
||||
meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.UnconditionalBranchImmediate,
|
||||
aarch64.Instruction.unconditional_branch_immediate,
|
||||
),
|
||||
inst,
|
||||
);
|
||||
@ -1688,7 +1688,7 @@ fn doRelocs(self: *Zld) !void {
|
||||
var parsed = mem.bytesAsValue(
|
||||
meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.PCRelativeAddress,
|
||||
aarch64.Instruction.pc_relative_address,
|
||||
),
|
||||
inst,
|
||||
);
|
||||
@ -1706,7 +1706,7 @@ fn doRelocs(self: *Zld) !void {
|
||||
var parsed = mem.bytesAsValue(
|
||||
meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.AddSubtractImmediate,
|
||||
aarch64.Instruction.add_subtract_immediate,
|
||||
),
|
||||
inst,
|
||||
);
|
||||
@ -1719,7 +1719,7 @@ fn doRelocs(self: *Zld) !void {
|
||||
var parsed = mem.bytesAsValue(
|
||||
meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.LoadStoreRegister,
|
||||
aarch64.Instruction.load_store_register,
|
||||
),
|
||||
inst,
|
||||
);
|
||||
@ -1774,7 +1774,7 @@ fn doRelocs(self: *Zld) !void {
|
||||
const curr = mem.bytesAsValue(
|
||||
meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.AddSubtractImmediate,
|
||||
aarch64.Instruction.add_subtract_immediate,
|
||||
),
|
||||
inst,
|
||||
);
|
||||
@ -1783,7 +1783,7 @@ fn doRelocs(self: *Zld) !void {
|
||||
const curr = mem.bytesAsValue(
|
||||
meta.TagPayload(
|
||||
aarch64.Instruction,
|
||||
aarch64.Instruction.LoadStoreRegister,
|
||||
aarch64.Instruction.load_store_register,
|
||||
),
|
||||
inst,
|
||||
);
|
||||
|
||||
@ -16,21 +16,11 @@ const link = @import("../link.zig");
|
||||
const trace = @import("../tracy.zig").trace;
|
||||
const build_options = @import("build_options");
|
||||
const Cache = @import("../Cache.zig");
|
||||
const TypedValue = @import("../TypedValue.zig");
|
||||
|
||||
pub const base_tag = link.File.Tag.wasm;
|
||||
|
||||
pub const FnData = struct {
|
||||
/// Generated code for the type of the function
|
||||
functype: std.ArrayListUnmanaged(u8) = .{},
|
||||
/// Generated code for the body of the function
|
||||
code: std.ArrayListUnmanaged(u8) = .{},
|
||||
/// Locations in the generated code where function indexes must be filled in.
|
||||
/// This must be kept ordered by offset.
|
||||
idx_refs: std.ArrayListUnmanaged(struct { offset: u32, decl: *Module.Decl }) = .{},
|
||||
};
|
||||
|
||||
base: link.File,
|
||||
|
||||
/// List of all function Decls to be written to the output file. The index of
|
||||
/// each Decl in this list at the time of writing the binary is used as the
|
||||
/// function index. In the event where ext_funcs' size is not 0, the index of
|
||||
@ -45,6 +35,77 @@ ext_funcs: std.ArrayListUnmanaged(*Module.Decl) = .{},
|
||||
/// to support existing code.
|
||||
/// TODO: Allow setting this through a flag?
|
||||
host_name: []const u8 = "env",
|
||||
/// The last `DeclBlock` that was initialized will be saved here.
|
||||
last_block: ?*DeclBlock = null,
|
||||
/// Table of offsets; each element is the offset into the 'data' section
/// where the corresponding decl's data lives
|
||||
offset_table: std.ArrayListUnmanaged(u32) = .{},
|
||||
/// List of offset indexes which are free to be used for new decls.
|
||||
/// Each element's value points to an index into the offset_table.
|
||||
offset_table_free_list: std.ArrayListUnmanaged(u32) = .{},
|
||||
/// List of all `Decl` that are currently alive.
|
||||
/// This is meant for bookkeeping so we can safely clean up all codegen memory
|
||||
/// when calling `deinit`
|
||||
symbols: std.ArrayListUnmanaged(*Module.Decl) = .{},
|
||||
|
||||
pub const FnData = struct {
|
||||
/// Generated code for the type of the function
|
||||
functype: std.ArrayListUnmanaged(u8),
|
||||
/// Generated code for the body of the function
|
||||
code: std.ArrayListUnmanaged(u8),
|
||||
/// Locations in the generated code where function indexes must be filled in.
|
||||
/// This must be kept ordered by offset.
|
||||
idx_refs: std.ArrayListUnmanaged(struct { offset: u32, decl: *Module.Decl }),
|
||||
|
||||
pub const empty: FnData = .{
|
||||
.functype = .{},
|
||||
.code = .{},
|
||||
.idx_refs = .{},
|
||||
};
|
||||
};
|
||||
|
||||
pub const DeclBlock = struct {
|
||||
/// Determines whether the `DeclBlock` has been initialized for codegen.
|
||||
init: bool,
|
||||
/// Index into the `symbols` list.
|
||||
symbol_index: u32,
|
||||
/// Index into the offset table
|
||||
offset_index: u32,
|
||||
/// The size of the block and how large part of the data section it occupies.
|
||||
/// Will be 0 when the Decl will not live inside the data section and `data` will be undefined.
|
||||
size: u32,
|
||||
/// Points to the previous and next blocks.
|
||||
/// Can be used to find the total size, and used to calculate the `offset` based on the previous block.
|
||||
prev: ?*DeclBlock,
|
||||
next: ?*DeclBlock,
|
||||
/// Pointer to data that will be written to the 'data' section.
|
||||
/// This data either lives in `FnData.code` or is externally managed.
|
||||
/// For data that does not live inside the 'data' section, this field will be undefined. (size == 0).
|
||||
data: [*]const u8,
|
||||
|
||||
pub const empty: DeclBlock = .{
|
||||
.init = false,
|
||||
.symbol_index = 0,
|
||||
.offset_index = 0,
|
||||
.size = 0,
|
||||
.prev = null,
|
||||
.next = null,
|
||||
.data = undefined,
|
||||
};
|
||||
|
||||
/// Unplugs the `DeclBlock` from the chain
|
||||
fn unplug(self: *DeclBlock) void {
|
||||
if (self.prev) |prev| {
|
||||
prev.next = self.next;
|
||||
}
|
||||
|
||||
if (self.next) |next| {
|
||||
next.prev = self.prev;
|
||||
}
|
||||
self.next = null;
|
||||
self.prev = null;
|
||||
}
|
||||
};
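Editorial aside, not part of the commit: unplug is plain doubly-linked-list surgery; the same idea sketched on a hypothetical standalone Node type:

const std = @import("std");

const Node = struct {
    prev: ?*Node = null,
    next: ?*Node = null,

    fn unplug(self: *Node) void {
        if (self.prev) |prev| prev.next = self.next;
        if (self.next) |next| next.prev = self.prev;
        self.next = null;
        self.prev = null;
    }
};

test "unplugging keeps the neighbours linked" {
    var a = Node{};
    var b = Node{};
    var c = Node{};
    a.next = &b;
    b.prev = &a;
    b.next = &c;
    c.prev = &b;

    b.unplug();

    std.testing.expectEqual(@as(?*Node, &c), a.next);
    std.testing.expectEqual(@as(?*Node, &a), c.prev);
}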
|
||||
|
||||
pub fn openPath(allocator: *Allocator, sub_path: []const u8, options: link.Options) !*Wasm {
|
||||
assert(options.object_format == .wasm);
|
||||
@ -52,7 +113,7 @@ pub fn openPath(allocator: *Allocator, sub_path: []const u8, options: link.Optio
|
||||
if (options.use_llvm) return error.LLVM_BackendIsTODO_ForWasm; // TODO
|
||||
if (options.use_lld) return error.LLD_LinkingIsTODO_ForWasm; // TODO
|
||||
|
||||
// TODO: read the file and keep vaild parts instead of truncating
|
||||
// TODO: read the file and keep valid parts instead of truncating
|
||||
const file = try options.emit.?.directory.handle.createFile(sub_path, .{ .truncate = true, .read = true });
|
||||
errdefer file.close();
|
||||
|
||||
@ -80,58 +141,76 @@ pub fn createEmpty(gpa: *Allocator, options: link.Options) !*Wasm {
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Wasm) void {
|
||||
for (self.funcs.items) |decl| {
|
||||
decl.fn_link.wasm.?.functype.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.?.code.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator);
|
||||
}
|
||||
for (self.ext_funcs.items) |decl| {
|
||||
decl.fn_link.wasm.?.functype.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.?.code.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator);
|
||||
for (self.symbols.items) |decl| {
|
||||
decl.fn_link.wasm.functype.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.code.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.idx_refs.deinit(self.base.allocator);
|
||||
}
|
||||
|
||||
self.funcs.deinit(self.base.allocator);
|
||||
self.ext_funcs.deinit(self.base.allocator);
|
||||
self.offset_table.deinit(self.base.allocator);
|
||||
self.offset_table_free_list.deinit(self.base.allocator);
|
||||
self.symbols.deinit(self.base.allocator);
|
||||
}
|
||||
|
||||
pub fn allocateDeclIndexes(self: *Wasm, decl: *Module.Decl) !void {
|
||||
if (decl.link.wasm.init) return;
|
||||
|
||||
try self.offset_table.ensureCapacity(self.base.allocator, self.offset_table.items.len + 1);
|
||||
try self.symbols.ensureCapacity(self.base.allocator, self.symbols.items.len + 1);
|
||||
|
||||
const block = &decl.link.wasm;
|
||||
block.init = true;
|
||||
|
||||
block.symbol_index = @intCast(u32, self.symbols.items.len);
|
||||
self.symbols.appendAssumeCapacity(decl);
|
||||
|
||||
if (self.offset_table_free_list.popOrNull()) |index| {
|
||||
block.offset_index = index;
|
||||
} else {
|
||||
block.offset_index = @intCast(u32, self.offset_table.items.len);
|
||||
_ = self.offset_table.addOneAssumeCapacity();
|
||||
}
|
||||
|
||||
self.offset_table.items[block.offset_index] = 0;
|
||||
|
||||
const typed_value = decl.typed_value.most_recent.typed_value;
|
||||
if (typed_value.ty.zigTypeTag() == .Fn) {
|
||||
switch (typed_value.val.tag()) {
|
||||
// dependent on function type, appends it to the correct list
|
||||
.function => try self.funcs.append(self.base.allocator, decl),
|
||||
.extern_fn => try self.ext_funcs.append(self.base.allocator, decl),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Generate code for the Decl, storing it in memory to be later written to
|
||||
// the file on flush().
|
||||
pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
|
||||
std.debug.assert(decl.link.wasm.init); // Must call allocateDeclIndexes()
|
||||
|
||||
const typed_value = decl.typed_value.most_recent.typed_value;
|
||||
if (typed_value.ty.zigTypeTag() != .Fn)
|
||||
return error.TODOImplementNonFnDeclsForWasm;
|
||||
|
||||
if (decl.fn_link.wasm) |*fn_data| {
|
||||
fn_data.functype.items.len = 0;
|
||||
fn_data.code.items.len = 0;
|
||||
fn_data.idx_refs.items.len = 0;
|
||||
} else {
|
||||
decl.fn_link.wasm = .{};
|
||||
// dependent on function type, appends it to the correct list
|
||||
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
|
||||
.function => try self.funcs.append(self.base.allocator, decl),
|
||||
.extern_fn => try self.ext_funcs.append(self.base.allocator, decl),
|
||||
else => return error.TODOImplementNonFnDeclsForWasm,
|
||||
}
|
||||
}
|
||||
const fn_data = &decl.fn_link.wasm.?;
|
||||
|
||||
var managed_functype = fn_data.functype.toManaged(self.base.allocator);
|
||||
var managed_code = fn_data.code.toManaged(self.base.allocator);
|
||||
const fn_data = &decl.fn_link.wasm;
|
||||
fn_data.functype.items.len = 0;
|
||||
fn_data.code.items.len = 0;
|
||||
fn_data.idx_refs.items.len = 0;
|
||||
|
||||
var context = codegen.Context{
|
||||
.gpa = self.base.allocator,
|
||||
.values = .{},
|
||||
.code = managed_code,
|
||||
.func_type_data = managed_functype,
|
||||
.code = fn_data.code.toManaged(self.base.allocator),
|
||||
.func_type_data = fn_data.functype.toManaged(self.base.allocator),
|
||||
.decl = decl,
|
||||
.err_msg = undefined,
|
||||
.locals = .{},
|
||||
.target = self.base.options.target,
|
||||
};
|
||||
defer context.deinit();
|
||||
|
||||
// generate the 'code' section for the function declaration
|
||||
context.gen() catch |err| switch (err) {
|
||||
const result = context.gen(typed_value) catch |err| switch (err) {
|
||||
error.CodegenFail => {
|
||||
decl.analysis = .codegen_failure;
|
||||
try module.failed_decls.put(module.gpa, decl, context.err_msg);
|
||||
@ -140,15 +219,38 @@ pub fn updateDecl(self: *Wasm, module: *Module, decl: *Module.Decl) !void {
|
||||
else => |e| return err,
|
||||
};
|
||||
|
||||
// as locals are patched afterwards, the offsets of funcidx's are off,
|
||||
// here we update them to correct them
|
||||
for (decl.fn_link.wasm.?.idx_refs.items) |*func| {
|
||||
// For each local, add 6 bytes (count + type)
|
||||
func.offset += @intCast(u32, context.locals.items.len * 6);
|
||||
const code: []const u8 = switch (result) {
|
||||
.appended => @as([]const u8, context.code.items),
|
||||
.externally_managed => |payload| payload,
|
||||
};
|
||||
|
||||
fn_data.code = context.code.toUnmanaged();
|
||||
fn_data.functype = context.func_type_data.toUnmanaged();
|
||||
|
||||
const block = &decl.link.wasm;
|
||||
if (typed_value.ty.zigTypeTag() == .Fn) {
|
||||
// as locals are patched afterwards, the offsets of funcidx's are off,
|
||||
// here we update them to correct them
|
||||
for (fn_data.idx_refs.items) |*func| {
|
||||
// For each local, add 6 bytes (count + type)
|
||||
func.offset += @intCast(u32, context.locals.items.len * 6);
|
||||
}
|
||||
} else {
|
||||
block.size = @intCast(u32, code.len);
|
||||
block.data = code.ptr;
|
||||
}
|
||||
|
||||
fn_data.functype = context.func_type_data.toUnmanaged();
|
||||
fn_data.code = context.code.toUnmanaged();
|
||||
// If we're updating an existing decl, unplug it first
|
||||
// to avoid infinite loops due to earlier links
|
||||
block.unplug();
|
||||
|
||||
if (self.last_block) |last| {
|
||||
if (last != block) {
|
||||
last.next = block;
|
||||
block.prev = last;
|
||||
}
|
||||
}
|
||||
self.last_block = block;
|
||||
}
|
||||
|
||||
pub fn updateDeclExports(
|
||||
@ -159,18 +261,34 @@ pub fn updateDeclExports(
|
||||
) !void {}
|
||||
|
||||
pub fn freeDecl(self: *Wasm, decl: *Module.Decl) void {
|
||||
// TODO: remove this assert when non-function Decls are implemented
|
||||
assert(decl.typed_value.most_recent.typed_value.ty.zigTypeTag() == .Fn);
|
||||
const func_idx = self.getFuncidx(decl).?;
|
||||
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
|
||||
.function => _ = self.funcs.swapRemove(func_idx),
|
||||
.extern_fn => _ = self.ext_funcs.swapRemove(func_idx),
|
||||
else => unreachable,
|
||||
if (self.getFuncidx(decl)) |func_idx| {
|
||||
switch (decl.typed_value.most_recent.typed_value.val.tag()) {
|
||||
.function => _ = self.funcs.swapRemove(func_idx),
|
||||
.extern_fn => _ = self.ext_funcs.swapRemove(func_idx),
|
||||
else => unreachable,
|
||||
}
|
||||
}
|
||||
decl.fn_link.wasm.?.functype.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.?.code.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.?.idx_refs.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm = null;
|
||||
const block = &decl.link.wasm;
|
||||
|
||||
if (self.last_block == block) {
|
||||
self.last_block = block.prev;
|
||||
}
|
||||
|
||||
block.unplug();
|
||||
|
||||
self.offset_table_free_list.append(self.base.allocator, decl.link.wasm.offset_index) catch {};
|
||||
_ = self.symbols.swapRemove(block.symbol_index);
|
||||
|
||||
// update symbol_index as we swap removed the last symbol into the removed's position
|
||||
if (block.symbol_index < self.symbols.items.len)
|
||||
self.symbols.items[block.symbol_index].link.wasm.symbol_index = block.symbol_index;
|
||||
|
||||
block.init = false;
|
||||
|
||||
decl.fn_link.wasm.functype.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.code.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm.idx_refs.deinit(self.base.allocator);
|
||||
decl.fn_link.wasm = undefined;
|
||||
}
|
||||
|
||||
pub fn flush(self: *Wasm, comp: *Compilation) !void {
|
||||
@ -187,6 +305,25 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
|
||||
|
||||
const file = self.base.file.?;
|
||||
const header_size = 5 + 1;
|
||||
// ptr_width in bytes
|
||||
const ptr_width = self.base.options.target.cpu.arch.ptrBitWidth() / 8;
|
||||
// The size of the offset table in bytes
|
||||
// The table contains all decls with their corresponding offsets into
|
||||
// the 'data' section
|
||||
const offset_table_size = @intCast(u32, self.offset_table.items.len * ptr_width);
|
||||
|
||||
// The size of the data, this together with `offset_table_size` amounts to the
|
||||
// total size of the 'data' section
|
||||
var first_decl: ?*DeclBlock = null;
|
||||
const data_size: u32 = if (self.last_block) |last| blk: {
|
||||
var size = last.size;
|
||||
var cur = last;
|
||||
while (cur.prev) |prev| : (cur = prev) {
|
||||
size += prev.size;
|
||||
}
|
||||
first_decl = cur;
|
||||
break :blk size;
|
||||
} else 0;
|
||||
|
||||
// No need to rewrite the magic/version header
|
||||
try file.setEndPos(@sizeOf(@TypeOf(wasm.magic ++ wasm.version)));
|
||||
@ -198,8 +335,8 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
|
||||
|
||||
// extern functions are defined in the wasm binary first through the `import`
|
||||
// section, so define their func types first
|
||||
for (self.ext_funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.?.functype.items);
|
||||
for (self.funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.?.functype.items);
|
||||
for (self.ext_funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.functype.items);
|
||||
for (self.funcs.items) |decl| try file.writeAll(decl.fn_link.wasm.functype.items);
|
||||
|
||||
try writeVecSectionHeader(
|
||||
file,
|
||||
@ -256,6 +393,31 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
|
||||
);
|
||||
}
|
||||
|
||||
// Memory section
|
||||
if (data_size != 0) {
|
||||
const header_offset = try reserveVecSectionHeader(file);
|
||||
const writer = file.writer();
|
||||
|
||||
try leb.writeULEB128(writer, @as(u32, 0));
|
||||
// Calculate how many memory pages are required and write that count.
// Wasm uses 64KiB pages; round up so the offset table and data segments fit.
|
||||
try leb.writeULEB128(
|
||||
writer,
|
||||
try std.math.divCeil(
|
||||
u32,
|
||||
offset_table_size + data_size,
|
||||
std.wasm.page_size,
|
||||
),
|
||||
);
|
||||
try writeVecSectionHeader(
|
||||
file,
|
||||
header_offset,
|
||||
.memory,
|
||||
@intCast(u32, (try file.getPos()) - header_offset - header_size),
|
||||
@as(u32, 1), // wasm currently only supports 1 linear memory segment
|
||||
);
|
||||
}
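Editorial aside, not part of the commit: the requested page count is divCeil of the total data size over the 64 KiB wasm page size, so exceeding a page boundary by one byte already costs an extra page:

const std = @import("std");

test "memory pages are rounded up" {
    std.testing.expectEqual(@as(u32, 1), try std.math.divCeil(u32, 65536, std.wasm.page_size));
    std.testing.expectEqual(@as(u32, 2), try std.math.divCeil(u32, 65537, std.wasm.page_size));
}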
|
||||
|
||||
// Export section
|
||||
if (self.base.options.module) |module| {
|
||||
const header_offset = try reserveVecSectionHeader(file);
|
||||
@ -280,6 +442,16 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
|
||||
count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// export memory if size is not 0
|
||||
if (data_size != 0) {
|
||||
try leb.writeULEB128(writer, @intCast(u32, "memory".len));
|
||||
try writer.writeAll("memory");
|
||||
try writer.writeByte(wasm.externalKind(.memory));
|
||||
try leb.writeULEB128(writer, @as(u32, 0)); // only 1 memory 'object' can exist
|
||||
count += 1;
|
||||
}
|
||||
|
||||
try writeVecSectionHeader(
|
||||
file,
|
||||
header_offset,
|
||||
@ -294,7 +466,7 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
|
||||
const header_offset = try reserveVecSectionHeader(file);
|
||||
const writer = file.writer();
|
||||
for (self.funcs.items) |decl| {
|
||||
const fn_data = &decl.fn_link.wasm.?;
|
||||
const fn_data = &decl.fn_link.wasm;
|
||||
|
||||
// Write the already generated code to the file, inserting
|
||||
// function indexes where required.
|
||||
@ -319,6 +491,51 @@ pub fn flushModule(self: *Wasm, comp: *Compilation) !void {
|
||||
@intCast(u32, self.funcs.items.len),
|
||||
);
|
||||
}
|
||||
|
||||
// Data section
|
||||
if (data_size != 0) {
|
||||
const header_offset = try reserveVecSectionHeader(file);
|
||||
const writer = file.writer();
|
||||
var len: u32 = 0;
|
||||
// index to memory section (currently, there can only be 1 memory section in wasm)
|
||||
try leb.writeULEB128(writer, @as(u32, 0));
|
||||
|
||||
// offset into data section
|
||||
try writer.writeByte(wasm.opcode(.i32_const));
|
||||
try leb.writeILEB128(writer, @as(i32, 0));
|
||||
try writer.writeByte(wasm.opcode(.end));
|
||||
|
||||
const total_size = offset_table_size + data_size;
|
||||
|
||||
// offset table + data size
|
||||
try leb.writeULEB128(writer, total_size);
|
||||
|
||||
// fill in the offset table and the data segments
|
||||
const file_offset = try file.getPos();
|
||||
var cur = first_decl;
|
||||
var data_offset = offset_table_size;
|
||||
while (cur) |cur_block| : (cur = cur_block.next) {
|
||||
if (cur_block.size == 0) continue;
|
||||
std.debug.assert(cur_block.init);
|
||||
|
||||
const offset = (cur_block.offset_index) * ptr_width;
|
||||
var buf: [4]u8 = undefined;
|
||||
std.mem.writeIntLittle(u32, &buf, data_offset);
|
||||
|
||||
try file.pwriteAll(&buf, file_offset + offset);
|
||||
try file.pwriteAll(cur_block.data[0..cur_block.size], file_offset + data_offset);
|
||||
data_offset += cur_block.size;
|
||||
}
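Aside for the reader, not part of the commit: each decl's slot in the offset table is patched little-endian at offset_index * ptr_width and points at where its payload lands after the table. With hypothetical values:

const std = @import("std");

test "offset table slot is patched little-endian" {
    var slot: [4]u8 = undefined;
    const data_offset: u32 = 12; // hypothetical: first payload right after a 3-entry table
    std.mem.writeIntLittle(u32, &slot, data_offset);
    std.testing.expectEqualSlices(u8, &[_]u8{ 12, 0, 0, 0 }, &slot);
}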
|
||||
|
||||
try file.seekTo(file_offset + data_offset);
|
||||
try writeVecSectionHeader(
|
||||
file,
|
||||
header_offset,
|
||||
.data,
|
||||
@intCast(u32, (file_offset + data_offset) - header_offset - header_size),
|
||||
@intCast(u32, 1), // only 1 data section
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
|
||||
|
||||
src/main.zig
@ -505,7 +505,6 @@ fn buildOutputType(
|
||||
var emit_bin: EmitBin = .yes_default_path;
|
||||
var emit_asm: Emit = .no;
|
||||
var emit_llvm_ir: Emit = .no;
|
||||
var emit_zir: Emit = .no;
|
||||
var emit_docs: Emit = .no;
|
||||
var emit_analysis: Emit = .no;
|
||||
var target_arch_os_abi: []const u8 = "native";
|
||||
@ -599,15 +598,15 @@ fn buildOutputType(
|
||||
var test_exec_args = std.ArrayList(?[]const u8).init(gpa);
|
||||
defer test_exec_args.deinit();
|
||||
|
||||
const pkg_tree_root = try gpa.create(Package);
|
||||
// This package only exists to clean up the code parsing --pkg-begin and
|
||||
// --pkg-end flags. Use dummy values that are safe for the destroy call.
|
||||
pkg_tree_root.* = .{
|
||||
var pkg_tree_root: Package = .{
|
||||
.root_src_directory = .{ .path = null, .handle = fs.cwd() },
|
||||
.root_src_path = &[0]u8{},
|
||||
.namespace_hash = Package.root_namespace_hash,
|
||||
};
|
||||
defer pkg_tree_root.destroy(gpa);
|
||||
var cur_pkg: *Package = pkg_tree_root;
|
||||
defer freePkgTree(gpa, &pkg_tree_root, false);
|
||||
var cur_pkg: *Package = &pkg_tree_root;
|
||||
|
||||
switch (arg_mode) {
|
||||
.build, .translate_c, .zig_test, .run => {
|
||||
@ -658,8 +657,7 @@ fn buildOutputType(
|
||||
) catch |err| {
|
||||
fatal("Failed to add package at path {s}: {s}", .{ pkg_path, @errorName(err) });
|
||||
};
|
||||
new_cur_pkg.parent = cur_pkg;
|
||||
try cur_pkg.add(gpa, pkg_name, new_cur_pkg);
|
||||
try cur_pkg.addAndAdopt(gpa, pkg_name, new_cur_pkg);
|
||||
cur_pkg = new_cur_pkg;
|
||||
} else if (mem.eql(u8, arg, "--pkg-end")) {
|
||||
cur_pkg = cur_pkg.parent orelse
|
||||
@ -924,12 +922,6 @@ fn buildOutputType(
|
||||
emit_bin = .{ .yes = arg["-femit-bin=".len..] };
|
||||
} else if (mem.eql(u8, arg, "-fno-emit-bin")) {
|
||||
emit_bin = .no;
|
||||
} else if (mem.eql(u8, arg, "-femit-zir")) {
|
||||
emit_zir = .yes_default_path;
|
||||
} else if (mem.startsWith(u8, arg, "-femit-zir=")) {
|
||||
emit_zir = .{ .yes = arg["-femit-zir=".len..] };
|
||||
} else if (mem.eql(u8, arg, "-fno-emit-zir")) {
|
||||
emit_zir = .no;
|
||||
} else if (mem.eql(u8, arg, "-femit-h")) {
|
||||
emit_h = .yes_default_path;
|
||||
} else if (mem.startsWith(u8, arg, "-femit-h=")) {
|
||||
@ -1026,7 +1018,7 @@ fn buildOutputType(
|
||||
.extra_flags = try arena.dupe([]const u8, extra_cflags.items),
|
||||
});
|
||||
},
|
||||
.zig, .zir => {
|
||||
.zig => {
|
||||
if (root_src_file) |other| {
|
||||
fatal("found another zig file '{s}' after root source file '{s}'", .{ arg, other });
|
||||
} else {
|
||||
@ -1087,7 +1079,7 @@ fn buildOutputType(
|
||||
.unknown, .shared_library, .object, .static_library => {
|
||||
try link_objects.append(it.only_arg);
|
||||
},
|
||||
.zig, .zir => {
|
||||
.zig => {
|
||||
if (root_src_file) |other| {
|
||||
fatal("found another zig file '{s}' after root source file '{s}'", .{ it.only_arg, other });
|
||||
} else {
|
||||
@ -1725,13 +1717,6 @@ fn buildOutputType(
|
||||
var emit_docs_resolved = try emit_docs.resolve("docs");
|
||||
defer emit_docs_resolved.deinit();
|
||||
|
||||
switch (emit_zir) {
|
||||
.no => {},
|
||||
.yes_default_path, .yes => {
|
||||
fatal("The -femit-zir implementation has been intentionally deleted so that it can be rewritten as a proper backend.", .{});
|
||||
},
|
||||
}
|
||||
|
||||
const root_pkg: ?*Package = if (root_src_file) |src_path| blk: {
|
||||
if (main_pkg_path) |p| {
|
||||
const rel_src_path = try fs.path.relative(gpa, p, src_path);
|
||||
@ -1747,6 +1732,7 @@ fn buildOutputType(
|
||||
if (root_pkg) |pkg| {
|
||||
pkg.table = pkg_tree_root.table;
|
||||
pkg_tree_root.table = .{};
|
||||
pkg.namespace_hash = pkg_tree_root.namespace_hash;
|
||||
}
|
||||
|
||||
const self_exe_path = try fs.selfExePathAlloc(arena);
|
||||
@ -1815,6 +1801,11 @@ fn buildOutputType(
|
||||
@import("codegen/llvm/bindings.zig").ParseCommandLineOptions(argv.len, &argv);
|
||||
}
|
||||
|
||||
const clang_passthrough_mode = switch (arg_mode) {
|
||||
.cc, .cpp, .translate_c => true,
|
||||
else => false,
|
||||
};
|
||||
|
||||
gimmeMoreOfThoseSweetSweetFileDescriptors();
|
||||
|
||||
const comp = Compilation.create(gpa, .{
|
||||
@ -1886,7 +1877,7 @@ fn buildOutputType(
|
||||
.function_sections = function_sections,
|
||||
.self_exe_path = self_exe_path,
|
||||
.thread_pool = &thread_pool,
|
||||
.clang_passthrough_mode = arg_mode != .build,
|
||||
.clang_passthrough_mode = clang_passthrough_mode,
|
||||
.clang_preprocessor_mode = clang_preprocessor_mode,
|
||||
.version = optional_version,
|
||||
.libc_installation = if (libc_installation) |*lci| lci else null,
|
||||
@ -2101,8 +2092,12 @@ fn updateModule(gpa: *Allocator, comp: *Compilation, hook: AfterUpdateHook) !voi
|
||||
defer errors.deinit(comp.gpa);
|
||||
|
||||
if (errors.list.len != 0) {
|
||||
const ttyconf: std.debug.TTY.Config = switch (comp.color) {
|
||||
.auto, .on => std.debug.detectTTYConfig(),
|
||||
.off => .no_color,
|
||||
};
|
||||
for (errors.list) |full_err_msg| {
|
||||
full_err_msg.renderToStdErr();
|
||||
full_err_msg.renderToStdErr(ttyconf);
|
||||
}
|
||||
const log_text = comp.getCompileLogOutput();
|
||||
if (log_text.len != 0) {
|
||||
@ -2146,6 +2141,18 @@ fn updateModule(gpa: *Allocator, comp: *Compilation, hook: AfterUpdateHook) !voi
|
||||
}
|
||||
}
|
||||
|
||||
fn freePkgTree(gpa: *Allocator, pkg: *Package, free_parent: bool) void {
|
||||
{
|
||||
var it = pkg.table.iterator();
|
||||
while (it.next()) |kv| {
|
||||
freePkgTree(gpa, kv.value, true);
|
||||
}
|
||||
}
|
||||
if (free_parent) {
|
||||
pkg.destroy(gpa);
|
||||
}
|
||||
}
|
||||
|
||||
fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !void {
|
||||
if (!build_options.have_llvm)
|
||||
fatal("cannot translate-c: compiler built without LLVM extensions", .{});
|
||||
@ -2500,6 +2507,7 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
.handle = try zig_lib_directory.handle.openDir(std_special, .{}),
|
||||
},
|
||||
.root_src_path = "build_runner.zig",
|
||||
.namespace_hash = Package.root_namespace_hash,
|
||||
};
|
||||
defer root_pkg.root_src_directory.handle.close();
|
||||
|
||||
@ -2545,8 +2553,9 @@ pub fn cmdBuild(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !v
|
||||
var build_pkg: Package = .{
|
||||
.root_src_directory = build_directory,
|
||||
.root_src_path = build_zig_basename,
|
||||
.namespace_hash = undefined,
|
||||
};
|
||||
try root_pkg.table.put(arena, "@build", &build_pkg);
|
||||
try root_pkg.addAndAdopt(arena, "@build", &build_pkg);
|
||||
|
||||
var global_cache_directory: Compilation.Directory = l: {
|
||||
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
|
||||
|
||||
@ -35,6 +35,10 @@ pub fn RegisterManager(
|
||||
self.registers.deinit(allocator);
|
||||
}
|
||||
|
||||
fn isTracked(reg: Register) bool {
|
||||
return std.mem.indexOfScalar(Register, callee_preserved_regs, reg) != null;
|
||||
}
|
||||
|
||||
fn markRegUsed(self: *Self, reg: Register) void {
|
||||
if (FreeRegInt == u0) return;
|
||||
const index = reg.allocIndex() orelse return;
|
||||
@ -51,6 +55,13 @@ pub fn RegisterManager(
|
||||
self.free_registers |= @as(FreeRegInt, 1) << shift;
|
||||
}
|
||||
|
||||
pub fn isRegFree(self: Self, reg: Register) bool {
|
||||
if (FreeRegInt == u0) return true;
|
||||
const index = reg.allocIndex() orelse return true;
|
||||
const shift = @intCast(ShiftInt, index);
|
||||
return self.free_registers & @as(FreeRegInt, 1) << shift != 0;
|
||||
}
|
||||
|
||||
/// Returns whether this register was allocated in the course
|
||||
/// of this function
|
||||
pub fn isRegAllocated(self: Self, reg: Register) bool {
|
||||
@ -117,17 +128,61 @@ pub fn RegisterManager(
|
||||
const regs_entry = self.registers.remove(reg).?;
|
||||
const spilled_inst = regs_entry.value;
|
||||
        try self.getFunction().spillInstruction(spilled_inst.src, reg, spilled_inst);
        self.markRegFree(reg);

        break :b reg;
    };
}

pub fn getRegAssumeFree(self: *Self, reg: Register, inst: *ir.Inst) !void {
    try self.registers.putNoClobber(self.getFunction().gpa, reg, inst);
/// Allocates the specified register with the specified
/// instruction. Spills the register if it is currently
/// allocated.
/// Before calling, must ensureCapacity + 1 on self.registers.
pub fn getReg(self: *Self, reg: Register, inst: *ir.Inst) !void {
    if (!isTracked(reg)) return;

    if (!self.isRegFree(reg)) {
        // Move the instruction that was previously there to a
        // stack allocation.
        const regs_entry = self.registers.getEntry(reg).?;
        const spilled_inst = regs_entry.value;
        regs_entry.value = inst;
        try self.getFunction().spillInstruction(spilled_inst.src, reg, spilled_inst);
    } else {
        self.getRegAssumeFree(reg, inst);
    }
}

/// Spills the register if it is currently allocated.
/// Does not track the register.
pub fn getRegWithoutTracking(self: *Self, reg: Register) !void {
    if (!isTracked(reg)) return;

    if (!self.isRegFree(reg)) {
        // Move the instruction that was previously there to a
        // stack allocation.
        const regs_entry = self.registers.remove(reg).?;
        const spilled_inst = regs_entry.value;
        try self.getFunction().spillInstruction(spilled_inst.src, reg, spilled_inst);
        self.markRegFree(reg);
    }
}

/// Allocates the specified register with the specified
/// instruction. Assumes that the register is free and no
/// spilling is necessary.
/// Before calling, must ensureCapacity + 1 on self.registers.
pub fn getRegAssumeFree(self: *Self, reg: Register, inst: *ir.Inst) void {
    if (!isTracked(reg)) return;

    self.registers.putAssumeCapacityNoClobber(reg, inst);
    self.markRegUsed(reg);
}

/// Marks the specified register as free
pub fn freeReg(self: *Self, reg: Register) void {
    if (!isTracked(reg)) return;

    _ = self.registers.remove(reg);
    self.markRegFree(reg);
}
@ -226,3 +281,35 @@ test "allocReg: spilling" {
    std.testing.expectEqual(@as(?MockRegister, .r3), try function.register_manager.allocReg(&mock_instruction));
    std.testing.expectEqualSlices(MockRegister, &[_]MockRegister{.r2}, function.spilled.items);
}

test "getReg" {
    const allocator = std.testing.allocator;

    var function = MockFunction{
        .allocator = allocator,
    };
    defer function.deinit();

    var mock_instruction = ir.Inst{
        .tag = .breakpoint,
        .ty = Type.initTag(.void),
        .src = .unneeded,
    };

    std.testing.expect(!function.register_manager.isRegAllocated(.r2));
    std.testing.expect(!function.register_manager.isRegAllocated(.r3));

    try function.register_manager.registers.ensureCapacity(allocator, function.register_manager.registers.count() + 2);
    try function.register_manager.getReg(.r3, &mock_instruction);

    std.testing.expect(!function.register_manager.isRegAllocated(.r2));
    std.testing.expect(function.register_manager.isRegAllocated(.r3));

    // Spill r3
    try function.register_manager.registers.ensureCapacity(allocator, function.register_manager.registers.count() + 2);
    try function.register_manager.getReg(.r3, &mock_instruction);

    std.testing.expect(!function.register_manager.isRegAllocated(.r2));
    std.testing.expect(function.register_manager.isRegAllocated(.r3));
    std.testing.expectEqualSlices(MockRegister, &[_]MockRegister{.r3}, function.spilled.items);
}

@ -9110,6 +9110,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g) {
    buf_appendf(contents, "pub const position_independent_executable = %s;\n", bool_to_str(g->have_pie));
    buf_appendf(contents, "pub const strip_debug_info = %s;\n", bool_to_str(g->strip_debug_symbols));
    buf_appendf(contents, "pub const code_model = CodeModel.default;\n");
    buf_appendf(contents, "pub const zig_is_stage2 = false;\n");

    {
        TargetSubsystem detected_subsystem = detect_subsystem(g);

src/test.zig (63 lines changed)
@ -122,11 +122,6 @@ pub const TestContext = struct {
|
||||
path: []const u8,
|
||||
};
|
||||
|
||||
pub const Extension = enum {
|
||||
Zig,
|
||||
ZIR,
|
||||
};
|
||||
|
||||
/// A `Case` consists of a list of `Update`. The same `Compilation` is used for each
|
||||
/// update, so each update's source is treated as a single file being
|
||||
/// updated by the test harness and incrementally compiled.
|
||||
@ -141,7 +136,6 @@ pub const TestContext = struct {
|
||||
/// to Executable.
|
||||
output_mode: std.builtin.OutputMode,
|
||||
updates: std.ArrayList(Update),
|
||||
extension: Extension,
|
||||
object_format: ?std.builtin.ObjectFormat = null,
|
||||
emit_h: bool = false,
|
||||
llvm_backend: bool = false,
|
||||
@ -238,14 +232,12 @@ pub const TestContext = struct {
|
||||
ctx: *TestContext,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
extension: Extension,
|
||||
) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.extension = extension,
|
||||
.files = std.ArrayList(File).init(ctx.cases.allocator),
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
@ -253,7 +245,7 @@ pub const TestContext = struct {
|
||||
|
||||
/// Adds a test case for Zig input, producing an executable
|
||||
pub fn exe(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
|
||||
return ctx.addExe(name, target, .Zig);
|
||||
return ctx.addExe(name, target);
|
||||
}
|
||||
|
||||
/// Adds a test case for ZIR input, producing an executable
|
||||
@ -269,7 +261,6 @@ pub const TestContext = struct {
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.extension = .Zig,
|
||||
.object_format = .c,
|
||||
.files = std.ArrayList(File).init(ctx.cases.allocator),
|
||||
}) catch @panic("out of memory");
|
||||
@ -284,7 +275,6 @@ pub const TestContext = struct {
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Exe,
|
||||
.extension = .Zig,
|
||||
.files = std.ArrayList(File).init(ctx.cases.allocator),
|
||||
.llvm_backend = true,
|
||||
}) catch @panic("out of memory");
|
||||
@ -295,14 +285,12 @@ pub const TestContext = struct {
|
||||
ctx: *TestContext,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
extension: Extension,
|
||||
) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Obj,
|
||||
.extension = extension,
|
||||
.files = std.ArrayList(File).init(ctx.cases.allocator),
|
||||
}) catch @panic("out of memory");
|
||||
return &ctx.cases.items[ctx.cases.items.len - 1];
|
||||
@ -310,7 +298,7 @@ pub const TestContext = struct {
|
||||
|
||||
/// Adds a test case for Zig input, producing an object file.
|
||||
pub fn obj(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
|
||||
return ctx.addObj(name, target, .Zig);
|
||||
return ctx.addObj(name, target);
|
||||
}
|
||||
|
||||
/// Adds a test case for ZIR input, producing an object file.
|
||||
@ -319,13 +307,12 @@ pub const TestContext = struct {
|
||||
}
|
||||
|
||||
/// Adds a test case for Zig or ZIR input, producing C code.
|
||||
pub fn addC(ctx: *TestContext, name: []const u8, target: CrossTarget, ext: Extension) *Case {
|
||||
pub fn addC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
|
||||
ctx.cases.append(Case{
|
||||
.name = name,
|
||||
.target = target,
|
||||
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
|
||||
.output_mode = .Obj,
|
||||
.extension = ext,
|
||||
.object_format = .c,
|
||||
.files = std.ArrayList(File).init(ctx.cases.allocator),
|
||||
}) catch @panic("out of memory");
|
||||
@ -333,21 +320,20 @@ pub const TestContext = struct {
|
||||
}
|
||||
|
||||
pub fn c(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void {
|
||||
ctx.addC(name, target, .Zig).addCompareObjectFile(src, zig_h ++ out);
|
||||
ctx.addC(name, target).addCompareObjectFile(src, zig_h ++ out);
|
||||
}
|
||||
|
||||
pub fn h(ctx: *TestContext, name: []const u8, target: CrossTarget, src: [:0]const u8, comptime out: [:0]const u8) void {
|
||||
ctx.addC(name, target, .Zig).addHeader(src, zig_h ++ out);
|
||||
ctx.addC(name, target).addHeader(src, zig_h ++ out);
|
||||
}
|
||||
|
||||
pub fn addCompareOutput(
|
||||
ctx: *TestContext,
|
||||
name: []const u8,
|
||||
extension: Extension,
|
||||
src: [:0]const u8,
|
||||
expected_stdout: []const u8,
|
||||
) void {
|
||||
ctx.addExe(name, .{}, extension).addCompareOutput(src, expected_stdout);
|
||||
ctx.addExe(name, .{}).addCompareOutput(src, expected_stdout);
|
||||
}
|
||||
|
||||
/// Adds a test case that compiles the Zig source given in `src`, executes
|
||||
@ -358,7 +344,7 @@ pub const TestContext = struct {
|
||||
src: [:0]const u8,
|
||||
expected_stdout: []const u8,
|
||||
) void {
|
||||
return ctx.addCompareOutput(name, .Zig, src, expected_stdout);
|
||||
return ctx.addCompareOutput(name, src, expected_stdout);
|
||||
}
|
||||
|
||||
/// Adds a test case that compiles the ZIR source given in `src`, executes
|
||||
@ -376,11 +362,10 @@ pub const TestContext = struct {
|
||||
ctx: *TestContext,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
extension: Extension,
|
||||
src: [:0]const u8,
|
||||
result: [:0]const u8,
|
||||
) void {
|
||||
ctx.addObj(name, target, extension).addTransform(src, result);
|
||||
ctx.addObj(name, target).addTransform(src, result);
|
||||
}
|
||||
|
||||
/// Adds a test case that compiles the Zig given in `src` to ZIR and tests
|
||||
@ -392,7 +377,7 @@ pub const TestContext = struct {
|
||||
src: [:0]const u8,
|
||||
result: [:0]const u8,
|
||||
) void {
|
||||
ctx.addTransform(name, target, .Zig, src, result);
|
||||
ctx.addTransform(name, target, src, result);
|
||||
}
|
||||
|
||||
/// Adds a test case that cleans up the ZIR source given in `src`, and
|
||||
@ -411,11 +396,10 @@ pub const TestContext = struct {
|
||||
ctx: *TestContext,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
extension: Extension,
|
||||
src: [:0]const u8,
|
||||
expected_errors: []const []const u8,
|
||||
) void {
|
||||
ctx.addObj(name, target, extension).addError(src, expected_errors);
|
||||
ctx.addObj(name, target).addError(src, expected_errors);
|
||||
}
|
||||
|
||||
/// Adds a test case that ensures that the Zig given in `src` fails to
|
||||
@ -428,7 +412,7 @@ pub const TestContext = struct {
|
||||
src: [:0]const u8,
|
||||
expected_errors: []const []const u8,
|
||||
) void {
|
||||
ctx.addError(name, target, .Zig, src, expected_errors);
|
||||
ctx.addError(name, target, src, expected_errors);
|
||||
}
|
||||
|
||||
/// Adds a test case that ensures that the ZIR given in `src` fails to
|
||||
@ -448,10 +432,9 @@ pub const TestContext = struct {
|
||||
ctx: *TestContext,
|
||||
name: []const u8,
|
||||
target: CrossTarget,
|
||||
extension: Extension,
|
||||
src: [:0]const u8,
|
||||
) void {
|
||||
ctx.addObj(name, target, extension).compiles(src);
|
||||
ctx.addObj(name, target).compiles(src);
|
||||
}
|
||||
|
||||
/// Adds a test case that asserts that the Zig given in `src` compiles
|
||||
@ -462,7 +445,7 @@ pub const TestContext = struct {
|
||||
target: CrossTarget,
|
||||
src: [:0]const u8,
|
||||
) void {
|
||||
ctx.addCompiles(name, target, .Zig, src);
|
||||
ctx.addCompiles(name, target, src);
|
||||
}
|
||||
|
||||
/// Adds a test case that asserts that the ZIR given in `src` compiles
|
||||
@ -489,7 +472,7 @@ pub const TestContext = struct {
|
||||
expected_errors: []const []const u8,
|
||||
fixed_src: [:0]const u8,
|
||||
) void {
|
||||
var case = ctx.addObj(name, target, .Zig);
|
||||
var case = ctx.addObj(name, target);
|
||||
case.addError(src, expected_errors);
|
||||
case.compiles(fixed_src);
|
||||
}
|
||||
@ -614,15 +597,14 @@ pub const TestContext = struct {
|
||||
.path = try std.fs.path.join(arena, &[_][]const u8{ tmp_dir_path, "zig-cache" }),
|
||||
};
|
||||
|
||||
const tmp_src_path = switch (case.extension) {
|
||||
.Zig => "test_case.zig",
|
||||
.ZIR => "test_case.zir",
|
||||
};
|
||||
const tmp_src_path = "test_case.zig";
|
||||
|
||||
var root_pkg: Package = .{
|
||||
.root_src_directory = .{ .path = tmp_dir_path, .handle = tmp.dir },
|
||||
.root_src_path = tmp_src_path,
|
||||
.namespace_hash = Package.root_namespace_hash,
|
||||
};
|
||||
defer root_pkg.table.deinit(allocator);
|
||||
|
||||
const bin_name = try std.zig.binNameAlloc(arena, .{
|
||||
.root_name = "test_case",
|
||||
@ -639,13 +621,10 @@ pub const TestContext = struct {
|
||||
.directory = emit_directory,
|
||||
.basename = bin_name,
|
||||
};
|
||||
const emit_h: ?Compilation.EmitLoc = if (case.emit_h)
|
||||
.{
|
||||
.directory = emit_directory,
|
||||
.basename = "test_case.h",
|
||||
}
|
||||
else
|
||||
null;
|
||||
const emit_h: ?Compilation.EmitLoc = if (case.emit_h) .{
|
||||
.directory = emit_directory,
|
||||
.basename = "test_case.h",
|
||||
} else null;
|
||||
const comp = try Compilation.create(allocator, .{
|
||||
.local_cache_directory = zig_cache_directory,
|
||||
.global_cache_directory = global_cache_directory,
|
||||
|
||||
@ -1123,6 +1123,16 @@ fn transStmt(
|
||||
const gen_sel = @ptrCast(*const clang.GenericSelectionExpr, stmt);
|
||||
return transExpr(c, scope, gen_sel.getResultExpr(), result_used);
|
||||
},
|
||||
.ConvertVectorExprClass => {
|
||||
const conv_vec = @ptrCast(*const clang.ConvertVectorExpr, stmt);
|
||||
const conv_vec_node = try transConvertVectorExpr(c, scope, stmt.getBeginLoc(), conv_vec);
|
||||
return maybeSuppressResult(c, scope, result_used, conv_vec_node);
|
||||
},
|
||||
.ShuffleVectorExprClass => {
|
||||
const shuffle_vec_expr = @ptrCast(*const clang.ShuffleVectorExpr, stmt);
|
||||
const shuffle_vec_node = try transShuffleVectorExpr(c, scope, shuffle_vec_expr);
|
||||
return maybeSuppressResult(c, scope, result_used, shuffle_vec_node);
|
||||
},
|
||||
// When adding new cases here, see comment for maybeBlockify()
|
||||
else => {
|
||||
return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "TODO implement translation of stmt class {s}", .{@tagName(sc)});
|
||||
@ -1130,6 +1140,128 @@ fn transStmt(
|
||||
}
|
||||
}
|
||||
|
||||
/// See https://clang.llvm.org/docs/LanguageExtensions.html#langext-builtin-convertvector
fn transConvertVectorExpr(
    c: *Context,
    scope: *Scope,
    source_loc: clang.SourceLocation,
    expr: *const clang.ConvertVectorExpr,
) TransError!Node {
    const base_stmt = @ptrCast(*const clang.Stmt, expr);

    var block_scope = try Scope.Block.init(c, scope, true);
    defer block_scope.deinit();

    const src_expr = expr.getSrcExpr();
    const src_type = qualTypeCanon(src_expr.getType());
    const src_vector_ty = @ptrCast(*const clang.VectorType, src_type);
    const src_element_qt = src_vector_ty.getElementType();
    const src_element_type_node = try transQualType(c, &block_scope.base, src_element_qt, base_stmt.getBeginLoc());

    const src_expr_node = try transExpr(c, &block_scope.base, src_expr, .used);

    const dst_qt = expr.getTypeSourceInfo_getType();
    const dst_type_node = try transQualType(c, &block_scope.base, dst_qt, base_stmt.getBeginLoc());
    const dst_vector_ty = @ptrCast(*const clang.VectorType, qualTypeCanon(dst_qt));
    const num_elements = dst_vector_ty.getNumElements();
    const dst_element_qt = dst_vector_ty.getElementType();

    // workaround for https://github.com/ziglang/zig/issues/8322
    // we store the casted results into temp variables and use those
    // to initialize the vector. Eventually we can just directly
    // construct the init_list from casted source members
    var i: usize = 0;
    while (i < num_elements) : (i += 1) {
        const mangled_name = try block_scope.makeMangledName(c, "tmp");
        const value = try Tag.array_access.create(c.arena, .{
            .lhs = src_expr_node,
            .rhs = try transCreateNodeNumber(c, i, .int),
        });
        const tmp_decl_node = try Tag.var_simple.create(c.arena, .{
            .name = mangled_name,
            .init = try transCCast(c, &block_scope.base, base_stmt.getBeginLoc(), dst_element_qt, src_element_qt, value),
        });
        try block_scope.statements.append(tmp_decl_node);
    }

    const init_list = try c.arena.alloc(Node, num_elements);
    for (init_list) |*init, init_index| {
        const tmp_decl = block_scope.statements.items[init_index];
        const name = tmp_decl.castTag(.var_simple).?.data.name;
        init.* = try Tag.identifier.create(c.arena, name);
    }

    const vec_init = try Tag.array_init.create(c.arena, .{
        .cond = dst_type_node,
        .cases = init_list,
    });

    const break_node = try Tag.break_val.create(c.arena, .{
        .label = block_scope.label,
        .val = vec_init,
    });
    try block_scope.statements.append(break_node);
    return block_scope.complete(c);
}

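A minimal sketch of the kind of Zig the block built above is aiming to emit for __builtin_convertvector; the names (__v4hi, __v4hu, tmp_*) are illustrative, the cast shown is only the i16 -> u16 case rather than the exact transCCast output, and the file-scope std import is assumed.

test "convertvector translation shape (illustrative sketch)" {
    const __v4hi = std.meta.Vector(4, i16);
    const __v4hu = std.meta.Vector(4, u16);
    const signed = __v4hi{ 1, 2, -1, -2 };
    // Element-wise casts land in temporaries, then a vector literal,
    // all wrapped in a labeled block, mirroring the structure built above.
    const converted = blk: {
        const tmp_0 = @bitCast(u16, signed[0]);
        const tmp_1 = @bitCast(u16, signed[1]);
        const tmp_2 = @bitCast(u16, signed[2]);
        const tmp_3 = @bitCast(u16, signed[3]);
        break :blk __v4hu{ tmp_0, tmp_1, tmp_2, tmp_3 };
    };
    std.testing.expectEqual(@as(u16, 0xFFFF), converted[2]);
}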
fn makeShuffleMask(c: *Context, scope: *Scope, expr: *const clang.ShuffleVectorExpr, vector_len: Node) TransError!Node {
    const num_subexprs = expr.getNumSubExprs();
    assert(num_subexprs >= 3); // two source vectors + at least 1 index expression
    const mask_len = num_subexprs - 2;

    const mask_type = try Tag.std_meta_vector.create(c.arena, .{
        .lhs = try transCreateNodeNumber(c, mask_len, .int),
        .rhs = try Tag.type.create(c.arena, "i32"),
    });

    const init_list = try c.arena.alloc(Node, mask_len);

    for (init_list) |*init, i| {
        const index_expr = try transExprCoercing(c, scope, expr.getExpr(@intCast(c_uint, i + 2)), .used);
        const converted_index = try Tag.std_meta_shuffle_vector_index.create(c.arena, .{ .lhs = index_expr, .rhs = vector_len });
        init.* = converted_index;
    }

    const mask_init = try Tag.array_init.create(c.arena, .{
        .cond = mask_type,
        .cases = init_list,
    });
    return Tag.@"comptime".create(c.arena, mask_init);
}

/// @typeInfo(@TypeOf(vec_node)).Vector.<field>
fn vectorTypeInfo(arena: *mem.Allocator, vec_node: Node, field: []const u8) TransError!Node {
    const typeof_call = try Tag.typeof.create(arena, vec_node);
    const typeinfo_call = try Tag.typeinfo.create(arena, typeof_call);
    const vector_type_info = try Tag.field_access.create(arena, .{ .lhs = typeinfo_call, .field_name = "Vector" });
    return Tag.field_access.create(arena, .{ .lhs = vector_type_info, .field_name = field });
}

fn transShuffleVectorExpr(
    c: *Context,
    scope: *Scope,
    expr: *const clang.ShuffleVectorExpr,
) TransError!Node {
    const base_expr = @ptrCast(*const clang.Expr, expr);
    const num_subexprs = expr.getNumSubExprs();
    if (num_subexprs < 3) return fail(c, error.UnsupportedTranslation, base_expr.getBeginLoc(), "ShuffleVector needs at least 1 index", .{});

    const a = try transExpr(c, scope, expr.getExpr(0), .used);
    const b = try transExpr(c, scope, expr.getExpr(1), .used);

    // clang requires the first two arguments to __builtin_shufflevector to be the same type
    const vector_child_type = try vectorTypeInfo(c.arena, a, "child");
    const vector_len = try vectorTypeInfo(c.arena, a, "len");
    const shuffle_mask = try makeShuffleMask(c, scope, expr, vector_len);

    return Tag.shuffle.create(c.arena, .{
        .element_type = vector_child_type,
        .a = a,
        .b = b,
        .mask_vector = shuffle_mask,
    });
}

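A minimal sketch of the index conversion performed by makeShuffleMask and transShuffleVectorExpr above; the vector names are illustrative, the mask is written out by hand where the generated code calls std.meta.shuffleVectorIndex, and the file-scope std import is assumed.

test "shufflevector translation shape (illustrative sketch)" {
    const __v4hi = std.meta.Vector(4, i16);
    const a = __v4hi{ 0, 1, 2, 3 };
    const b = __v4hi{ 100, 200, 300, 400 };
    // C: __builtin_shufflevector(a, b, 0, 1, 4, 5)
    // For @shuffle, non-negative mask entries pick from `a`; negative entries
    // pick from `b` (-1 is b[0], -2 is b[1], ...), which is the encoding
    // std.meta.shuffleVectorIndex computes from the flat C index and len(a).
    const mask = comptime std.meta.Vector(4, i32){ 0, 1, -1, -2 };
    const shuffled = @shuffle(i16, a, b, mask);
    std.testing.expectEqual(@as(i16, 100), shuffled[2]);
    std.testing.expectEqual(@as(i16, 200), shuffled[3]);
}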
/// Translate a "simple" offsetof expression containing exactly one component,
|
||||
/// when that component is of kind .Field - e.g. offsetof(mytype, myfield)
|
||||
fn transSimpleOffsetOfExpr(
|
||||
@ -1935,6 +2067,10 @@ fn cIsEnum(qt: clang.QualType) bool {
|
||||
return qt.getCanonicalType().getTypeClass() == .Enum;
|
||||
}
|
||||
|
||||
fn cIsVector(qt: clang.QualType) bool {
|
||||
return qt.getCanonicalType().getTypeClass() == .Vector;
|
||||
}
|
||||
|
||||
/// Get the underlying int type of an enum. The C compiler chooses a signed int
|
||||
/// type that is large enough to hold all of the enum's values. It is not required
|
||||
/// to be the smallest possible type that can hold all the values.
|
||||
@ -1991,6 +2127,11 @@ fn transCCast(
|
||||
// @bitCast(dest_type, intermediate_value)
|
||||
return Tag.bit_cast.create(c.arena, .{ .lhs = dst_node, .rhs = src_int_expr });
|
||||
}
|
||||
if (cIsVector(src_type) or cIsVector(dst_type)) {
|
||||
// C cast where at least 1 operand is a vector requires them to be same size
|
||||
// @bitCast(dest_type, val)
|
||||
return Tag.bit_cast.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
|
||||
}
|
||||
if (cIsInteger(dst_type) and qualTypeIsPtr(src_type)) {
|
||||
// @intCast(dest_type, @ptrToInt(val))
|
||||
const ptr_to_int = try Tag.ptr_to_int.create(c.arena, expr);
|
||||
@ -2209,6 +2350,63 @@ fn transInitListExprArray(
|
||||
}
|
||||
}
|
||||
|
||||
fn transInitListExprVector(
|
||||
c: *Context,
|
||||
scope: *Scope,
|
||||
loc: clang.SourceLocation,
|
||||
expr: *const clang.InitListExpr,
|
||||
ty: *const clang.Type,
|
||||
) TransError!Node {
|
||||
|
||||
const qt = getExprQualType(c, @ptrCast(*const clang.Expr, expr));
|
||||
const vector_type = try transQualType(c, scope, qt, loc);
|
||||
const init_count = expr.getNumInits();
|
||||
|
||||
if (init_count == 0) {
|
||||
return Tag.container_init.create(c.arena, .{
|
||||
.lhs = vector_type,
|
||||
.inits = try c.arena.alloc(ast.Payload.ContainerInit.Initializer, 0),
|
||||
});
|
||||
}
|
||||
|
||||
var block_scope = try Scope.Block.init(c, scope, true);
|
||||
defer block_scope.deinit();
|
||||
|
||||
// workaround for https://github.com/ziglang/zig/issues/8322
|
||||
// we store the initializers in temp variables and use those
|
||||
// to initialize the vector. Eventually we can just directly
|
||||
// construct the init_list from casted source members
|
||||
var i: usize = 0;
|
||||
while (i < init_count) : (i += 1) {
|
||||
const mangled_name = try block_scope.makeMangledName(c, "tmp");
|
||||
const init_expr = expr.getInit(@intCast(c_uint, i));
|
||||
const tmp_decl_node = try Tag.var_simple.create(c.arena, .{
|
||||
.name = mangled_name,
|
||||
.init = try transExpr(c, &block_scope.base, init_expr, .used),
|
||||
});
|
||||
try block_scope.statements.append(tmp_decl_node);
|
||||
}
|
||||
|
||||
const init_list = try c.arena.alloc(Node, init_count);
|
||||
for (init_list) |*init, init_index| {
|
||||
const tmp_decl = block_scope.statements.items[init_index];
|
||||
const name = tmp_decl.castTag(.var_simple).?.data.name;
|
||||
init.* = try Tag.identifier.create(c.arena, name);
|
||||
}
|
||||
|
||||
const array_init = try Tag.array_init.create(c.arena, .{
|
||||
.cond = vector_type,
|
||||
.cases = init_list,
|
||||
});
|
||||
const break_node = try Tag.break_val.create(c.arena, .{
|
||||
.label = block_scope.label,
|
||||
.val = array_init,
|
||||
});
|
||||
try block_scope.statements.append(break_node);
|
||||
|
||||
return block_scope.complete(c);
|
||||
}
|
||||
|
||||
fn transInitListExpr(
|
||||
c: *Context,
|
||||
scope: *Scope,
|
||||
@ -2235,6 +2433,14 @@ fn transInitListExpr(
|
||||
expr,
|
||||
qual_type,
|
||||
));
|
||||
} else if (qual_type.isVectorType()) {
|
||||
return maybeSuppressResult(c, scope, used, try transInitListExprVector(
|
||||
c,
|
||||
scope,
|
||||
source_loc,
|
||||
expr,
|
||||
qual_type,
|
||||
));
|
||||
} else {
|
||||
const type_name = c.str(qual_type.getTypeClassName());
|
||||
return fail(c, error.UnsupportedType, source_loc, "unsupported initlist type: '{s}'", .{type_name});
|
||||
@ -4085,6 +4291,15 @@ fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clan
|
||||
};
|
||||
return Tag.typeof.create(c.arena, underlying_expr);
|
||||
},
|
||||
.Vector => {
|
||||
const vector_ty = @ptrCast(*const clang.VectorType, ty);
|
||||
const num_elements = vector_ty.getNumElements();
|
||||
const element_qt = vector_ty.getElementType();
|
||||
return Tag.std_meta_vector.create(c.arena, .{
|
||||
.lhs = try transCreateNodeNumber(c, num_elements, .int),
|
||||
.rhs = try transQualType(c, scope, element_qt, source_loc),
|
||||
});
|
||||
},
|
||||
else => {
|
||||
const type_name = c.str(ty.getTypeClassName());
|
||||
return fail(c, error.UnsupportedType, source_loc, "unsupported type: '{s}'", .{type_name});
|
||||
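A small sketch of what the .Vector case in transType above is meant to produce (the __v8hi name is illustrative and the file-scope std import is assumed): the element count comes from getNumElements() and the element type from getElementType(), so a 16-byte vector of int16_t becomes an 8-element i16 vector, which is also the shape vectorTypeInfo later inspects through @typeInfo.

// C: typedef int16_t __v8hi __attribute__((__vector_size__(16)));
const __v8hi = std.meta.Vector(8, i16);

test "vector type mapping (illustrative sketch)" {
    std.testing.expect(@typeInfo(__v8hi).Vector.len == 8);
    std.testing.expect(@typeInfo(__v8hi).Vector.child == i16);
}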
@ -5211,21 +5426,26 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
|
||||
}
|
||||
},
|
||||
.LParen => {
|
||||
var args = std.ArrayList(Node).init(c.gpa);
|
||||
defer args.deinit();
|
||||
while (true) {
|
||||
const arg = try parseCCondExpr(c, m, scope);
|
||||
try args.append(arg);
|
||||
switch (m.next().?) {
|
||||
.Comma => {},
|
||||
.RParen => break,
|
||||
else => {
|
||||
try m.fail(c, "unable to translate C expr: expected ',' or ')'", .{});
|
||||
return error.ParseError;
|
||||
},
|
||||
if (m.peek().? == .RParen) {
|
||||
m.i += 1;
|
||||
node = try Tag.call.create(c.arena, .{ .lhs = node, .args = &[0]Node{} });
|
||||
} else {
|
||||
var args = std.ArrayList(Node).init(c.gpa);
|
||||
defer args.deinit();
|
||||
while (true) {
|
||||
const arg = try parseCCondExpr(c, m, scope);
|
||||
try args.append(arg);
|
||||
switch (m.next().?) {
|
||||
.Comma => {},
|
||||
.RParen => break,
|
||||
else => {
|
||||
try m.fail(c, "unable to translate C expr: expected ',' or ')'", .{});
|
||||
return error.ParseError;
|
||||
},
|
||||
}
|
||||
}
|
||||
node = try Tag.call.create(c.arena, .{ .lhs = node, .args = try c.arena.dupe(Node, args.items) });
|
||||
}
|
||||
node = try Tag.call.create(c.arena, .{ .lhs = node, .args = try c.arena.dupe(Node, args.items) });
|
||||
},
|
||||
.LBrace => {
|
||||
var init_vals = std.ArrayList(Node).init(c.gpa);
|
||||
|
||||
@ -66,6 +66,7 @@ pub const Node = extern union {
|
||||
@"enum",
|
||||
@"struct",
|
||||
@"union",
|
||||
@"comptime",
|
||||
array_init,
|
||||
tuple,
|
||||
container_init,
|
||||
@ -154,6 +155,8 @@ pub const Node = extern union {
|
||||
div_exact,
|
||||
/// @byteOffsetOf(lhs, rhs)
|
||||
byte_offset_of,
|
||||
/// @shuffle(type, a, b, mask)
|
||||
shuffle,
|
||||
|
||||
negate,
|
||||
negate_wrap,
|
||||
@ -172,6 +175,7 @@ pub const Node = extern union {
|
||||
sizeof,
|
||||
alignof,
|
||||
typeof,
|
||||
typeinfo,
|
||||
type,
|
||||
|
||||
optional_type,
|
||||
@ -182,6 +186,10 @@ pub const Node = extern union {
|
||||
|
||||
/// @import("std").meta.sizeof(operand)
|
||||
std_meta_sizeof,
|
||||
/// @import("std").meta.shuffleVectorIndex(lhs, rhs)
|
||||
std_meta_shuffle_vector_index,
|
||||
/// @import("std").meta.Vector(lhs, rhs)
|
||||
std_meta_vector,
|
||||
/// @import("std").mem.zeroes(operand)
|
||||
std_mem_zeroes,
|
||||
/// @import("std").mem.zeroInit(lhs, rhs)
|
||||
@ -233,6 +241,7 @@ pub const Node = extern union {
|
||||
|
||||
.std_mem_zeroes,
|
||||
.@"return",
|
||||
.@"comptime",
|
||||
.discard,
|
||||
.std_math_Log2Int,
|
||||
.negate,
|
||||
@ -255,6 +264,7 @@ pub const Node = extern union {
|
||||
.sizeof,
|
||||
.alignof,
|
||||
.typeof,
|
||||
.typeinfo,
|
||||
=> Payload.UnOp,
|
||||
|
||||
.add,
|
||||
@ -308,6 +318,8 @@ pub const Node = extern union {
|
||||
.align_cast,
|
||||
.array_access,
|
||||
.std_mem_zeroinit,
|
||||
.std_meta_shuffle_vector_index,
|
||||
.std_meta_vector,
|
||||
.ptr_cast,
|
||||
.div_exact,
|
||||
.byte_offset_of,
|
||||
@ -346,6 +358,7 @@ pub const Node = extern union {
|
||||
.pub_inline_fn => Payload.PubInlineFn,
|
||||
.field_access => Payload.FieldAccess,
|
||||
.string_slice => Payload.StringSlice,
|
||||
.shuffle => Payload.Shuffle,
|
||||
};
|
||||
}
|
||||
|
||||
@ -678,6 +691,16 @@ pub const Payload = struct {
|
||||
end: usize,
|
||||
},
|
||||
};
|
||||
|
||||
pub const Shuffle = struct {
|
||||
base: Payload,
|
||||
data: struct {
|
||||
element_type: Node,
|
||||
a: Node,
|
||||
b: Node,
|
||||
mask_vector: Node,
|
||||
},
|
||||
};
|
||||
};
|
||||
|
||||
/// Converts the nodes into a Zig ast.
|
||||
@ -868,6 +891,16 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
const import_node = try renderStdImport(c, "mem", "zeroInit");
|
||||
return renderCall(c, import_node, &.{ payload.lhs, payload.rhs });
|
||||
},
|
||||
.std_meta_shuffle_vector_index => {
|
||||
const payload = node.castTag(.std_meta_shuffle_vector_index).?.data;
|
||||
const import_node = try renderStdImport(c, "meta", "shuffleVectorIndex");
|
||||
return renderCall(c, import_node, &.{ payload.lhs, payload.rhs });
|
||||
},
|
||||
.std_meta_vector => {
|
||||
const payload = node.castTag(.std_meta_vector).?.data;
|
||||
const import_node = try renderStdImport(c, "meta", "Vector");
|
||||
return renderCall(c, import_node, &.{ payload.lhs, payload.rhs });
|
||||
},
|
||||
.call => {
|
||||
const payload = node.castTag(.call).?.data;
|
||||
const lhs = try renderNode(c, payload.lhs);
|
||||
@ -964,6 +997,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
},
|
||||
});
|
||||
},
|
||||
.@"comptime" => {
|
||||
const payload = node.castTag(.@"comptime").?.data;
|
||||
return c.addNode(.{
|
||||
.tag = .@"comptime",
|
||||
.main_token = try c.addToken(.keyword_comptime, "comptime"),
|
||||
.data = .{
|
||||
.lhs = try renderNode(c, payload),
|
||||
.rhs = undefined,
|
||||
},
|
||||
});
|
||||
},
|
||||
.type => {
|
||||
const payload = node.castTag(.type).?.data;
|
||||
return c.addNode(.{
|
||||
@ -1217,6 +1261,15 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
const payload = node.castTag(.sizeof).?.data;
|
||||
return renderBuiltinCall(c, "@sizeOf", &.{payload});
|
||||
},
|
||||
.shuffle => {
|
||||
const payload = node.castTag(.shuffle).?.data;
|
||||
return renderBuiltinCall(c, "@shuffle", &.{
|
||||
payload.element_type,
|
||||
payload.a,
|
||||
payload.b,
|
||||
payload.mask_vector,
|
||||
});
|
||||
},
|
||||
.alignof => {
|
||||
const payload = node.castTag(.alignof).?.data;
|
||||
return renderBuiltinCall(c, "@alignOf", &.{payload});
|
||||
@ -1225,6 +1278,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
|
||||
const payload = node.castTag(.typeof).?.data;
|
||||
return renderBuiltinCall(c, "@TypeOf", &.{payload});
|
||||
},
|
||||
.typeinfo => {
|
||||
const payload = node.castTag(.typeinfo).?.data;
|
||||
return renderBuiltinCall(c, "@typeInfo", &.{payload});
|
||||
},
|
||||
.negate => return renderPrefixOp(c, node, .negation, .minus, "-"),
|
||||
.negate_wrap => return renderPrefixOp(c, node, .negation_wrap, .minus_percent, "-%"),
|
||||
.bit_not => return renderPrefixOp(c, node, .bit_not, .tilde, "~"),
|
||||
@ -2085,9 +2142,12 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
|
||||
.sizeof,
|
||||
.alignof,
|
||||
.typeof,
|
||||
.typeinfo,
|
||||
.std_meta_sizeof,
|
||||
.std_meta_cast,
|
||||
.std_meta_promoteIntLiteral,
|
||||
.std_meta_vector,
|
||||
.std_meta_shuffle_vector_index,
|
||||
.std_mem_zeroinit,
|
||||
.integer_literal,
|
||||
.float_literal,
|
||||
@ -2118,6 +2178,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
|
||||
.bool_to_int,
|
||||
.div_exact,
|
||||
.byte_offset_of,
|
||||
.shuffle,
|
||||
=> {
|
||||
// no grouping needed
|
||||
return renderNode(c, node);
|
||||
@ -2185,6 +2246,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
|
||||
.discard,
|
||||
.@"continue",
|
||||
.@"return",
|
||||
.@"comptime",
|
||||
.usingnamespace_builtins,
|
||||
.while_true,
|
||||
.if_not_break,
|
||||
@ -2327,6 +2389,8 @@ fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !Node
|
||||
_ = try c.addToken(.l_paren, "(");
|
||||
var arg_1: NodeIndex = 0;
|
||||
var arg_2: NodeIndex = 0;
|
||||
var arg_3: NodeIndex = 0;
|
||||
var arg_4: NodeIndex = 0;
|
||||
switch (args.len) {
|
||||
0 => {},
|
||||
1 => {
|
||||
@ -2337,18 +2401,41 @@ fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !Node
|
||||
_ = try c.addToken(.comma, ",");
|
||||
arg_2 = try renderNode(c, args[1]);
|
||||
},
|
||||
4 => {
|
||||
arg_1 = try renderNode(c, args[0]);
|
||||
_ = try c.addToken(.comma, ",");
|
||||
arg_2 = try renderNode(c, args[1]);
|
||||
_ = try c.addToken(.comma, ",");
|
||||
arg_3 = try renderNode(c, args[2]);
|
||||
_ = try c.addToken(.comma, ",");
|
||||
arg_4 = try renderNode(c, args[3]);
|
||||
},
|
||||
else => unreachable, // expand this function as needed.
|
||||
}
|
||||
|
||||
_ = try c.addToken(.r_paren, ")");
|
||||
return c.addNode(.{
|
||||
.tag = .builtin_call_two,
|
||||
.main_token = builtin_tok,
|
||||
.data = .{
|
||||
.lhs = arg_1,
|
||||
.rhs = arg_2,
|
||||
},
|
||||
});
|
||||
if (args.len <= 2) {
|
||||
return c.addNode(.{
|
||||
.tag = .builtin_call_two,
|
||||
.main_token = builtin_tok,
|
||||
.data = .{
|
||||
.lhs = arg_1,
|
||||
.rhs = arg_2,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
std.debug.assert(args.len == 4);
|
||||
|
||||
const params = try c.listToSpan(&.{ arg_1, arg_2, arg_3, arg_4 });
|
||||
return c.addNode(.{
|
||||
.tag = .builtin_call,
|
||||
.main_token = builtin_tok,
|
||||
.data = .{
|
||||
.lhs = params.start,
|
||||
.rhs = params.end,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn renderVar(c: *Context, node: Node) !NodeIndex {
|
||||
|
||||
src/type.zig (1945 lines changed; file diff suppressed because it is too large)
src/value.zig (837 lines changed; file diff suppressed because it is too large)
@ -2060,6 +2060,11 @@ bool ZigClangType_isRecordType(const ZigClangType *self) {
|
||||
return casted->isRecordType();
|
||||
}
|
||||
|
||||
bool ZigClangType_isVectorType(const ZigClangType *self) {
|
||||
auto casted = reinterpret_cast<const clang::Type *>(self);
|
||||
return casted->isVectorType();
|
||||
}
|
||||
|
||||
bool ZigClangType_isIncompleteOrZeroLengthArrayType(const ZigClangQualType *self,
|
||||
const struct ZigClangASTContext *ctx)
|
||||
{
|
||||
@ -2752,6 +2757,16 @@ struct ZigClangQualType ZigClangBinaryOperator_getType(const struct ZigClangBina
|
||||
return bitcast(casted->getType());
|
||||
}
|
||||
|
||||
const struct ZigClangExpr *ZigClangConvertVectorExpr_getSrcExpr(const struct ZigClangConvertVectorExpr *self) {
|
||||
auto casted = reinterpret_cast<const clang::ConvertVectorExpr *>(self);
|
||||
return reinterpret_cast<const struct ZigClangExpr *>(casted->getSrcExpr());
|
||||
}
|
||||
|
||||
struct ZigClangQualType ZigClangConvertVectorExpr_getTypeSourceInfo_getType(const struct ZigClangConvertVectorExpr *self) {
|
||||
auto casted = reinterpret_cast<const clang::ConvertVectorExpr *>(self);
|
||||
return bitcast(casted->getTypeSourceInfo()->getType());
|
||||
}
|
||||
|
||||
struct ZigClangQualType ZigClangDecayedType_getDecayedType(const struct ZigClangDecayedType *self) {
|
||||
auto casted = reinterpret_cast<const clang::DecayedType *>(self);
|
||||
return bitcast(casted->getDecayedType());
|
||||
@ -2857,6 +2872,16 @@ struct ZigClangQualType ZigClangValueDecl_getType(const struct ZigClangValueDecl
|
||||
return bitcast(casted->getType());
|
||||
}
|
||||
|
||||
struct ZigClangQualType ZigClangVectorType_getElementType(const struct ZigClangVectorType *self) {
|
||||
auto casted = reinterpret_cast<const clang::VectorType *>(self);
|
||||
return bitcast(casted->getElementType());
|
||||
}
|
||||
|
||||
unsigned ZigClangVectorType_getNumElements(const struct ZigClangVectorType *self) {
|
||||
auto casted = reinterpret_cast<const clang::VectorType *>(self);
|
||||
return casted->getNumElements();
|
||||
}
|
||||
|
||||
const struct ZigClangExpr *ZigClangWhileStmt_getCond(const struct ZigClangWhileStmt *self) {
|
||||
auto casted = reinterpret_cast<const clang::WhileStmt *>(self);
|
||||
return reinterpret_cast<const struct ZigClangExpr *>(casted->getCond());
|
||||
@ -2936,6 +2961,15 @@ struct ZigClangSourceLocation ZigClangUnaryExprOrTypeTraitExpr_getBeginLoc(
|
||||
return bitcast(casted->getBeginLoc());
|
||||
}
|
||||
|
||||
unsigned ZigClangShuffleVectorExpr_getNumSubExprs(const ZigClangShuffleVectorExpr *self) {
|
||||
auto casted = reinterpret_cast<const clang::ShuffleVectorExpr *>(self);
|
||||
return casted->getNumSubExprs();
|
||||
}
|
||||
|
||||
const struct ZigClangExpr *ZigClangShuffleVectorExpr_getExpr(const struct ZigClangShuffleVectorExpr *self, unsigned idx) {
|
||||
auto casted = reinterpret_cast<const clang::ShuffleVectorExpr *>(self);
|
||||
return reinterpret_cast<const struct ZigClangExpr *>(casted->getExpr(idx));
|
||||
}
|
||||
|
||||
enum ZigClangUnaryExprOrTypeTrait_Kind ZigClangUnaryExprOrTypeTraitExpr_getKind(
|
||||
const struct ZigClangUnaryExprOrTypeTraitExpr *self)
|
||||
|
||||
@ -1071,6 +1071,7 @@ ZIG_EXTERN_C bool ZigClangType_isBooleanType(const struct ZigClangType *self);
|
||||
ZIG_EXTERN_C bool ZigClangType_isVoidType(const struct ZigClangType *self);
|
||||
ZIG_EXTERN_C bool ZigClangType_isArrayType(const struct ZigClangType *self);
|
||||
ZIG_EXTERN_C bool ZigClangType_isRecordType(const struct ZigClangType *self);
|
||||
ZIG_EXTERN_C bool ZigClangType_isVectorType(const struct ZigClangType *self);
|
||||
ZIG_EXTERN_C bool ZigClangType_isIncompleteOrZeroLengthArrayType(const ZigClangQualType *self, const struct ZigClangASTContext *ctx);
|
||||
ZIG_EXTERN_C bool ZigClangType_isConstantArrayType(const ZigClangType *self);
|
||||
ZIG_EXTERN_C const char *ZigClangType_getTypeClassName(const struct ZigClangType *self);
|
||||
@ -1206,6 +1207,9 @@ ZIG_EXTERN_C const struct ZigClangExpr *ZigClangBinaryOperator_getLHS(const stru
|
||||
ZIG_EXTERN_C const struct ZigClangExpr *ZigClangBinaryOperator_getRHS(const struct ZigClangBinaryOperator *);
|
||||
ZIG_EXTERN_C struct ZigClangQualType ZigClangBinaryOperator_getType(const struct ZigClangBinaryOperator *);
|
||||
|
||||
ZIG_EXTERN_C const struct ZigClangExpr *ZigClangConvertVectorExpr_getSrcExpr(const struct ZigClangConvertVectorExpr *);
|
||||
ZIG_EXTERN_C struct ZigClangQualType ZigClangConvertVectorExpr_getTypeSourceInfo_getType(const struct ZigClangConvertVectorExpr *);
|
||||
|
||||
ZIG_EXTERN_C struct ZigClangQualType ZigClangDecayedType_getDecayedType(const struct ZigClangDecayedType *);
|
||||
|
||||
ZIG_EXTERN_C const struct ZigClangCompoundStmt *ZigClangStmtExpr_getSubStmt(const struct ZigClangStmtExpr *);
|
||||
@ -1235,6 +1239,9 @@ ZIG_EXTERN_C struct ZigClangSourceLocation ZigClangUnaryOperator_getBeginLoc(con
|
||||
|
||||
ZIG_EXTERN_C struct ZigClangQualType ZigClangValueDecl_getType(const struct ZigClangValueDecl *);
|
||||
|
||||
ZIG_EXTERN_C struct ZigClangQualType ZigClangVectorType_getElementType(const struct ZigClangVectorType *);
|
||||
ZIG_EXTERN_C unsigned ZigClangVectorType_getNumElements(const struct ZigClangVectorType *);
|
||||
|
||||
ZIG_EXTERN_C const struct ZigClangExpr *ZigClangWhileStmt_getCond(const struct ZigClangWhileStmt *);
|
||||
ZIG_EXTERN_C const struct ZigClangStmt *ZigClangWhileStmt_getBody(const struct ZigClangWhileStmt *);
|
||||
|
||||
@ -1259,6 +1266,9 @@ ZIG_EXTERN_C struct ZigClangQualType ZigClangUnaryExprOrTypeTraitExpr_getTypeOfA
|
||||
ZIG_EXTERN_C struct ZigClangSourceLocation ZigClangUnaryExprOrTypeTraitExpr_getBeginLoc(const struct ZigClangUnaryExprOrTypeTraitExpr *);
|
||||
ZIG_EXTERN_C enum ZigClangUnaryExprOrTypeTrait_Kind ZigClangUnaryExprOrTypeTraitExpr_getKind(const struct ZigClangUnaryExprOrTypeTraitExpr *);
|
||||
|
||||
ZIG_EXTERN_C unsigned ZigClangShuffleVectorExpr_getNumSubExprs(const struct ZigClangShuffleVectorExpr *);
|
||||
ZIG_EXTERN_C const struct ZigClangExpr *ZigClangShuffleVectorExpr_getExpr(const struct ZigClangShuffleVectorExpr *, unsigned);
|
||||
|
||||
ZIG_EXTERN_C const struct ZigClangStmt *ZigClangDoStmt_getBody(const struct ZigClangDoStmt *);
|
||||
ZIG_EXTERN_C const struct ZigClangExpr *ZigClangDoStmt_getCond(const struct ZigClangDoStmt *);
|
||||
|
||||
|
||||
src/zir.zig (141 lines changed)
@ -37,8 +37,6 @@ pub const Code = struct {
|
||||
string_bytes: []u8,
|
||||
/// The meaning of this data is determined by `Inst.Tag` value.
|
||||
extra: []u32,
|
||||
/// Used for decl_val and decl_ref instructions.
|
||||
decls: []*Module.Decl,
|
||||
|
||||
/// Returns the requested data, as well as the new index which is at the start of the
|
||||
/// trailers for the object.
|
||||
@ -78,7 +76,6 @@ pub const Code = struct {
|
||||
code.instructions.deinit(gpa);
|
||||
gpa.free(code.string_bytes);
|
||||
gpa.free(code.extra);
|
||||
gpa.free(code.decls);
|
||||
code.* = undefined;
|
||||
}
|
||||
|
||||
@ -133,7 +130,7 @@ pub const Inst = struct {
|
||||
/// Same as `alloc` except mutable.
|
||||
alloc_mut,
|
||||
/// Same as `alloc` except the type is inferred.
|
||||
/// The operand is unused.
|
||||
/// Uses the `node` union field.
|
||||
alloc_inferred,
|
||||
/// Same as `alloc_inferred` except mutable.
|
||||
alloc_inferred_mut,
|
||||
@ -267,9 +264,6 @@ pub const Inst = struct {
|
||||
/// only the taken branch is analyzed. The then block and else block must
|
||||
/// terminate with an "inline" variant of a noreturn instruction.
|
||||
condbr_inline,
|
||||
/// A comptime known value.
|
||||
/// Uses the `const` union field.
|
||||
@"const",
|
||||
/// A struct type definition. Contains references to ZIR instructions for
|
||||
/// the field types, defaults, and alignments.
|
||||
/// Uses the `pl_node` union field. Payload is `StructDecl`.
|
||||
@ -286,6 +280,8 @@ pub const Inst = struct {
|
||||
/// the field value expressions and optional type tag expression.
|
||||
/// Uses the `pl_node` union field. Payload is `EnumDecl`.
|
||||
enum_decl,
|
||||
/// Same as `enum_decl`, except the enum is non-exhaustive.
|
||||
enum_decl_nonexhaustive,
|
||||
/// An opaque type definition. Provides an AST node only.
|
||||
/// Uses the `node` union field.
|
||||
opaque_decl,
|
||||
@ -332,12 +328,16 @@ pub const Inst = struct {
|
||||
error_union_type,
|
||||
/// `error.Foo` syntax. Uses the `str_tok` field of the Data union.
|
||||
error_value,
|
||||
/// Implements the `@export` builtin function.
|
||||
/// Uses the `pl_node` union field. Payload is `Bin`.
|
||||
@"export",
|
||||
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
|
||||
/// to the named field. The field name is stored in string_bytes. Used by a.b syntax.
|
||||
/// Uses `pl_node` field. The AST node is the a.b syntax. Payload is Field.
|
||||
field_ptr,
|
||||
/// Given a struct or object that contains virtual fields, returns the named field.
|
||||
/// The field name is stored in string_bytes. Used by a.b syntax.
|
||||
/// This instruction also accepts a pointer.
|
||||
/// Uses `pl_node` field. The AST node is the a.b syntax. Payload is Field.
|
||||
field_val,
|
||||
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
|
||||
@ -363,11 +363,19 @@ pub const Inst = struct {
|
||||
fn_type_cc,
|
||||
/// Same as `fn_type_cc` but the function is variadic.
|
||||
fn_type_cc_var_args,
|
||||
/// Implements the `@hasDecl` builtin.
|
||||
/// Uses the `pl_node` union field. Payload is `Bin`.
|
||||
has_decl,
|
||||
/// `@import(operand)`.
|
||||
/// Uses the `un_node` field.
|
||||
import,
|
||||
/// Integer literal that fits in a u64. Uses the int union value.
|
||||
int,
|
||||
/// A float literal that fits in a f32. Uses the float union value.
|
||||
float,
|
||||
/// A float literal that fits in a f128. Uses the `pl_node` union value.
|
||||
/// Payload is `Float128`.
|
||||
float128,
|
||||
/// Convert an integer value to another integer type, asserting that the destination type
|
||||
/// can hold the same mathematical value.
|
||||
/// Uses the `pl_node` field. AST is the `@intCast` syntax.
|
||||
@ -659,11 +667,28 @@ pub const Inst = struct {
|
||||
/// Given a set of `field_ptr` instructions, assumes they are all part of a struct
|
||||
/// initialization expression, and emits compile errors for duplicate fields
|
||||
/// as well as missing fields, if applicable.
|
||||
/// This instruction asserts that there is at least one field_ptr instruction,
|
||||
/// because it must use one of them to find out the struct type.
|
||||
/// Uses the `pl_node` field. Payload is `Block`.
|
||||
validate_struct_init_ptr,
|
||||
/// A struct literal with a specified type, with no fields.
|
||||
/// Uses the `un_node` field.
|
||||
struct_init_empty,
|
||||
/// Given a struct, union, enum, or opaque and a field name, returns the field type.
|
||||
/// Uses the `pl_node` field. Payload is `FieldType`.
|
||||
field_type,
|
||||
/// Finalizes a typed struct initialization, performs validation, and returns the
|
||||
/// struct value.
|
||||
/// Uses the `pl_node` field. Payload is `StructInit`.
|
||||
struct_init,
|
||||
/// Converts an integer into an enum value.
|
||||
/// Uses `pl_node` with payload `Bin`. `lhs` is enum type, `rhs` is operand.
|
||||
int_to_enum,
|
||||
/// Converts an enum value into an integer. Resulting type will be the tag type
|
||||
/// of the enum. Uses `un_node`.
|
||||
enum_to_int,
|
||||
/// Implements the `@typeInfo` builtin. Uses `un_node`.
|
||||
type_info,
|
||||
|
||||
/// Returns whether the instruction is one of the control flow "noreturn" types.
|
||||
/// Function calls do not count.
|
||||
@ -709,12 +734,12 @@ pub const Inst = struct {
|
||||
.cmp_gt,
|
||||
.cmp_neq,
|
||||
.coerce_result_ptr,
|
||||
.@"const",
|
||||
.struct_decl,
|
||||
.struct_decl_packed,
|
||||
.struct_decl_extern,
|
||||
.union_decl,
|
||||
.enum_decl,
|
||||
.enum_decl_nonexhaustive,
|
||||
.opaque_decl,
|
||||
.dbg_stmt_node,
|
||||
.decl_ref,
|
||||
@ -727,6 +752,7 @@ pub const Inst = struct {
|
||||
.elem_val_node,
|
||||
.ensure_result_used,
|
||||
.ensure_result_non_error,
|
||||
.@"export",
|
||||
.floatcast,
|
||||
.field_ptr,
|
||||
.field_val,
|
||||
@ -736,7 +762,10 @@ pub const Inst = struct {
|
||||
.fn_type_var_args,
|
||||
.fn_type_cc,
|
||||
.fn_type_cc_var_args,
|
||||
.has_decl,
|
||||
.int,
|
||||
.float,
|
||||
.float128,
|
||||
.intcast,
|
||||
.int_type,
|
||||
.is_non_null,
|
||||
@ -819,6 +848,11 @@ pub const Inst = struct {
|
||||
.switch_block_ref_under_multi,
|
||||
.validate_struct_init_ptr,
|
||||
.struct_init_empty,
|
||||
.struct_init,
|
||||
.field_type,
|
||||
.int_to_enum,
|
||||
.enum_to_int,
|
||||
.type_info,
|
||||
=> false,
|
||||
|
||||
.@"break",
|
||||
@ -1181,7 +1215,6 @@ pub const Inst = struct {
|
||||
}
|
||||
},
|
||||
bin: Bin,
|
||||
@"const": *TypedValue,
|
||||
/// For strings which may contain null bytes.
|
||||
str: struct {
|
||||
/// Offset into `string_bytes`.
|
||||
@ -1223,6 +1256,16 @@ pub const Inst = struct {
|
||||
/// Offset from Decl AST node index.
|
||||
node: i32,
|
||||
int: u64,
|
||||
float: struct {
|
||||
/// Offset from Decl AST node index.
|
||||
/// `Tag` determines which kind of AST node this points to.
|
||||
src_node: i32,
|
||||
number: f32,
|
||||
|
||||
pub fn src(self: @This()) LazySrcLoc {
|
||||
return .{ .node_offset = self.src_node };
|
||||
}
|
||||
},
|
||||
array_type_sentinel: struct {
|
||||
len: Ref,
|
||||
/// index into extra, points to an `ArrayTypeSentinel`
|
||||
@ -1504,6 +1547,40 @@ pub const Inst = struct {
|
||||
tag_type: Ref,
|
||||
fields_len: u32,
|
||||
};
|
||||
|
||||
/// A f128 value, broken up into 4 u32 parts.
|
||||
pub const Float128 = struct {
|
||||
piece0: u32,
|
||||
piece1: u32,
|
||||
piece2: u32,
|
||||
piece3: u32,
|
||||
|
||||
pub fn get(self: Float128) f128 {
|
||||
const int_bits = @as(u128, self.piece0) |
|
||||
(@as(u128, self.piece1) << 32) |
|
||||
(@as(u128, self.piece2) << 64) |
|
||||
(@as(u128, self.piece3) << 96);
|
||||
return @bitCast(f128, int_bits);
|
||||
}
|
||||
};
|
||||
|
||||
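A small round-trip sketch of the packing that get() above undoes; this is illustrative only, and assumes the file-scope std import.

test "Float128 round trip (illustrative sketch)" {
    const x: f128 = 1.25;
    const bits = @bitCast(u128, x);
    const val = Float128{
        .piece0 = @truncate(u32, bits),
        .piece1 = @truncate(u32, bits >> 32),
        .piece2 = @truncate(u32, bits >> 64),
        .piece3 = @truncate(u32, bits >> 96),
    };
    std.testing.expectEqual(x, val.get());
}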
/// Trailing is an item per field.
|
||||
pub const StructInit = struct {
|
||||
fields_len: u32,
|
||||
|
||||
pub const Item = struct {
|
||||
/// The `field_type` ZIR instruction for this field init.
|
||||
field_type: Index,
|
||||
/// The field init expression to be used as the field value.
|
||||
init: Ref,
|
||||
};
|
||||
};
|
||||
|
||||
pub const FieldType = struct {
|
||||
container_type: Ref,
|
||||
/// Offset into `string_bytes`, null terminated.
|
||||
name_start: u32,
|
||||
};
|
||||
};
|
||||
|
||||
pub const SpecialProng = enum { none, @"else", under };
|
||||
@ -1533,12 +1610,11 @@ const Writer = struct {
|
||||
.intcast,
|
||||
.store,
|
||||
.store_to_block_ptr,
|
||||
.store_to_inferred_ptr,
|
||||
=> try self.writeBin(stream, inst),
|
||||
|
||||
.alloc,
|
||||
.alloc_mut,
|
||||
.alloc_inferred,
|
||||
.alloc_inferred_mut,
|
||||
.indexable_ptr_len,
|
||||
.bit_not,
|
||||
.bool_not,
|
||||
@ -1578,6 +1654,8 @@ const Writer = struct {
|
||||
.typeof,
|
||||
.typeof_elem,
|
||||
.struct_init_empty,
|
||||
.enum_to_int,
|
||||
.type_info,
|
||||
=> try self.writeUnNode(stream, inst),
|
||||
|
||||
.ref,
|
||||
@ -1591,11 +1669,12 @@ const Writer = struct {
|
||||
=> try self.writeBoolBr(stream, inst),
|
||||
|
||||
.array_type_sentinel => try self.writeArrayTypeSentinel(stream, inst),
|
||||
.@"const" => try self.writeConst(stream, inst),
|
||||
.param_type => try self.writeParamType(stream, inst),
|
||||
.ptr_type_simple => try self.writePtrTypeSimple(stream, inst),
|
||||
.ptr_type => try self.writePtrType(stream, inst),
|
||||
.int => try self.writeInt(stream, inst),
|
||||
.float => try self.writeFloat(stream, inst),
|
||||
.float128 => try self.writeFloat128(stream, inst),
|
||||
.str => try self.writeStr(stream, inst),
|
||||
.elided => try stream.writeAll(")"),
|
||||
.int_type => try self.writeIntType(stream, inst),
|
||||
@ -1616,6 +1695,9 @@ const Writer = struct {
|
||||
.slice_sentinel,
|
||||
.union_decl,
|
||||
.enum_decl,
|
||||
.enum_decl_nonexhaustive,
|
||||
.struct_init,
|
||||
.field_type,
|
||||
=> try self.writePlNode(stream, inst),
|
||||
|
||||
.add,
|
||||
@ -1635,15 +1717,18 @@ const Writer = struct {
|
||||
.cmp_gt,
|
||||
.cmp_neq,
|
||||
.div,
|
||||
.has_decl,
|
||||
.mod_rem,
|
||||
.shl,
|
||||
.shr,
|
||||
.xor,
|
||||
.store_node,
|
||||
.error_union_type,
|
||||
.@"export",
|
||||
.merge_error_sets,
|
||||
.bit_and,
|
||||
.bit_or,
|
||||
.int_to_enum,
|
||||
=> try self.writePlNodeBin(stream, inst),
|
||||
|
||||
.call,
|
||||
@ -1701,6 +1786,8 @@ const Writer = struct {
|
||||
.ret_type,
|
||||
.repeat,
|
||||
.repeat_inline,
|
||||
.alloc_inferred,
|
||||
.alloc_inferred_mut,
|
||||
=> try self.writeNode(stream, inst),
|
||||
|
||||
.error_value,
|
||||
@ -1726,7 +1813,6 @@ const Writer = struct {
|
||||
|
||||
.bitcast,
|
||||
.bitcast_result_ptr,
|
||||
.store_to_inferred_ptr,
|
||||
=> try stream.writeAll("TODO)"),
|
||||
}
|
||||
}
|
||||
@ -1770,15 +1856,6 @@ const Writer = struct {
|
||||
try stream.writeAll("TODO)");
|
||||
}
|
||||
|
||||
fn writeConst(
|
||||
self: *Writer,
|
||||
stream: anytype,
|
||||
inst: Inst.Index,
|
||||
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
|
||||
const inst_data = self.code.instructions.items(.data)[inst].@"const";
|
||||
try stream.writeAll("TODO)");
|
||||
}
|
||||
|
||||
fn writeParamType(
|
||||
self: *Writer,
|
||||
stream: anytype,
|
||||
@ -1816,6 +1893,23 @@ const Writer = struct {
|
||||
try stream.print("{d})", .{inst_data});
|
||||
}
|
||||
|
||||
fn writeFloat(self: *Writer, stream: anytype, inst: Inst.Index) !void {
|
||||
const inst_data = self.code.instructions.items(.data)[inst].float;
|
||||
const src = inst_data.src();
|
||||
try stream.print("{d}) ", .{inst_data.number});
|
||||
try self.writeSrc(stream, src);
|
||||
}
|
||||
|
||||
fn writeFloat128(self: *Writer, stream: anytype, inst: Inst.Index) !void {
|
||||
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
|
||||
const extra = self.code.extraData(Inst.Float128, inst_data.payload_index).data;
|
||||
const src = inst_data.src();
|
||||
const number = extra.get();
|
||||
// TODO improve std.format to be able to print f128 values
|
||||
try stream.print("{d}) ", .{@floatCast(f64, number)});
|
||||
try self.writeSrc(stream, src);
|
||||
}
|
||||
|
||||
fn writeStr(
|
||||
self: *Writer,
|
||||
stream: anytype,
|
||||
@ -2133,7 +2227,8 @@ const Writer = struct {
|
||||
|
||||
fn writePlNodeDecl(self: *Writer, stream: anytype, inst: Inst.Index) !void {
|
||||
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
|
||||
const decl = self.code.decls[inst_data.payload_index];
|
||||
const owner_decl = self.scope.ownerDecl().?;
|
||||
const decl = owner_decl.dependencies.entries.items[inst_data.payload_index].key;
|
||||
try stream.print("{s}) ", .{decl.name});
|
||||
try self.writeSrc(stream, inst_data.src());
|
||||
}
|
||||
|
||||
@ -1308,4 +1308,106 @@ pub fn addCases(cases: *tests.RunTranslatedCContext) void {
|
||||
\\ ufoo = (uval += 100000000); // compile error if @truncate() not inserted
|
||||
\\}
|
||||
, "");
|
||||
|
||||
cases.add("basic vector expressions",
|
||||
\\#include <stdlib.h>
|
||||
\\#include <stdint.h>
|
||||
\\typedef int16_t __v8hi __attribute__((__vector_size__(16)));
|
||||
\\int main(int argc, char**argv) {
|
||||
\\ __v8hi uninitialized;
|
||||
\\ __v8hi empty_init = {};
|
||||
\\ __v8hi partial_init = {0, 1, 2, 3};
|
||||
\\
|
||||
\\ __v8hi a = {0, 1, 2, 3, 4, 5, 6, 7};
|
||||
\\ __v8hi b = (__v8hi) {100, 200, 300, 400, 500, 600, 700, 800};
|
||||
\\
|
||||
\\ __v8hi sum = a + b;
|
||||
\\ for (int i = 0; i < 8; i++) {
|
||||
\\ if (sum[i] != a[i] + b[i]) abort();
|
||||
\\ }
|
||||
\\ return 0;
|
||||
\\}
|
||||
, "");
|
||||
|
||||
cases.add("__builtin_shufflevector",
|
||||
\\#include <stdlib.h>
|
||||
\\#include <stdint.h>
|
||||
\\typedef int16_t __v4hi __attribute__((__vector_size__(8)));
|
||||
\\typedef int16_t __v8hi __attribute__((__vector_size__(16)));
|
||||
\\int main(int argc, char**argv) {
|
||||
\\ __v8hi v8_a = {0, 1, 2, 3, 4, 5, 6, 7};
|
||||
\\ __v8hi v8_b = {100, 200, 300, 400, 500, 600, 700, 800};
|
||||
\\ __v8hi shuffled = __builtin_shufflevector(v8_a, v8_b, 0, 1, 2, 3, 8, 9, 10, 11);
|
||||
\\ for (int i = 0; i < 8; i++) {
|
||||
\\ if (i < 4) {
|
||||
\\ if (shuffled[i] != v8_a[i]) abort();
|
||||
\\ } else {
|
||||
\\ if (shuffled[i] != v8_b[i - 4]) abort();
|
||||
\\ }
|
||||
\\ }
|
||||
\\ shuffled = __builtin_shufflevector(
|
||||
\\ (__v8hi) {-1, -1, -1, -1, -1, -1, -1, -1},
|
||||
\\ (__v8hi) {42, 42, 42, 42, 42, 42, 42, 42},
|
||||
\\ 0, 1, 2, 3, 8, 9, 10, 11
|
||||
\\ );
|
||||
\\ for (int i = 0; i < 8; i++) {
|
||||
\\ if (i < 4) {
|
||||
\\ if (shuffled[i] != -1) abort();
|
||||
\\ } else {
|
||||
\\ if (shuffled[i] != 42) abort();
|
||||
\\ }
|
||||
\\ }
|
||||
\\ __v4hi shuffled_to_fewer_elements = __builtin_shufflevector(v8_a, v8_b, 0, 1, 8, 9);
|
||||
\\ for (int i = 0; i < 4; i++) {
|
||||
\\ if (i < 2) {
|
||||
\\ if (shuffled_to_fewer_elements[i] != v8_a[i]) abort();
|
||||
\\ } else {
|
||||
\\ if (shuffled_to_fewer_elements[i] != v8_b[i - 2]) abort();
|
||||
\\ }
|
||||
\\ }
|
||||
\\ __v4hi v4_a = {0, 1, 2, 3};
|
||||
\\ __v4hi v4_b = {100, 200, 300, 400};
|
||||
\\ __v8hi shuffled_to_more_elements = __builtin_shufflevector(v4_a, v4_b, 0, 1, 2, 3, 4, 5, 6, 7);
|
||||
\\ for (int i = 0; i < 4; i++) {
|
||||
\\ if (shuffled_to_more_elements[i] != v4_a[i]) abort();
|
||||
\\ if (shuffled_to_more_elements[i + 4] != v4_b[i]) abort();
|
||||
\\ }
|
||||
\\ return 0;
|
||||
\\}
|
||||
, "");
|
||||
|
||||
cases.add("__builtin_convertvector",
|
||||
\\#include <stdlib.h>
|
||||
\\#include <stdint.h>
|
||||
\\typedef int16_t __v8hi __attribute__((__vector_size__(16)));
|
||||
\\typedef uint16_t __v8hu __attribute__((__vector_size__(16)));
|
||||
\\int main(int argc, char**argv) {
|
||||
\\ __v8hi signed_vector = { 1, 2, 3, 4, -1, -2, -3,-4};
|
||||
\\ __v8hu unsigned_vector = __builtin_convertvector(signed_vector, __v8hu);
|
||||
\\
|
||||
\\ for (int i = 0; i < 8; i++) {
|
||||
\\ if (unsigned_vector[i] != (uint16_t)signed_vector[i]) abort();
|
||||
\\ }
|
||||
\\ return 0;
|
||||
\\}
|
||||
, "");
|
||||
|
||||
cases.add("vector casting",
|
||||
\\#include <stdlib.h>
|
||||
\\#include <stdint.h>
|
||||
\\typedef int8_t __v8qi __attribute__((__vector_size__(8)));
|
||||
\\typedef uint8_t __v8qu __attribute__((__vector_size__(8)));
|
||||
\\int main(int argc, char**argv) {
|
||||
\\ __v8qi signed_vector = { 1, 2, 3, 4, -1, -2, -3,-4};
|
||||
\\
|
||||
\\ uint64_t big_int = (uint64_t) signed_vector;
|
||||
\\ if (big_int != 0x01020304FFFEFDFCULL && big_int != 0xFCFDFEFF04030201ULL) abort();
|
||||
\\ __v8qu unsigned_vector = (__v8qu) big_int;
|
||||
\\ for (int i = 0; i < 8; i++) {
|
||||
\\ if (unsigned_vector[i] != (uint8_t)signed_vector[i] && unsigned_vector[i] != (uint8_t)signed_vector[7 - i]) abort();
|
||||
\\ }
|
||||
\\ return 0;
|
||||
\\}
|
||||
, "");
|
||||
|
||||
}
|
||||
|
@ -3,16 +3,56 @@ const os = std.os;
const tests = @import("tests.zig");

pub fn addCases(cases: *tests.StackTracesContext) void {
const source_return =
\\const std = @import("std");
\\
cases.addCase(.{
.name = "return",
.source =
\\pub fn main() !void {
\\ return error.TheSkyIsFalling;
\\}
;
const source_try_return =
\\const std = @import("std");
\\
,
.Debug = .{
.expect =
\\error: TheSkyIsFalling
\\source.zig:2:5: [address] in main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
},
.ReleaseSafe = .{
.exclude = struct {
pub fn exclude() bool {
return if (std.builtin.object_format == .elf) true else false;
}
},
.exclude_os = .{
.windows, // segfault
},
.expect =
\\error: TheSkyIsFalling
\\source.zig:2:5: [address] in [function]
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
},
.ReleaseFast = .{
.expect =
\\error: TheSkyIsFalling
\\
,
},
.ReleaseSmall = .{
.expect =
\\error: TheSkyIsFalling
\\
,
},
});

cases.addCase(.{
.name = "try return",
.source =
\\fn foo() !void {
\\ return error.TheSkyIsFalling;
\\}
@ -20,10 +60,56 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
\\pub fn main() !void {
\\ try foo();
\\}
;
const source_try_try_return_return =
\\const std = @import("std");
\\
,
.Debug = .{
.expect =
\\error: TheSkyIsFalling
\\source.zig:2:5: [address] in foo (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:6:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
},
.ReleaseSafe = .{
.exclude = struct {
pub fn exclude() bool {
return if (std.builtin.object_format == .elf) true else false;
}
},
.exclude_os = .{
.windows, // segfault
},
.expect =
\\error: TheSkyIsFalling
\\source.zig:2:5: [address] in [function]
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:6:5: [address] in [function]
\\ try foo();
\\ ^
\\
,
},
.ReleaseFast = .{
.expect =
\\error: TheSkyIsFalling
\\
,
},
.ReleaseSmall = .{
.expect =
\\error: TheSkyIsFalling
\\
,
},
});

cases.addCase(.{
.name = "try try return return",
.source =
\\fn foo() !void {
\\ try bar();
\\}
@ -39,9 +125,71 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
\\pub fn main() !void {
\\ try foo();
\\}
;
,
.Debug = .{
.expect =
\\error: TheSkyIsFalling
\\source.zig:10:5: [address] in make_error (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:6:5: [address] in bar (test)
\\ return make_error();
\\ ^
\\source.zig:2:5: [address] in foo (test)
\\ try bar();
\\ ^
\\source.zig:14:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
},
.ReleaseSafe = .{
.exclude = struct {
pub fn exclude() bool {
return if (std.builtin.object_format == .elf) true else false;
}
},
.exclude_os = .{
.windows, // segfault
},
.expect =
\\error: TheSkyIsFalling
\\source.zig:10:5: [address] in [function]
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:6:5: [address] in [function]
\\ return make_error();
\\ ^
\\source.zig:2:5: [address] in [function]
\\ try bar();
\\ ^
\\source.zig:14:5: [address] in [function]
\\ try foo();
\\ ^
\\
,
},
.ReleaseFast = .{
.expect =
\\error: TheSkyIsFalling
\\
,
},
.ReleaseSmall = .{
.expect =
\\error: TheSkyIsFalling
\\
,
},
});

const source_dumpCurrentStackTrace =
cases.addCase(.{
.exclude_os = .{
.windows,
},
.name = "dumpCurrentStackTrace",
.source =
\\const std = @import("std");
\\
\\fn bar() void {
@ -54,401 +202,17 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
\\ foo();
\\ return 1;
\\}
;

switch (std.Target.current.os.tag) {
.freebsd => {
cases.addCase(
"return",
source_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try return",
source_try_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in foo (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try try return return",
source_try_try_return_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:12:5: [address] in make_error (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in bar (test)
\\ return make_error();
\\ ^
\\source.zig:4:5: [address] in foo (test)
\\ try bar();
\\ ^
\\source.zig:16:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
,
.Debug = .{
.expect =
\\source.zig:7:8: [address] in foo (test)
\\ bar();
\\ ^
\\source.zig:10:8: [address] in main (test)
\\ foo();
\\ ^
\\
,
},
.linux => {
cases.addCase(
"return",
source_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try return",
source_try_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in foo (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try try return return",
source_try_try_return_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:12:5: [address] in make_error (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in bar (test)
\\ return make_error();
\\ ^
\\source.zig:4:5: [address] in foo (test)
\\ try bar();
\\ ^
\\source.zig:16:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"dumpCurrentStackTrace",
source_dumpCurrentStackTrace,
[_][]const u8{
// debug
\\source.zig:7:8: [address] in foo (test)
\\ bar();
\\ ^
\\source.zig:10:8: [address] in main (test)
\\ foo();
\\ ^
\\start.zig:342:29: [address] in std.start.posixCallMainAndExit (test)
\\ return root.main();
\\ ^
\\start.zig:163:5: [address] in std.start._start (test)
\\ @call(.{ .modifier = .never_inline }, posixCallMainAndExit, .{});
\\ ^
\\
,
// release-safe
// https://github.com/ziglang/zig/issues/8421
\\
,
// release-fast
\\
,
// release-small
\\
},
);
},
.macos => {
cases.addCase(
"return",
source_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
// release-safe
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in std.start.main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try return",
source_try_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in foo (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
// release-safe
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in std.start.main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in std.start.main (test)
\\ try foo();
\\ ^
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try try return return",
source_try_try_return_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:12:5: [address] in make_error (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in bar (test)
\\ return make_error();
\\ ^
\\source.zig:4:5: [address] in foo (test)
\\ try bar();
\\ ^
\\source.zig:16:5: [address] in main (test)
\\ try foo();
\\ ^
\\
,
// release-safe
\\error: TheSkyIsFalling
\\source.zig:12:5: [address] in std.start.main (test)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in std.start.main (test)
\\ return make_error();
\\ ^
\\source.zig:4:5: [address] in std.start.main (test)
\\ try bar();
\\ ^
\\source.zig:16:5: [address] in std.start.main (test)
\\ try foo();
\\ ^
\\
,
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
},
.windows => {
cases.addCase(
"return",
source_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in main (test.obj)
\\ return error.TheSkyIsFalling;
\\ ^
\\
,
// release-safe
// --disabled-- results in segmentation fault
"",
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try return",
source_try_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:4:5: [address] in foo (test.obj)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in main (test.obj)
\\ try foo();
\\ ^
\\
,
// release-safe
// --disabled-- results in segmentation fault
"",
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
cases.addCase(
"try try return return",
source_try_try_return_return,
[_][]const u8{
// debug
\\error: TheSkyIsFalling
\\source.zig:12:5: [address] in make_error (test.obj)
\\ return error.TheSkyIsFalling;
\\ ^
\\source.zig:8:5: [address] in bar (test.obj)
\\ return make_error();
\\ ^
\\source.zig:4:5: [address] in foo (test.obj)
\\ try bar();
\\ ^
\\source.zig:16:5: [address] in main (test.obj)
\\ try foo();
\\ ^
\\
,
// release-safe
// --disabled-- results in segmentation fault
"",
// release-fast
\\error: TheSkyIsFalling
\\
,
// release-small
\\error: TheSkyIsFalling
\\
},
);
},
else => {},
}
});
}

@ -280,6 +280,15 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
, "");

// If expression with breakpoint that does not get hit
case.addCompareOutput(
\\export fn main() c_int {
\\ var x: i32 = 1;
\\ if (x != 1) @breakpoint();
\\ return 0;
\\}
, "");

// Switch expression
case.addCompareOutput(
\\export fn main() c_int {
@ -481,6 +490,310 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
, "");
}

{
var case = ctx.exeFromCompiledC("structs", .{});
case.addError(
\\const Point = struct { x: i32, y: i32 };
\\export fn main() c_int {
\\ var p: Point = .{
\\ .y = 24,
\\ .x = 12,
\\ .y = 24,
\\ };
\\ return p.y - p.x - p.x;
\\}
, &.{
":6:10: error: duplicate field",
":4:10: note: other field here",
});
case.addError(
\\const Point = struct { x: i32, y: i32 };
\\export fn main() c_int {
\\ var p: Point = .{
\\ .y = 24,
\\ };
\\ return p.y - p.x - p.x;
\\}
, &.{
":3:21: error: mising struct field: x",
":1:15: note: struct 'Point' declared here",
});
case.addError(
\\const Point = struct { x: i32, y: i32 };
\\export fn main() c_int {
\\ var p: Point = .{
\\ .x = 12,
\\ .y = 24,
\\ .z = 48,
\\ };
\\ return p.y - p.x - p.x;
\\}
, &.{
":6:10: error: no field named 'z' in struct 'Point'",
":1:15: note: struct declared here",
});
case.addCompareOutput(
\\const Point = struct { x: i32, y: i32 };
\\export fn main() c_int {
\\ var p: Point = .{
\\ .x = 12,
\\ .y = 24,
\\ };
\\ return p.y - p.x - p.x;
\\}
, "");
}

{
var case = ctx.exeFromCompiledC("enums", .{});

case.addError(
\\const E1 = packed enum { a, b, c };
\\const E2 = extern enum { a, b, c };
\\export fn foo() void {
\\ const x = E1.a;
\\}
\\export fn bar() void {
\\ const x = E2.a;
\\}
, &.{
":1:12: error: enums do not support 'packed' or 'extern'; instead provide an explicit integer tag type",
":2:12: error: enums do not support 'packed' or 'extern'; instead provide an explicit integer tag type",
});

// comptime and types are caught in AstGen.
case.addError(
\\const E1 = enum {
\\ a,
\\ comptime b,
\\ c,
\\};
\\const E2 = enum {
\\ a,
\\ b: i32,
\\ c,
\\};
\\export fn foo() void {
\\ const x = E1.a;
\\}
\\export fn bar() void {
\\ const x = E2.a;
\\}
, &.{
":3:5: error: enum fields cannot be marked comptime",
":8:8: error: enum fields do not have types",
});

// @enumToInt, @intToEnum, enum literal coercion, field access syntax, comparison, switch
case.addCompareOutput(
\\const Number = enum { One, Two, Three };
\\
\\export fn main() c_int {
\\ var number1 = Number.One;
\\ var number2: Number = .Two;
\\ const number3 = @intToEnum(Number, 2);
\\ if (number1 == number2) return 1;
\\ if (number2 == number3) return 1;
\\ if (@enumToInt(number1) != 0) return 1;
\\ if (@enumToInt(number2) != 1) return 1;
\\ if (@enumToInt(number3) != 2) return 1;
\\ var x: Number = .Two;
\\ if (number2 != x) return 1;
\\ switch (x) {
\\ .One => return 1,
\\ .Two => return 0,
\\ number3 => return 2,
\\ }
\\}
, "");

// Specifying alignment is a parse error.
// This also tests going from a successful build to a parse error.
case.addError(
\\const E1 = enum {
\\ a,
\\ b align(4),
\\ c,
\\};
\\export fn foo() void {
\\ const x = E1.a;
\\}
, &.{
":3:7: error: expected ',', found 'align'",
});

// Redundant non-exhaustive enum mark.
// This also tests going from a parse error to an AstGen error.
case.addError(
\\const E1 = enum {
\\ a,
\\ _,
\\ b,
\\ c,
\\ _,
\\};
\\export fn foo() void {
\\ const x = E1.a;
\\}
, &.{
":6:5: error: redundant non-exhaustive enum mark",
":3:5: note: other mark here",
});

case.addError(
\\const E1 = enum {
\\ a,
\\ b,
\\ c,
\\ _ = 10,
\\};
\\export fn foo() void {
\\ const x = E1.a;
\\}
, &.{
":5:9: error: '_' is used to mark an enum as non-exhaustive and cannot be assigned a value",
});

case.addError(
\\const E1 = enum {};
\\export fn foo() void {
\\ const x = E1.a;
\\}
, &.{
":1:12: error: enum declarations must have at least one tag",
});

case.addError(
\\const E1 = enum { a, b, _ };
\\export fn foo() void {
\\ const x = E1.a;
\\}
, &.{
":1:12: error: non-exhaustive enum missing integer tag type",
":1:25: note: marked non-exhaustive here",
});

case.addError(
\\const E1 = enum { a, b, c, b, d };
\\export fn foo() void {
\\ const x = E1.a;
\\}
, &.{
":1:28: error: duplicate enum tag",
":1:22: note: other tag here",
});

case.addError(
\\export fn foo() void {
\\ const a = true;
\\ const b = @enumToInt(a);
\\}
, &.{
":3:26: error: expected enum or tagged union, found bool",
});

case.addError(
\\export fn foo() void {
\\ const a = 1;
\\ const b = @intToEnum(bool, a);
\\}
, &.{
":3:26: error: expected enum, found bool",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ const b = @intToEnum(E, 3);
\\}
, &.{
":3:15: error: enum 'E' has no tag with value 3",
":1:11: note: enum declared here",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ var x: E = .a;
\\ switch (x) {
\\ .a => {},
\\ .c => {},
\\ }
\\}
, &.{
":4:5: error: switch must handle all possibilities",
":4:5: note: unhandled enumeration value: 'b'",
":1:11: note: enum 'E' declared here",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ var x: E = .a;
\\ switch (x) {
\\ .a => {},
\\ .b => {},
\\ .b => {},
\\ .c => {},
\\ }
\\}
, &.{
":7:10: error: duplicate switch value",
":6:10: note: previous value here",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ var x: E = .a;
\\ switch (x) {
\\ .a => {},
\\ .b => {},
\\ .c => {},
\\ else => {},
\\ }
\\}
, &.{
":8:14: error: unreachable else prong; all cases already handled",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ var x: E = .a;
\\ switch (x) {
\\ .a => {},
\\ .b => {},
\\ _ => {},
\\ }
\\}
, &.{
":4:5: error: '_' prong only allowed when switching on non-exhaustive enums",
":7:11: note: '_' prong here",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ var x = E.d;
\\}
, &.{
":3:14: error: enum 'E' has no member named 'd'",
":1:11: note: enum declared here",
});

case.addError(
\\const E = enum { a, b, c };
\\export fn foo() void {
\\ var x: E = .d;
\\}
, &.{
":3:17: error: enum 'E' has no field named 'd'",
":1:11: note: enum declared here",
});
}

ctx.c("empty start function", linux_x64,
\\export fn _start() noreturn {
\\ unreachable;
@ -489,8 +802,8 @@ pub fn addCases(ctx: *TestContext) !void {
\\ZIG_EXTERN_C zig_noreturn void _start(void);
\\
\\zig_noreturn void _start(void) {
\\ zig_breakpoint();
\\ zig_unreachable();
\\ zig_breakpoint();
\\ zig_unreachable();
\\}
\\
);

@ -941,6 +941,32 @@ pub fn addCases(ctx: *TestContext) !void {
"",
);

// Array access to a global array.
case.addCompareOutput(
\\const hello = "hello".*;
\\export fn _start() noreturn {
\\ assert(hello[1] == 'e');
\\
\\ exit();
\\}
\\
\\pub fn assert(ok: bool) void {
\\ if (!ok) unreachable; // assertion failure
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);

// 64bit set stack
case.addCompareOutput(
\\export fn _start() noreturn {
@ -1022,7 +1048,7 @@ pub fn addCases(ctx: *TestContext) !void {
"Hello, World!\n",
);
try case.files.append(.{
.src =
.src =
\\pub fn print() void {
\\ asm volatile ("syscall"
\\ :
@ -1038,11 +1064,61 @@ pub fn addCases(ctx: *TestContext) !void {
.path = "print.zig",
});
}
{
var case = ctx.exe("import private", linux_x64);
case.addError(
\\export fn _start() noreturn {
\\ @import("print.zig").print();
\\ exit();
\\}
\\
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (@as(usize, 0))
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
&.{":2:25: error: 'print' is private"},
);
try case.files.append(.{
.src =
\\fn print() void {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (@as(usize, 1)),
\\ [arg1] "{rdi}" (@as(usize, 1)),
\\ [arg2] "{rsi}" (@ptrToInt("Hello, World!\n")),
\\ [arg3] "{rdx}" (@as(usize, 14))
\\ : "rcx", "r11", "memory"
\\ );
\\ return;
\\}
,
.path = "print.zig",
});
}

ctx.compileError("function redefinition", linux_x64,
\\// dummy comment
\\fn entry() void {}
\\fn entry() void {}
, &[_][]const u8{":2:4: error: redefinition of 'entry'"});
, &[_][]const u8{
":3:4: error: redefinition of 'entry'",
":2:1: note: previous definition here",
});

ctx.compileError("global variable redefinition", linux_x64,
\\// dummy comment
\\var foo = false;
\\var foo = true;
, &[_][]const u8{
":3:5: error: redefinition of 'foo'",
":2:1: note: previous definition here",
});

ctx.compileError("compileError", linux_x64,
\\export fn _start() noreturn {

@ -121,6 +121,138 @@ pub fn addCases(ctx: *TestContext) !void {
\\ return x + y;
\\}
, "35\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 20;
\\ i -= 5;
\\ return i;
\\}
, "15\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 5;
\\ i -= 3;
\\ var result: u32 = foo(i, 10);
\\ return result;
\\}
\\fn foo(x: u32, y: u32) u32 {
\\ return y - x;
\\}
, "8\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 5;
\\ i *= 7;
\\ var result: u32 = foo(i, 10);
\\ return result;
\\}
\\fn foo(x: u32, y: u32) u32 {
\\ return x * y;
\\}
, "350\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 352;
\\ i /= 7; // i = 50
\\ var result: u32 = foo(i, 7);
\\ return result;
\\}
\\fn foo(x: u32, y: u32) u32 {
\\ return x / y;
\\}
, "7\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 5;
\\ i &= 6;
\\ return i;
\\}
, "4\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 5;
\\ i |= 6;
\\ return i;
\\}
, "7\n");

case.addCompareOutput(
\\export fn _start() u32 {
\\ var i: u32 = 5;
\\ i ^= 6;
\\ return i;
\\}
, "3\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = false;
\\ b = b or false;
\\ return b;
\\}
, "0\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = true;
\\ b = b or false;
\\ return b;
\\}
, "1\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = false;
\\ b = b or true;
\\ return b;
\\}
, "1\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = true;
\\ b = b or true;
\\ return b;
\\}
, "1\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = false;
\\ b = b and false;
\\ return b;
\\}
, "0\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = true;
\\ b = b and false;
\\ return b;
\\}
, "0\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = false;
\\ b = b and true;
\\ return b;
\\}
, "0\n");

case.addCompareOutput(
\\export fn _start() bool {
\\ var b: bool = true;
\\ b = b and true;
\\ return b;
\\}
, "1\n");
}

{

test/tests.zig
@ -566,42 +566,87 @@ pub const StackTracesContext = struct {

const Expect = [@typeInfo(Mode).Enum.fields.len][]const u8;

pub fn addCase(
pub fn addCase(self: *StackTracesContext, config: anytype) void {
if (@hasField(@TypeOf(config), "exclude")) {
if (config.exclude.exclude()) return;
}
if (@hasField(@TypeOf(config), "exclude_arch")) {
const exclude_arch: []const builtin.Cpu.Arch = &config.exclude_arch;
for (exclude_arch) |arch| if (arch == builtin.cpu.arch) return;
}
if (@hasField(@TypeOf(config), "exclude_os")) {
const exclude_os: []const builtin.Os.Tag = &config.exclude_os;
for (exclude_os) |os| if (os == builtin.os.tag) return;
}
for (self.modes) |mode| {
switch (mode) {
.Debug => {
if (@hasField(@TypeOf(config), "Debug")) {
self.addExpect(config.name, config.source, mode, config.Debug);
}
},
.ReleaseSafe => {
if (@hasField(@TypeOf(config), "ReleaseSafe")) {
self.addExpect(config.name, config.source, mode, config.ReleaseSafe);
}
},
.ReleaseFast => {
if (@hasField(@TypeOf(config), "ReleaseFast")) {
self.addExpect(config.name, config.source, mode, config.ReleaseFast);
}
},
.ReleaseSmall => {
if (@hasField(@TypeOf(config), "ReleaseSmall")) {
self.addExpect(config.name, config.source, mode, config.ReleaseSmall);
}
},
}
}
}

fn addExpect(
self: *StackTracesContext,
name: []const u8,
source: []const u8,
expect: Expect,
mode: Mode,
mode_config: anytype,
) void {
const b = self.b;

for (self.modes) |mode| {
const expect_for_mode = expect[@enumToInt(mode)];
if (expect_for_mode.len == 0) continue;

const annotated_case_name = fmt.allocPrint(self.b.allocator, "{s} {s} ({s})", .{
"stack-trace",
name,
@tagName(mode),
}) catch unreachable;
if (self.test_filter) |filter| {
if (mem.indexOf(u8, annotated_case_name, filter) == null) continue;
}

const src_basename = "source.zig";
const write_src = b.addWriteFile(src_basename, source);
const exe = b.addExecutableFromWriteFileStep("test", write_src, src_basename);
exe.setBuildMode(mode);

const run_and_compare = RunAndCompareStep.create(
self,
exe,
annotated_case_name,
mode,
expect_for_mode,
);

self.step.dependOn(&run_and_compare.step);
if (@hasField(@TypeOf(mode_config), "exclude")) {
if (mode_config.exclude.exclude()) return;
}
if (@hasField(@TypeOf(mode_config), "exclude_arch")) {
const exclude_arch: []const builtin.Cpu.Arch = &mode_config.exclude_arch;
for (exclude_arch) |arch| if (arch == builtin.cpu.arch) return;
}
if (@hasField(@TypeOf(mode_config), "exclude_os")) {
const exclude_os: []const builtin.Os.Tag = &mode_config.exclude_os;
for (exclude_os) |os| if (os == builtin.os.tag) return;
}

const annotated_case_name = fmt.allocPrint(self.b.allocator, "{s} {s} ({s})", .{
"stack-trace",
name,
@tagName(mode),
}) catch unreachable;
if (self.test_filter) |filter| {
if (mem.indexOf(u8, annotated_case_name, filter) == null) return;
}

const b = self.b;
const src_basename = "source.zig";
const write_src = b.addWriteFile(src_basename, source);
const exe = b.addExecutableFromWriteFileStep("test", write_src, src_basename);
exe.setBuildMode(mode);

const run_and_compare = RunAndCompareStep.create(
self,
exe,
annotated_case_name,
mode,
mode_config.expect,
);

self.step.dependOn(&run_and_compare.step);
}

const RunAndCompareStep = struct {
@ -703,6 +748,7 @@ pub const StackTracesContext = struct {
// process result
// - keep only basename of source file path
// - replace address with symbolic string
// - replace function name with symbolic string when mode != .Debug
// - skip empty lines
const got: []const u8 = got_result: {
var buf = ArrayList(u8).init(b.allocator);
@ -711,26 +757,45 @@ pub const StackTracesContext = struct {
var it = mem.split(stderr, "\n");
process_lines: while (it.next()) |line| {
if (line.len == 0) continue;
const delims = [_][]const u8{ ":", ":", ":", " in " };
var marks = [_]usize{0} ** 4;
// offset search past `[drive]:` on windows
var pos: usize = if (std.Target.current.os.tag == .windows) 2 else 0;
// locate delims/anchor
const delims = [_][]const u8{ ":", ":", ":", " in ", "(", ")" };
var marks = [_]usize{0} ** delims.len;
for (delims) |delim, i| {
marks[i] = mem.indexOfPos(u8, line, pos, delim) orelse {
// unexpected pattern: emit raw line and cont
try buf.appendSlice(line);
try buf.appendSlice("\n");
continue :process_lines;
};
pos = marks[i] + delim.len;
}
// locate source basename
pos = mem.lastIndexOfScalar(u8, line[0..marks[0]], fs.path.sep) orelse {
// unexpected pattern: emit raw line and cont
try buf.appendSlice(line);
try buf.appendSlice("\n");
continue :process_lines;
};
// end processing if source basename changes
if (!mem.eql(u8, "source.zig", line[pos + 1 .. marks[0]])) break;
// emit substituted line
try buf.appendSlice(line[pos + 1 .. marks[2] + delims[2].len]);
try buf.appendSlice(" [address]");
try buf.appendSlice(line[marks[3]..]);
if (self.mode == .Debug) {
if (mem.lastIndexOfScalar(u8, line[marks[4]..marks[5]], '.')) |idot| {
// On certain platforms (windows) or possibly depending on how we choose to link main
// the object file extension may be present so we simply strip any extension.
try buf.appendSlice(line[marks[3] .. marks[4] + idot]);
try buf.appendSlice(line[marks[5]..]);
} else {
try buf.appendSlice(line[marks[3]..]);
}
} else {
try buf.appendSlice(line[marks[3] .. marks[3] + delims[3].len]);
try buf.appendSlice("[function]");
}
try buf.appendSlice("\n");
}
break :got_result buf.toOwnedSlice();

@ -2529,6 +2529,14 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
});

cases.add("macro call with no args",
\\#define CALL(arg) bar()
, &[_][]const u8{
\\pub fn CALL(arg: anytype) callconv(.Inline) @TypeOf(bar()) {
\\ return bar();
\\}
});

cases.add("logical and, logical or",
\\int max(int a, int b) {
\\ if (a < b || a == b)