Mirror of https://github.com/ziglang/zig.git (synced 2026-02-21 16:54:52 +00:00)

Commit 58ce79f935: Merge remote-tracking branch 'origin/master' into llvm7

build.zig (14 lines changed)
@@ -10,7 +10,7 @@ const ArrayList = std.ArrayList;
const Buffer = std.Buffer;
const io = std.io;

pub fn build(b: &Builder) !void {
pub fn build(b: *Builder) !void {
const mode = b.standardReleaseOptions();

var docgen_exe = b.addExecutable("docgen", "doc/docgen.zig");
@@ -132,7 +132,7 @@ pub fn build(b: &Builder) !void {
test_step.dependOn(tests.addGenHTests(b, test_filter));
}

fn dependOnLib(lib_exe_obj: &std.build.LibExeObjStep, dep: &const LibraryDep) void {
fn dependOnLib(lib_exe_obj: *std.build.LibExeObjStep, dep: *const LibraryDep) void {
for (dep.libdirs.toSliceConst()) |lib_dir| {
lib_exe_obj.addLibPath(lib_dir);
}
@@ -147,7 +147,7 @@ fn dependOnLib(lib_exe_obj: &std.build.LibExeObjStep, dep: &const LibraryDep) vo
}
}

fn addCppLib(b: &Builder, lib_exe_obj: &std.build.LibExeObjStep, cmake_binary_dir: []const u8, lib_name: []const u8) void {
fn addCppLib(b: *Builder, lib_exe_obj: *std.build.LibExeObjStep, cmake_binary_dir: []const u8, lib_name: []const u8) void {
const lib_prefix = if (lib_exe_obj.target.isWindows()) "" else "lib";
lib_exe_obj.addObjectFile(os.path.join(b.allocator, cmake_binary_dir, "zig_cpp", b.fmt("{}{}{}", lib_prefix, lib_name, lib_exe_obj.target.libFileExt())) catch unreachable);
}
@@ -159,7 +159,7 @@ const LibraryDep = struct {
includes: ArrayList([]const u8),
};

fn findLLVM(b: &Builder, llvm_config_exe: []const u8) !LibraryDep {
fn findLLVM(b: *Builder, llvm_config_exe: []const u8) !LibraryDep {
const libs_output = try b.exec([][]const u8{
llvm_config_exe,
"--libs",
@@ -217,7 +217,7 @@ fn findLLVM(b: &Builder, llvm_config_exe: []const u8) !LibraryDep {
return result;
}

pub fn installStdLib(b: &Builder, stdlib_files: []const u8) void {
pub fn installStdLib(b: *Builder, stdlib_files: []const u8) void {
var it = mem.split(stdlib_files, ";");
while (it.next()) |stdlib_file| {
const src_path = os.path.join(b.allocator, "std", stdlib_file) catch unreachable;
@@ -226,7 +226,7 @@ pub fn installStdLib(b: &Builder, stdlib_files: []const u8) void {
}
}

pub fn installCHeaders(b: &Builder, c_header_files: []const u8) void {
pub fn installCHeaders(b: *Builder, c_header_files: []const u8) void {
var it = mem.split(c_header_files, ";");
while (it.next()) |c_header_file| {
const src_path = os.path.join(b.allocator, "c_headers", c_header_file) catch unreachable;
@@ -235,7 +235,7 @@ pub fn installCHeaders(b: &Builder, c_header_files: []const u8) void {
}
}

fn nextValue(index: &usize, build_info: []const u8) []const u8 {
fn nextValue(index: *usize, build_info: []const u8) []const u8 {
const start = index.*;
while (true) : (index.* += 1) {
switch (build_info[index.*]) {

@@ -104,7 +104,7 @@ const Tokenizer = struct {
};
}

fn next(self: &Tokenizer) Token {
fn next(self: *Tokenizer) Token {
var result = Token{
.id = Token.Id.Eof,
.start = self.index,
@@ -196,7 +196,7 @@ const Tokenizer = struct {
line_end: usize,
};

fn getTokenLocation(self: &Tokenizer, token: &const Token) Location {
fn getTokenLocation(self: *Tokenizer, token: *const Token) Location {
var loc = Location{
.line = 0,
.column = 0,
@@ -221,7 +221,7 @@ const Tokenizer = struct {
}
};

fn parseError(tokenizer: &Tokenizer, token: &const Token, comptime fmt: []const u8, args: ...) error {
fn parseError(tokenizer: *Tokenizer, token: *const Token, comptime fmt: []const u8, args: ...) error {
const loc = tokenizer.getTokenLocation(token);
warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
if (loc.line_start <= loc.line_end) {
@@ -244,13 +244,13 @@ fn parseError(tokenizer: &Tokenizer, token: &const Token, comptime fmt: []const
return error.ParseError;
}

fn assertToken(tokenizer: &Tokenizer, token: &const Token, id: Token.Id) !void {
fn assertToken(tokenizer: *Tokenizer, token: *const Token, id: Token.Id) !void {
if (token.id != id) {
return parseError(tokenizer, token, "expected {}, found {}", @tagName(id), @tagName(token.id));
}
}

fn eatToken(tokenizer: &Tokenizer, id: Token.Id) !Token {
fn eatToken(tokenizer: *Tokenizer, id: Token.Id) !Token {
const token = tokenizer.next();
try assertToken(tokenizer, token, id);
return token;
@@ -317,7 +317,7 @@ const Action = enum {
Close,
};

fn genToc(allocator: &mem.Allocator, tokenizer: &Tokenizer) !Toc {
fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var urls = std.HashMap([]const u8, Token, mem.hash_slice_u8, mem.eql_slice_u8).init(allocator);
errdefer urls.deinit();

@@ -546,7 +546,7 @@ fn genToc(allocator: &mem.Allocator, tokenizer: &Tokenizer) !Toc {
};
}

fn urlize(allocator: &mem.Allocator, input: []const u8) ![]u8 {
fn urlize(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

@@ -566,7 +566,7 @@ fn urlize(allocator: &mem.Allocator, input: []const u8) ![]u8 {
return buf.toOwnedSlice();
}

fn escapeHtml(allocator: &mem.Allocator, input: []const u8) ![]u8 {
fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

@@ -608,7 +608,7 @@ test "term color" {
assert(mem.eql(u8, result, "A<span class=\"t32\">green</span>B"));
}

fn termColor(allocator: &mem.Allocator, input: []const u8) ![]u8 {
fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();

@@ -688,7 +688,7 @@ fn termColor(allocator: &mem.Allocator, input: []const u8) ![]u8 {
return buf.toOwnedSlice();
}

fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: var, zig_exe: []const u8) !void {
fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var, zig_exe: []const u8) !void {
var code_progress_index: usize = 0;
for (toc.nodes) |node| {
switch (node) {
@@ -1036,7 +1036,7 @@ fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: var
}
}

fn exec(allocator: &mem.Allocator, args: []const []const u8) !os.ChildProcess.ExecResult {
fn exec(allocator: *mem.Allocator, args: []const []const u8) !os.ChildProcess.ExecResult {
const result = try os.ChildProcess.exec(allocator, args, null, null, max_doc_file_size);
switch (result.term) {
os.ChildProcess.Term.Exited => |exit_code| {

@@ -458,7 +458,7 @@ test "string literals" {

// A C string literal is a null terminated pointer.
const null_terminated_bytes = c"hello";
assert(@typeOf(null_terminated_bytes) == &const u8);
assert(@typeOf(null_terminated_bytes) == [*]const u8);
assert(null_terminated_bytes[5] == 0);
}
{#code_end#}
@@ -547,7 +547,7 @@ const c_string_literal =
;
{#code_end#}
<p>
In this example the variable <code>c_string_literal</code> has type <code>&const char</code> and
In this example the variable <code>c_string_literal</code> has type <code>[*]const char</code> and
has a terminating null byte.
</p>
{#see_also|@embedFile#}
@@ -1288,7 +1288,7 @@ const assert = @import("std").debug.assert;
const mem = @import("std").mem;

// array literal
const message = []u8{'h', 'e', 'l', 'l', 'o'};
const message = []u8{ 'h', 'e', 'l', 'l', 'o' };

// get the size of an array
comptime {
@@ -1324,11 +1324,11 @@ test "modify an array" {

// array concatenation works if the values are known
// at compile time
const part_one = []i32{1, 2, 3, 4};
const part_two = []i32{5, 6, 7, 8};
const part_one = []i32{ 1, 2, 3, 4 };
const part_two = []i32{ 5, 6, 7, 8 };
const all_of_it = part_one ++ part_two;
comptime {
assert(mem.eql(i32, all_of_it, []i32{1,2,3,4,5,6,7,8}));
assert(mem.eql(i32, all_of_it, []i32{ 1, 2, 3, 4, 5, 6, 7, 8 }));
}

// remember that string literals are arrays
@@ -1357,7 +1357,7 @@ comptime {
var fancy_array = init: {
var initial_value: [10]Point = undefined;
for (initial_value) |*pt, i| {
pt.* = Point {
pt.* = Point{
.x = i32(i),
.y = i32(i) * 2,
};
@@ -1377,7 +1377,7 @@ test "compile-time array initalization" {
// call a function to initialize an array
var more_points = []Point{makePoint(3)} ** 10;
fn makePoint(x: i32) Point {
return Point {
return Point{
.x = x,
.y = x * 2,
};
@@ -1403,36 +1403,35 @@ test "address of syntax" {
assert(x_ptr.* == 1234);

// When you get the address of a const variable, you get a const pointer.
assert(@typeOf(x_ptr) == &const i32);
assert(@typeOf(x_ptr) == *const i32);

// If you want to mutate the value, you'd need an address of a mutable variable:
var y: i32 = 5678;
const y_ptr = &y;
assert(@typeOf(y_ptr) == &i32);
assert(@typeOf(y_ptr) == *i32);
y_ptr.* += 1;
assert(y_ptr.* == 5679);
}

test "pointer array access" {
// Pointers do not support pointer arithmetic. If you
// need such a thing, use array index syntax:
// Taking an address of an individual element gives a
// pointer to a single item. This kind of pointer
// does not support pointer arithmetic.

var array = []u8{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
const ptr = &array[1];
const ptr = &array[2];
assert(@typeOf(ptr) == *u8);

assert(array[2] == 3);
ptr[1] += 1;
ptr.* += 1;
assert(array[2] == 4);
}

test "pointer slicing" {
// In Zig, we prefer using slices over null-terminated pointers.
// You can turn a pointer into a slice using slice syntax:
// You can turn an array into a slice using slice syntax:
var array = []u8{1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
const ptr = &array[1];
const slice = ptr[1..3];

assert(slice.ptr == &ptr[1]);
const slice = array[2..4];
assert(slice.len == 2);

// Slices have bounds checking and are therefore protected
@@ -1455,7 +1454,7 @@ comptime {

test "@ptrToInt and @intToPtr" {
// To convert an integer address into a pointer, use @intToPtr:
const ptr = @intToPtr(&i32, 0xdeadbeef);
const ptr = @intToPtr(*i32, 0xdeadbeef);

// To convert a pointer to an integer, use @ptrToInt:
const addr = @ptrToInt(ptr);
@@ -1467,7 +1466,7 @@ test "@ptrToInt and @intToPtr" {
comptime {
// Zig is able to do this at compile-time, as long as
// ptr is never dereferenced.
const ptr = @intToPtr(&i32, 0xdeadbeef);
const ptr = @intToPtr(*i32, 0xdeadbeef);
const addr = @ptrToInt(ptr);
assert(@typeOf(addr) == usize);
assert(addr == 0xdeadbeef);
@@ -1477,17 +1476,17 @@ test "volatile" {
// In Zig, loads and stores are assumed to not have side effects.
// If a given load or store should have side effects, such as
// Memory Mapped Input/Output (MMIO), use `volatile`:
const mmio_ptr = @intToPtr(&volatile u8, 0x12345678);
const mmio_ptr = @intToPtr(*volatile u8, 0x12345678);

// Now loads and stores with mmio_ptr are guaranteed to all happen
// and in the same order as in source code.
assert(@typeOf(mmio_ptr) == &volatile u8);
assert(@typeOf(mmio_ptr) == *volatile u8);
}

test "nullable pointers" {
// Pointers cannot be null. If you want a null pointer, use the nullable
// prefix `?` to make the pointer type nullable.
var ptr: ?&i32 = null;
var ptr: ?*i32 = null;

var x: i32 = 1;
ptr = &x;
@@ -1496,7 +1495,7 @@ test "nullable pointers" {

// Nullable pointers are the same size as normal pointers, because pointer
// value 0 is used as the null value.
assert(@sizeOf(?&i32) == @sizeOf(&i32));
assert(@sizeOf(?*i32) == @sizeOf(*i32));
}

test "pointer casting" {
@@ -1504,7 +1503,7 @@ test "pointer casting" {
// operation that Zig cannot protect you against. Use @ptrCast only when other
// conversions are not possible.
const bytes align(@alignOf(u32)) = []u8{0x12, 0x12, 0x12, 0x12};
const u32_ptr = @ptrCast(&const u32, &bytes[0]);
const u32_ptr = @ptrCast(*const u32, &bytes[0]);
assert(u32_ptr.* == 0x12121212);

// Even this example is contrived - there are better ways to do the above than
@@ -1518,7 +1517,7 @@ test "pointer casting" {

test "pointer child type" {
// pointer types have a `child` field which tells you the type they point to.
assert((&u32).Child == u32);
assert((*u32).Child == u32);
}
{#code_end#}
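The hunks above move the documentation's single-item pointer types from &T to *T. As an illustrative sketch that is not taken from this commit, the post-change syntax reads like this (assuming the Zig of this era, with @typeOf and std.debug.assert):

const assert = @import("std").debug.assert;

test "single-item pointer sketch" {
    var x: i32 = 1;
    const p = &x; // address-of still uses `&`
    assert(@typeOf(p) == *i32); // but the pointer type is now spelled `*i32`
    p.* += 1; // dereference with `.*`
    assert(x == 2);
}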
{#header_open|Alignment#}
@@ -1543,15 +1542,15 @@ const builtin = @import("builtin");
test "variable alignment" {
var x: i32 = 1234;
const align_of_i32 = @alignOf(@typeOf(x));
assert(@typeOf(&x) == &i32);
assert(&i32 == &align(align_of_i32) i32);
assert(@typeOf(&x) == *i32);
assert(*i32 == *align(align_of_i32) i32);
if (builtin.arch == builtin.Arch.x86_64) {
assert((&i32).alignment == 4);
assert((*i32).alignment == 4);
}
}
{#code_end#}
<p>In the same way that a <code>&i32</code> can be implicitly cast to a
<code>&const i32</code>, a pointer with a larger alignment can be implicitly
<p>In the same way that a <code>*i32</code> can be implicitly cast to a
<code>*const i32</code>, a pointer with a larger alignment can be implicitly
cast to a pointer with a smaller alignment, but not vice versa.
</p>
<p>
@@ -1565,7 +1564,7 @@ var foo: u8 align(4) = 100;

test "global variable alignment" {
assert(@typeOf(&foo).alignment == 4);
assert(@typeOf(&foo) == &align(4) u8);
assert(@typeOf(&foo) == *align(4) u8);
const slice = (&foo)[0..1];
assert(@typeOf(slice) == []align(4) u8);
}
@@ -1610,7 +1609,7 @@ fn foo(bytes: []u8) u32 {
<code>u8</code> can alias any memory.
</p>
<p>As an example, this code produces undefined behavior:</p>
<pre><code class="zig">@ptrCast(&u32, f32(12.34)).*</code></pre>
<pre><code class="zig">@ptrCast(*u32, f32(12.34)).*</code></pre>
<p>Instead, use {#link|@bitCast#}:
<pre><code class="zig">@bitCast(u32, f32(12.34))</code></pre>
<p>As an added benefit, the <code>@bitcast</code> version works at compile-time.</p>
@@ -1622,18 +1621,27 @@ fn foo(bytes: []u8) u32 {
const assert = @import("std").debug.assert;

test "basic slices" {
var array = []i32{1, 2, 3, 4};
var array = []i32{ 1, 2, 3, 4 };
// A slice is a pointer and a length. The difference between an array and
// a slice is that the array's length is part of the type and known at
// compile-time, whereas the slice's length is known at runtime.
// Both can be accessed with the `len` field.
const slice = array[0..array.len];
assert(slice.ptr == &array[0]);
assert(&slice[0] == &array[0]);
assert(slice.len == array.len);

// Using the address-of operator on a slice gives a pointer to a single
// item, while using the `ptr` field gives an unknown length pointer.
assert(@typeOf(slice.ptr) == [*]i32);
assert(@typeOf(&slice[0]) == *i32);
assert(@ptrToInt(slice.ptr) == @ptrToInt(&slice[0]));

// Slices have array bounds checking. If you try to access something out
// of bounds, you'll get a safety check failure:
slice[10] += 1;

// Note that `slice.ptr` does not invoke safety checking, while `&slice[0]`
// asserts that the slice has len >= 1.
}
{#code_end#}
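For clarity, a minimal sketch (not part of the diff) of the array-versus-slice distinction described in the comments above; the variable names are made up for illustration:

const assert = @import("std").debug.assert;

test "array vs slice length" {
    var array = []u8{ 1, 2, 3, 4, 5 };
    assert(array.len == 5); // array length is part of the type, known at compile-time
    var end: usize = 3;
    const slice = array[0..end]; // slice length is only known at runtime
    assert(slice.len == 3);
}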
<p>This is one reason we prefer slices to pointers.</p>
@@ -1736,7 +1744,7 @@ const Vec3 = struct {
};
}

pub fn dot(self: &const Vec3, other: &const Vec3) f32 {
pub fn dot(self: *const Vec3, other: *const Vec3) f32 {
return self.x * other.x + self.y * other.y + self.z * other.z;
}
};
@@ -1768,7 +1776,7 @@ test "struct namespaced variable" {

// struct field order is determined by the compiler for optimal performance.
// however, you can still calculate a struct base pointer given a field pointer:
fn setYBasedOnX(x: &f32, y: f32) void {
fn setYBasedOnX(x: *f32, y: f32) void {
const point = @fieldParentPtr(Point, "x", x);
point.y = y;
}
@@ -1786,13 +1794,13 @@ test "field parent pointer" {
fn LinkedList(comptime T: type) type {
return struct {
pub const Node = struct {
prev: ?&Node,
next: ?&Node,
prev: ?*Node,
next: ?*Node,
data: T,
};

first: ?&Node,
last: ?&Node,
first: ?*Node,
last: ?*Node,
len: usize,
};
}
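Not part of the commit: a small usage sketch for the generic LinkedList shown above, using the new ?*Node optional-pointer fields:

const assert = @import("std").debug.assert;

test "linked list instantiation sketch" {
    // Instantiating the generic yields an ordinary struct type.
    const ListOfInts = LinkedList(i32);
    var list = ListOfInts{
        .first = null,
        .last = null,
        .len = 0,
    };
    assert(list.len == 0);
}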
@@ -2039,7 +2047,7 @@ const Variant = union(enum) {
Int: i32,
Bool: bool,

fn truthy(self: &const Variant) bool {
fn truthy(self: *const Variant) bool {
return switch (self.*) {
Variant.Int => |x_int| x_int != 0,
Variant.Bool => |x_bool| x_bool,
@@ -2786,7 +2794,7 @@ test "pass aggregate type by value to function" {
}
{#code_end#}
<p>
Instead, one must use <code>&const</code>. Zig allows implicitly casting something
Instead, one must use <code>*const</code>. Zig allows implicitly casting something
to a const pointer to it:
</p>
{#code_begin|test#}
@@ -2794,7 +2802,7 @@ const Foo = struct {
x: i32,
};

fn bar(foo: &const Foo) void {}
fn bar(foo: *const Foo) void {}

test "implicitly cast to const pointer" {
bar(Foo {.x = 12,});
@@ -3208,16 +3216,16 @@ struct Foo *do_a_thing(void) {
<p>Zig code</p>
{#code_begin|syntax#}
// malloc prototype included for reference
extern fn malloc(size: size_t) ?&u8;
extern fn malloc(size: size_t) ?*u8;

fn doAThing() ?&Foo {
fn doAThing() ?*Foo {
const ptr = malloc(1234) ?? return null;
// ...
}
{#code_end#}
<p>
Here, Zig is at least as convenient, if not more, than C. And, the type of "ptr"
is <code>&u8</code> <em>not</em> <code>?&u8</code>. The <code>??</code> operator
is <code>*u8</code> <em>not</em> <code>?*u8</code>. The <code>??</code> operator
unwrapped the nullable type and therefore <code>ptr</code> is guaranteed to be non-null everywhere
it is used in the function.
</p>
@@ -3237,7 +3245,7 @@ fn doAThing() ?&Foo {
In Zig you can accomplish the same thing:
</p>
{#code_begin|syntax#}
fn doAThing(nullable_foo: ?&Foo) void {
fn doAThing(nullable_foo: ?*Foo) void {
// do some stuff

if (nullable_foo) |foo| {
@@ -3713,7 +3721,7 @@ fn List(comptime T: type) type {
</p>
{#code_begin|syntax#}
const Node = struct {
next: &Node,
next: *Node,
name: []u8,
};
{#code_end#}
@@ -3745,7 +3753,7 @@ pub fn main() void {

{#code_begin|syntax#}
/// Calls print and then flushes the buffer.
pub fn printf(self: &OutStream, comptime format: []const u8, args: ...) error!void {
pub fn printf(self: *OutStream, comptime format: []const u8, args: ...) error!void {
const State = enum {
Start,
OpenBrace,
@@ -3817,7 +3825,7 @@ pub fn printf(self: &OutStream, comptime format: []const u8, args: ...) error!vo
and emits a function that actually looks like this:
</p>
{#code_begin|syntax#}
pub fn printf(self: &OutStream, arg0: i32, arg1: []const u8) !void {
pub fn printf(self: *OutStream, arg0: i32, arg1: []const u8) !void {
try self.write("here is a string: '");
try self.printValue(arg0);
try self.write("' here is a number: ");
@@ -3831,7 +3839,7 @@ pub fn printf(self: &OutStream, arg0: i32, arg1: []const u8) !void {
on the type:
</p>
{#code_begin|syntax#}
pub fn printValue(self: &OutStream, value: var) !void {
pub fn printValue(self: *OutStream, value: var) !void {
const T = @typeOf(value);
if (@isInteger(T)) {
return self.printInt(T, value);
@@ -3911,7 +3919,7 @@ pub fn main() void {
at compile time.
</p>
{#header_open|@addWithOverflow#}
<pre><code class="zig">@addWithOverflow(comptime T: type, a: T, b: T, result: &T) -> bool</code></pre>
<pre><code class="zig">@addWithOverflow(comptime T: type, a: T, b: T, result: *T) bool</code></pre>
<p>
Performs <code>result.* = a + b</code>. If overflow or underflow occurs,
stores the overflowed bits in <code>result</code> and returns <code>true</code>.
@@ -3919,7 +3927,7 @@ pub fn main() void {
</p>
{#header_close#}
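As an illustrative sketch (not from the commit), a call of @addWithOverflow with the new *T result parameter:

const assert = @import("std").debug.assert;

test "@addWithOverflow sketch" {
    var result: u8 = undefined;
    // 250 + 10 overflows u8, so true is returned and the wrapped bits (4) are stored.
    assert(@addWithOverflow(u8, 250, 10, &result));
    assert(result == 4);
    // 250 + 5 fits in u8, so false is returned.
    assert(!@addWithOverflow(u8, 250, 5, &result));
    assert(result == 255);
}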
{#header_open|@ArgType#}
<pre><code class="zig">@ArgType(comptime T: type, comptime n: usize) -> type</code></pre>
<pre><code class="zig">@ArgType(comptime T: type, comptime n: usize) type</code></pre>
<p>
This builtin function takes a function type and returns the type of the parameter at index <code>n</code>.
</p>
@@ -3931,7 +3939,7 @@ pub fn main() void {
</p>
{#header_close#}
{#header_open|@atomicLoad#}
<pre><code class="zig">@atomicLoad(comptime T: type, ptr: &const T, comptime ordering: builtin.AtomicOrder) -> T</code></pre>
<pre><code class="zig">@atomicLoad(comptime T: type, ptr: *const T, comptime ordering: builtin.AtomicOrder) T</code></pre>
<p>
This builtin function atomically dereferences a pointer and returns the value.
</p>
@@ -3950,7 +3958,7 @@ pub fn main() void {
</p>
{#header_close#}
{#header_open|@atomicRmw#}
<pre><code class="zig">@atomicRmw(comptime T: type, ptr: &T, comptime op: builtin.AtomicRmwOp, operand: T, comptime ordering: builtin.AtomicOrder) -> T</code></pre>
<pre><code class="zig">@atomicRmw(comptime T: type, ptr: *T, comptime op: builtin.AtomicRmwOp, operand: T, comptime ordering: builtin.AtomicOrder) T</code></pre>
<p>
This builtin function atomically modifies memory and then returns the previous value.
</p>
@@ -3969,7 +3977,7 @@ pub fn main() void {
</p>
{#header_close#}
{#header_open|@bitCast#}
<pre><code class="zig">@bitCast(comptime DestType: type, value: var) -> DestType</code></pre>
<pre><code class="zig">@bitCast(comptime DestType: type, value: var) DestType</code></pre>
<p>
Converts a value of one type to another type.
</p>
@@ -4002,9 +4010,9 @@ pub fn main() void {

{#header_close#}
{#header_open|@alignCast#}
<pre><code class="zig">@alignCast(comptime alignment: u29, ptr: var) -> var</code></pre>
<pre><code class="zig">@alignCast(comptime alignment: u29, ptr: var) var</code></pre>
<p>
<code>ptr</code> can be <code>&T</code>, <code>fn()</code>, <code>?&T</code>,
<code>ptr</code> can be <code>*T</code>, <code>fn()</code>, <code>?*T</code>,
<code>?fn()</code>, or <code>[]T</code>. It returns the same type as <code>ptr</code>
except with the alignment adjusted to the new value.
</p>
@@ -4013,7 +4021,7 @@ pub fn main() void {

{#header_close#}
{#header_open|@alignOf#}
<pre><code class="zig">@alignOf(comptime T: type) -> (number literal)</code></pre>
<pre><code class="zig">@alignOf(comptime T: type) (number literal)</code></pre>
<p>
This function returns the number of bytes that this type should be aligned to
for the current target to match the C ABI. When the child type of a pointer has
@@ -4021,7 +4029,7 @@ pub fn main() void {
</p>
<pre><code class="zig">const assert = @import("std").debug.assert;
comptime {
assert(&u32 == &align(@alignOf(u32)) u32);
assert(*u32 == *align(@alignOf(u32)) u32);
}</code></pre>
<p>
The result is a target-specific compile time constant. It is guaranteed to be
@@ -4049,7 +4057,7 @@ comptime {
{#see_also|Import from C Header File|@cInclude|@cImport|@cUndef|void#}
{#header_close#}
{#header_open|@cImport#}
<pre><code class="zig">@cImport(expression) -> (namespace)</code></pre>
<pre><code class="zig">@cImport(expression) (namespace)</code></pre>
<p>
This function parses C code and imports the functions, types, variables, and
compatible macro definitions into the result namespace.
@@ -4095,13 +4103,13 @@ comptime {
{#see_also|Import from C Header File|@cImport|@cDefine|@cInclude#}
{#header_close#}
{#header_open|@canImplicitCast#}
<pre><code class="zig">@canImplicitCast(comptime T: type, value) -> bool</code></pre>
<pre><code class="zig">@canImplicitCast(comptime T: type, value) bool</code></pre>
<p>
Returns whether a value can be implicitly casted to a given type.
</p>
{#header_close#}
{#header_open|@clz#}
<pre><code class="zig">@clz(x: T) -> U</code></pre>
<pre><code class="zig">@clz(x: T) U</code></pre>
<p>
This function counts the number of leading zeroes in <code>x</code> which is an integer
type <code>T</code>.
@@ -4116,13 +4124,13 @@ comptime {

{#header_close#}
{#header_open|@cmpxchgStrong#}
<pre><code class="zig">@cmpxchgStrong(comptime T: type, ptr: &T, expected_value: T, new_value: T, success_order: AtomicOrder, fail_order: AtomicOrder) -> ?T</code></pre>
<pre><code class="zig">@cmpxchgStrong(comptime T: type, ptr: *T, expected_value: T, new_value: T, success_order: AtomicOrder, fail_order: AtomicOrder) ?T</code></pre>
<p>
This function performs a strong atomic compare exchange operation. It's the equivalent of this code,
except atomic:
</p>
{#code_begin|syntax#}
fn cmpxchgStrongButNotAtomic(comptime T: type, ptr: &T, expected_value: T, new_value: T) ?T {
fn cmpxchgStrongButNotAtomic(comptime T: type, ptr: *T, expected_value: T, new_value: T) ?T {
const old_value = ptr.*;
if (old_value == expected_value) {
ptr.* = new_value;
@@ -4143,13 +4151,13 @@ fn cmpxchgStrongButNotAtomic(comptime T: type, ptr: &T, expected_value: T, new_v
{#see_also|Compile Variables|cmpxchgWeak#}
{#header_close#}
{#header_open|@cmpxchgWeak#}
<pre><code class="zig">@cmpxchgWeak(comptime T: type, ptr: &T, expected_value: T, new_value: T, success_order: AtomicOrder, fail_order: AtomicOrder) -> ?T</code></pre>
<pre><code class="zig">@cmpxchgWeak(comptime T: type, ptr: *T, expected_value: T, new_value: T, success_order: AtomicOrder, fail_order: AtomicOrder) ?T</code></pre>
<p>
This function performs a weak atomic compare exchange operation. It's the equivalent of this code,
except atomic:
</p>
{#code_begin|syntax#}
fn cmpxchgWeakButNotAtomic(comptime T: type, ptr: &T, expected_value: T, new_value: T) ?T {
fn cmpxchgWeakButNotAtomic(comptime T: type, ptr: *T, expected_value: T, new_value: T) ?T {
const old_value = ptr.*;
if (old_value == expected_value and usuallyTrueButSometimesFalse()) {
ptr.* = new_value;
@@ -4237,7 +4245,7 @@ test "main" {
{#code_end#}
{#header_close#}
{#header_open|@ctz#}
<pre><code class="zig">@ctz(x: T) -> U</code></pre>
<pre><code class="zig">@ctz(x: T) U</code></pre>
<p>
This function counts the number of trailing zeroes in <code>x</code> which is an integer
type <code>T</code>.
@@ -4251,7 +4259,7 @@ test "main" {
</p>
{#header_close#}
{#header_open|@divExact#}
<pre><code class="zig">@divExact(numerator: T, denominator: T) -> T</code></pre>
<pre><code class="zig">@divExact(numerator: T, denominator: T) T</code></pre>
<p>
Exact division. Caller guarantees <code>denominator != 0</code> and
<code>@divTrunc(numerator, denominator) * denominator == numerator</code>.
@@ -4264,7 +4272,7 @@ test "main" {
{#see_also|@divTrunc|@divFloor#}
{#header_close#}
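A short sketch (not from the commit) of the @divExact contract described above:

const assert = @import("std").debug.assert;

test "@divExact sketch" {
    // Fine: 10 is evenly divisible by 5.
    assert(@divExact(10, 5) == 2);
    // @divExact(10, 3) would violate the contract: the caller must guarantee
    // @divTrunc(numerator, denominator) * denominator == numerator.
}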
{#header_open|@divFloor#}
<pre><code class="zig">@divFloor(numerator: T, denominator: T) -> T</code></pre>
<pre><code class="zig">@divFloor(numerator: T, denominator: T) T</code></pre>
<p>
Floored division. Rounds toward negative infinity. For unsigned integers it is
the same as <code>numerator / denominator</code>. Caller guarantees <code>denominator != 0</code> and
@@ -4278,7 +4286,7 @@ test "main" {
{#see_also|@divTrunc|@divExact#}
{#header_close#}
{#header_open|@divTrunc#}
<pre><code class="zig">@divTrunc(numerator: T, denominator: T) -> T</code></pre>
<pre><code class="zig">@divTrunc(numerator: T, denominator: T) T</code></pre>
<p>
Truncated division. Rounds toward zero. For unsigned integers it is
the same as <code>numerator / denominator</code>. Caller guarantees <code>denominator != 0</code> and
@@ -4292,7 +4300,7 @@ test "main" {
{#see_also|@divFloor|@divExact#}
{#header_close#}
{#header_open|@embedFile#}
<pre><code class="zig">@embedFile(comptime path: []const u8) -> [X]u8</code></pre>
<pre><code class="zig">@embedFile(comptime path: []const u8) [X]u8</code></pre>
<p>
This function returns a compile time constant fixed-size array with length
equal to the byte count of the file given by <code>path</code>. The contents of the array
@@ -4304,19 +4312,19 @@ test "main" {
{#see_also|@import#}
{#header_close#}
{#header_open|@export#}
<pre><code class="zig">@export(comptime name: []const u8, target: var, linkage: builtin.GlobalLinkage) -> []const u8</code></pre>
<pre><code class="zig">@export(comptime name: []const u8, target: var, linkage: builtin.GlobalLinkage) []const u8</code></pre>
<p>
Creates a symbol in the output object file.
</p>
{#header_close#}
{#header_open|@tagName#}
<pre><code class="zig">@tagName(value: var) -> []const u8</code></pre>
<pre><code class="zig">@tagName(value: var) []const u8</code></pre>
<p>
Converts an enum value or union value to a slice of bytes representing the name.
</p>
{#header_close#}
{#header_open|@TagType#}
<pre><code class="zig">@TagType(T: type) -> type</code></pre>
<pre><code class="zig">@TagType(T: type) type</code></pre>
<p>
For an enum, returns the integer type that is used to store the enumeration value.
</p>
@@ -4325,7 +4333,7 @@ test "main" {
</p>
{#header_close#}
{#header_open|@errorName#}
<pre><code class="zig">@errorName(err: error) -> []u8</code></pre>
<pre><code class="zig">@errorName(err: error) []u8</code></pre>
<p>
This function returns the string representation of an error. If an error
declaration is:
@@ -4341,7 +4349,7 @@ test "main" {
</p>
{#header_close#}
{#header_open|@errorReturnTrace#}
<pre><code class="zig">@errorReturnTrace() -> ?&builtin.StackTrace</code></pre>
<pre><code class="zig">@errorReturnTrace() ?*builtin.StackTrace</code></pre>
<p>
If the binary is built with error return tracing, and this function is invoked in a
function that calls a function with an error or error union return type, returns a
@@ -4360,7 +4368,7 @@ test "main" {
{#header_close#}
{#header_open|@fieldParentPtr#}
<pre><code class="zig">@fieldParentPtr(comptime ParentType: type, comptime field_name: []const u8,
field_ptr: &T) -> &ParentType</code></pre>
field_ptr: *T) *ParentType</code></pre>
<p>
Given a pointer to a field, returns the base pointer of a struct.
</p>
@@ -4380,7 +4388,7 @@ test "main" {
</p>
{#header_close#}
{#header_open|@import#}
<pre><code class="zig">@import(comptime path: []u8) -> (namespace)</code></pre>
<pre><code class="zig">@import(comptime path: []u8) (namespace)</code></pre>
<p>
This function finds a zig file corresponding to <code>path</code> and imports all the
public top level declarations into the resulting namespace.
@@ -4400,7 +4408,7 @@ test "main" {
{#see_also|Compile Variables|@embedFile#}
{#header_close#}
{#header_open|@inlineCall#}
<pre><code class="zig">@inlineCall(function: X, args: ...) -> Y</code></pre>
<pre><code class="zig">@inlineCall(function: X, args: ...) Y</code></pre>
<p>
This calls a function, in the same way that invoking an expression with parentheses does:
</p>
@@ -4420,19 +4428,19 @@ fn add(a: i32, b: i32) i32 { return a + b; }
{#see_also|@noInlineCall#}
{#header_close#}
{#header_open|@intToPtr#}
<pre><code class="zig">@intToPtr(comptime DestType: type, int: usize) -> DestType</code></pre>
<pre><code class="zig">@intToPtr(comptime DestType: type, int: usize) DestType</code></pre>
<p>
Converts an integer to a pointer. To convert the other way, use {#link|@ptrToInt#}.
</p>
{#header_close#}
{#header_open|@IntType#}
<pre><code class="zig">@IntType(comptime is_signed: bool, comptime bit_count: u8) -> type</code></pre>
<pre><code class="zig">@IntType(comptime is_signed: bool, comptime bit_count: u8) type</code></pre>
<p>
This function returns an integer type with the given signness and bit count.
</p>
{#header_close#}
{#header_open|@maxValue#}
<pre><code class="zig">@maxValue(comptime T: type) -> (number literal)</code></pre>
<pre><code class="zig">@maxValue(comptime T: type) (number literal)</code></pre>
<p>
This function returns the maximum value of the integer type <code>T</code>.
</p>
@@ -4441,7 +4449,7 @@ fn add(a: i32, b: i32) i32 { return a + b; }
</p>
{#header_close#}
{#header_open|@memberCount#}
<pre><code class="zig">@memberCount(comptime T: type) -> (number literal)</code></pre>
<pre><code class="zig">@memberCount(comptime T: type) (number literal)</code></pre>
<p>
This function returns the number of members in a struct, enum, or union type.
</p>
@@ -4453,7 +4461,7 @@ fn add(a: i32, b: i32) i32 { return a + b; }
</p>
{#header_close#}
{#header_open|@memberName#}
<pre><code class="zig">@memberName(comptime T: type, comptime index: usize) -> [N]u8</code></pre>
<pre><code class="zig">@memberName(comptime T: type, comptime index: usize) [N]u8</code></pre>
<p>Returns the field name of a struct, union, or enum.</p>
<p>
The result is a compile time constant.
@@ -4463,15 +4471,15 @@ fn add(a: i32, b: i32) i32 { return a + b; }
</p>
{#header_close#}
{#header_open|@field#}
<pre><code class="zig">@field(lhs: var, comptime field_name: []const u8) -> (field)</code></pre>
<pre><code class="zig">@field(lhs: var, comptime field_name: []const u8) (field)</code></pre>
<p>Preforms field access equivalent to <code>lhs.->field_name-<</code>.</p>
{#header_close#}
{#header_open|@memberType#}
<pre><code class="zig">@memberType(comptime T: type, comptime index: usize) -> type</code></pre>
<pre><code class="zig">@memberType(comptime T: type, comptime index: usize) type</code></pre>
<p>Returns the field type of a struct or union.</p>
{#header_close#}
{#header_open|@memcpy#}
<pre><code class="zig">@memcpy(noalias dest: &u8, noalias source: &const u8, byte_count: usize)</code></pre>
<pre><code class="zig">@memcpy(noalias dest: *u8, noalias source: *const u8, byte_count: usize)</code></pre>
<p>
This function copies bytes from one region of memory to another. <code>dest</code> and
<code>source</code> are both pointers and must not overlap.
@@ -4489,7 +4497,7 @@ fn add(a: i32, b: i32) i32 { return a + b; }
mem.copy(u8, dest[0...byte_count], source[0...byte_count]);</code></pre>
{#header_close#}
{#header_open|@memset#}
<pre><code class="zig">@memset(dest: &u8, c: u8, byte_count: usize)</code></pre>
<pre><code class="zig">@memset(dest: *u8, c: u8, byte_count: usize)</code></pre>
<p>
This function sets a region of memory to <code>c</code>. <code>dest</code> is a pointer.
</p>
@@ -4506,7 +4514,7 @@ mem.copy(u8, dest[0...byte_count], source[0...byte_count]);</code></pre>
mem.set(u8, dest, c);</code></pre>
{#header_close#}
{#header_open|@minValue#}
<pre><code class="zig">@minValue(comptime T: type) -> (number literal)</code></pre>
<pre><code class="zig">@minValue(comptime T: type) (number literal)</code></pre>
<p>
This function returns the minimum value of the integer type T.
</p>
@@ -4515,7 +4523,7 @@ mem.set(u8, dest, c);</code></pre>
</p>
{#header_close#}
{#header_open|@mod#}
<pre><code class="zig">@mod(numerator: T, denominator: T) -> T</code></pre>
<pre><code class="zig">@mod(numerator: T, denominator: T) T</code></pre>
<p>
Modulus division. For unsigned integers this is the same as
<code>numerator % denominator</code>. Caller guarantees <code>denominator > 0</code>.
@@ -4528,7 +4536,7 @@ mem.set(u8, dest, c);</code></pre>
{#see_also|@rem#}
{#header_close#}
{#header_open|@mulWithOverflow#}
<pre><code class="zig">@mulWithOverflow(comptime T: type, a: T, b: T, result: &T) -> bool</code></pre>
<pre><code class="zig">@mulWithOverflow(comptime T: type, a: T, b: T, result: *T) bool</code></pre>
<p>
Performs <code>result.* = a * b</code>. If overflow or underflow occurs,
stores the overflowed bits in <code>result</code> and returns <code>true</code>.
@@ -4536,7 +4544,7 @@ mem.set(u8, dest, c);</code></pre>
</p>
{#header_close#}
{#header_open|@newStackCall#}
<pre><code class="zig">@newStackCall(new_stack: []u8, function: var, args: ...) -> var</code></pre>
<pre><code class="zig">@newStackCall(new_stack: []u8, function: var, args: ...) var</code></pre>
<p>
This calls a function, in the same way that invoking an expression with parentheses does. However,
instead of using the same stack as the caller, the function uses the stack provided in the <code>new_stack</code>
@@ -4572,7 +4580,7 @@ fn targetFunction(x: i32) usize {
{#code_end#}
{#header_close#}
{#header_open|@noInlineCall#}
<pre><code class="zig">@noInlineCall(function: var, args: ...) -> var</code></pre>
<pre><code class="zig">@noInlineCall(function: var, args: ...) var</code></pre>
<p>
This calls a function, in the same way that invoking an expression with parentheses does:
</p>
@@ -4594,13 +4602,13 @@ fn add(a: i32, b: i32) i32 {
{#see_also|@inlineCall#}
{#header_close#}
{#header_open|@offsetOf#}
<pre><code class="zig">@offsetOf(comptime T: type, comptime field_name: [] const u8) -> (number literal)</code></pre>
<pre><code class="zig">@offsetOf(comptime T: type, comptime field_name: [] const u8) (number literal)</code></pre>
<p>
This function returns the byte offset of a field relative to its containing struct.
</p>
{#header_close#}
{#header_open|@OpaqueType#}
<pre><code class="zig">@OpaqueType() -> type</code></pre>
<pre><code class="zig">@OpaqueType() type</code></pre>
<p>
Creates a new type with an unknown size and alignment.
</p>
@@ -4608,12 +4616,12 @@ fn add(a: i32, b: i32) i32 {
This is typically used for type safety when interacting with C code that does not expose struct details.
Example:
</p>
{#code_begin|test_err|expected type '&Derp', found '&Wat'#}
{#code_begin|test_err|expected type '*Derp', found '*Wat'#}
const Derp = @OpaqueType();
const Wat = @OpaqueType();

extern fn bar(d: &Derp) void;
export fn foo(w: &Wat) void {
extern fn bar(d: *Derp) void;
export fn foo(w: *Wat) void {
bar(w);
}

@@ -4623,7 +4631,7 @@ test "call foo" {
{#code_end#}
{#header_close#}
{#header_open|@panic#}
<pre><code class="zig">@panic(message: []const u8) -> noreturn</code></pre>
<pre><code class="zig">@panic(message: []const u8) noreturn</code></pre>
<p>
Invokes the panic handler function. By default the panic handler function
calls the public <code>panic</code> function exposed in the root source file, or
@@ -4639,19 +4647,19 @@ test "call foo" {
{#see_also|Root Source File#}
{#header_close#}
{#header_open|@ptrCast#}
<pre><code class="zig">@ptrCast(comptime DestType: type, value: var) -> DestType</code></pre>
<pre><code class="zig">@ptrCast(comptime DestType: type, value: var) DestType</code></pre>
<p>
Converts a pointer of one type to a pointer of another type.
</p>
{#header_close#}
{#header_open|@ptrToInt#}
<pre><code class="zig">@ptrToInt(value: var) -> usize</code></pre>
<pre><code class="zig">@ptrToInt(value: var) usize</code></pre>
<p>
Converts <code>value</code> to a <code>usize</code> which is the address of the pointer. <code>value</code> can be one of these types:
</p>
<ul>
<li><code>&T</code></li>
<li><code>?&T</code></li>
<li><code>*T</code></li>
<li><code>?*T</code></li>
<li><code>fn()</code></li>
<li><code>?fn()</code></li>
</ul>
@@ -4659,7 +4667,7 @@ test "call foo" {

{#header_close#}
{#header_open|@rem#}
<pre><code class="zig">@rem(numerator: T, denominator: T) -> T</code></pre>
<pre><code class="zig">@rem(numerator: T, denominator: T) T</code></pre>
<p>
Remainder division. For unsigned integers this is the same as
<code>numerator % denominator</code>. Caller guarantees <code>denominator > 0</code>.
@@ -4776,13 +4784,13 @@ pub const FloatMode = enum {
{#see_also|Compile Variables#}
{#header_close#}
{#header_open|@setGlobalSection#}
<pre><code class="zig">@setGlobalSection(global_variable_name, comptime section_name: []const u8) -> bool</code></pre>
<pre><code class="zig">@setGlobalSection(global_variable_name, comptime section_name: []const u8) bool</code></pre>
<p>
Puts the global variable in the specified section.
</p>
{#header_close#}
{#header_open|@shlExact#}
<pre><code class="zig">@shlExact(value: T, shift_amt: Log2T) -> T</code></pre>
<pre><code class="zig">@shlExact(value: T, shift_amt: Log2T) T</code></pre>
<p>
Performs the left shift operation (<code><<</code>). Caller guarantees
that the shift will not shift any 1 bits out.
@@ -4794,7 +4802,7 @@ pub const FloatMode = enum {
{#see_also|@shrExact|@shlWithOverflow#}
{#header_close#}
{#header_open|@shlWithOverflow#}
<pre><code class="zig">@shlWithOverflow(comptime T: type, a: T, shift_amt: Log2T, result: &T) -> bool</code></pre>
<pre><code class="zig">@shlWithOverflow(comptime T: type, a: T, shift_amt: Log2T, result: *T) bool</code></pre>
<p>
Performs <code>result.* = a << b</code>. If overflow or underflow occurs,
stores the overflowed bits in <code>result</code> and returns <code>true</code>.
@@ -4807,7 +4815,7 @@ pub const FloatMode = enum {
{#see_also|@shlExact|@shrExact#}
{#header_close#}
{#header_open|@shrExact#}
<pre><code class="zig">@shrExact(value: T, shift_amt: Log2T) -> T</code></pre>
<pre><code class="zig">@shrExact(value: T, shift_amt: Log2T) T</code></pre>
<p>
Performs the right shift operation (<code>>></code>). Caller guarantees
that the shift will not shift any 1 bits out.
@@ -4819,7 +4827,7 @@ pub const FloatMode = enum {
{#see_also|@shlExact|@shlWithOverflow#}
{#header_close#}
{#header_open|@sizeOf#}
<pre><code class="zig">@sizeOf(comptime T: type) -> (number literal)</code></pre>
<pre><code class="zig">@sizeOf(comptime T: type) (number literal)</code></pre>
<p>
This function returns the number of bytes it takes to store <code>T</code> in memory.
</p>
@@ -4828,7 +4836,7 @@ pub const FloatMode = enum {
</p>
{#header_close#}
{#header_open|@sqrt#}
<pre><code class="zig">@sqrt(comptime T: type, value: T) -> T</code></pre>
<pre><code class="zig">@sqrt(comptime T: type, value: T) T</code></pre>
<p>
Performs the square root of a floating point number. Uses a dedicated hardware instruction
when available. Currently only supports f32 and f64 at runtime. f128 at runtime is TODO.
@@ -4838,7 +4846,7 @@ pub const FloatMode = enum {
</p>
{#header_close#}
{#header_open|@subWithOverflow#}
<pre><code class="zig">@subWithOverflow(comptime T: type, a: T, b: T, result: &T) -> bool</code></pre>
<pre><code class="zig">@subWithOverflow(comptime T: type, a: T, b: T, result: *T) bool</code></pre>
<p>
Performs <code>result.* = a - b</code>. If overflow or underflow occurs,
stores the overflowed bits in <code>result</code> and returns <code>true</code>.
@@ -4846,7 +4854,7 @@ pub const FloatMode = enum {
</p>
{#header_close#}
{#header_open|@truncate#}
<pre><code class="zig">@truncate(comptime T: type, integer) -> T</code></pre>
<pre><code class="zig">@truncate(comptime T: type, integer) T</code></pre>
<p>
This function truncates bits from an integer type, resulting in a smaller
integer type.
@@ -4870,7 +4878,7 @@ const b: u8 = @truncate(u8, a);

{#header_close#}
{#header_open|@typeId#}
<pre><code class="zig">@typeId(comptime T: type) -> @import("builtin").TypeId</code></pre>
<pre><code class="zig">@typeId(comptime T: type) @import("builtin").TypeId</code></pre>
<p>
Returns which kind of type something is. Possible values:
</p>
@@ -4904,7 +4912,7 @@ pub const TypeId = enum {
{#code_end#}
{#header_close#}
{#header_open|@typeInfo#}
<pre><code class="zig">@typeInfo(comptime T: type) -> @import("builtin").TypeInfo</code></pre>
<pre><code class="zig">@typeInfo(comptime T: type) @import("builtin").TypeInfo</code></pre>
<p>
Returns information on the type. Returns a value of the following union:
</p>
@@ -5080,14 +5088,14 @@ pub const TypeInfo = union(TypeId) {
{#code_end#}
{#header_close#}
{#header_open|@typeName#}
<pre><code class="zig">@typeName(T: type) -> []u8</code></pre>
<pre><code class="zig">@typeName(T: type) []u8</code></pre>
<p>
This function returns the string representation of a type.
</p>

{#header_close#}
{#header_open|@typeOf#}
<pre><code class="zig">@typeOf(expression) -> type</code></pre>
<pre><code class="zig">@typeOf(expression) type</code></pre>
<p>
This function returns a compile-time constant, which is the type of the
expression passed as an argument. The expression is evaluated.
@@ -5937,7 +5945,7 @@ pub const __zig_test_fn_slice = {}; // overwritten later
{#header_open|C String Literals#}
{#code_begin|exe#}
{#link_libc#}
extern fn puts(&const u8) void;
extern fn puts([*]const u8) void;

pub fn main() void {
puts(c"this has a null terminator");
@@ -5996,8 +6004,8 @@ const c = @cImport({
{#code_begin|syntax#}
const base64 = @import("std").base64;

export fn decode_base_64(dest_ptr: &u8, dest_len: usize,
source_ptr: &const u8, source_len: usize) usize
export fn decode_base_64(dest_ptr: *u8, dest_len: usize,
source_ptr: *const u8, source_len: usize) usize
{
const src = source_ptr[0..source_len];
const dest = dest_ptr[0..dest_len];
@@ -6028,7 +6036,7 @@ int main(int argc, char **argv) {
{#code_begin|syntax#}
const Builder = @import("std").build.Builder;

pub fn build(b: &Builder) void {
pub fn build(b: *Builder) void {
const obj = b.addObject("base64", "base64.zig");

const exe = b.addCExecutable("test");
@@ -6450,7 +6458,7 @@ ContainerInitBody = list(StructLiteralField, ",") | list(Expression, ",")

StructLiteralField = "." Symbol "=" Expression

PrefixOp = "!" | "-" | "~" | ("*" option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try" | "await"
PrefixOp = "!" | "-" | "~" | (("*" | "[*]") option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try" | "await"

PrimaryExpression = Integer | Float | String | CharLiteral | KeywordLiteral | GroupedExpression | BlockExpression(BlockOrExpression) | Symbol | ("@" Symbol FnCallExpression) | ArrayType | FnProto | AsmExpression | ContainerDecl | ("continue" option(":" Symbol)) | ErrorSetDecl | PromiseType

@@ -6544,7 +6552,7 @@ hljs.registerLanguage("zig", function(t) {
a = t.IR + "\\s*\\(",
c = {
keyword: "const align var extern stdcallcc nakedcc volatile export pub noalias inline struct packed enum union break return try catch test continue unreachable comptime and or asm defer errdefer if else switch while for fn use bool f32 f64 void type noreturn error i8 u8 i16 u16 i32 u32 i64 u64 isize usize i8w u8w i16w i32w u32w i64w u64w isizew usizew c_short c_ushort c_int c_uint c_long c_ulong c_longlong c_ulonglong",
built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field typeInfo newStackCall",
built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field typeInfo typeName newStackCall",
literal: "true false null undefined"
},
n = [e, t.CLCM, t.CBCM, s, r];

@@ -41,7 +41,7 @@ fn usage(exe: []const u8) !void {
return error.Invalid;
}

fn cat_file(stdout: &os.File, file: &os.File) !void {
fn cat_file(stdout: *os.File, file: *os.File) !void {
var buf: [1024 * 4]u8 = undefined;

while (true) {

@@ -7,7 +7,7 @@ const c = @cImport({

const msg = c"Hello, world!\n";

export fn main(argc: c_int, argv: &&u8) c_int {
export fn main(argc: c_int, argv: **u8) c_int {
if (c.printf(msg) != c_int(c.strlen(msg))) return -1;

return 0;

@@ -1,6 +1,6 @@
const base64 = @import("std").base64;

export fn decode_base_64(dest_ptr: &u8, dest_len: usize, source_ptr: &const u8, source_len: usize) usize {
export fn decode_base_64(dest_ptr: *u8, dest_len: usize, source_ptr: *const u8, source_len: usize) usize {
const src = source_ptr[0..source_len];
const dest = dest_ptr[0..dest_len];
const base64_decoder = base64.standard_decoder_unsafe;

@@ -1,6 +1,6 @@
const Builder = @import("std").build.Builder;

pub fn build(b: &Builder) void {
pub fn build(b: *Builder) void {
const obj = b.addObject("base64", "base64.zig");

const exe = b.addCExecutable("test");

@@ -1,6 +1,6 @@
const Builder = @import("std").build.Builder;

pub fn build(b: &Builder) void {
pub fn build(b: *Builder) void {
const lib = b.addSharedLibrary("mathtest", "mathtest.zig", b.version(1, 0, 0));

const exe = b.addCExecutable("test");
@ -30,7 +30,7 @@ fn argInAllowedSet(maybe_set: ?[]const []const u8, arg: []const u8) bool {
}

// Modifies the current argument index during iteration
fn readFlagArguments(allocator: &Allocator, args: []const []const u8, required: usize, allowed_set: ?[]const []const u8, index: &usize) !FlagArg {
fn readFlagArguments(allocator: *Allocator, args: []const []const u8, required: usize, allowed_set: ?[]const []const u8, index: *usize) !FlagArg {
    switch (required) {
        0 => return FlagArg{ .None = undefined }, // TODO: Required to force non-tag but value?
        1 => {
@ -79,7 +79,7 @@ pub const Args = struct {
    flags: HashMapFlags,
    positionals: ArrayList([]const u8),

    pub fn parse(allocator: &Allocator, comptime spec: []const Flag, args: []const []const u8) !Args {
    pub fn parse(allocator: *Allocator, comptime spec: []const Flag, args: []const []const u8) !Args {
        var parsed = Args{
            .flags = HashMapFlags.init(allocator),
            .positionals = ArrayList([]const u8).init(allocator),
@ -143,18 +143,18 @@ pub const Args = struct {
        return parsed;
    }

    pub fn deinit(self: &Args) void {
    pub fn deinit(self: *Args) void {
        self.flags.deinit();
        self.positionals.deinit();
    }

    // e.g. --help
    pub fn present(self: &Args, name: []const u8) bool {
    pub fn present(self: *Args, name: []const u8) bool {
        return self.flags.contains(name);
    }

    // e.g. --name value
    pub fn single(self: &Args, name: []const u8) ?[]const u8 {
    pub fn single(self: *Args, name: []const u8) ?[]const u8 {
        if (self.flags.get(name)) |entry| {
            switch (entry.value) {
                FlagArg.Single => |inner| {
@ -168,7 +168,7 @@ pub const Args = struct {
    }

    // e.g. --names value1 value2 value3
    pub fn many(self: &Args, name: []const u8) ?[]const []const u8 {
    pub fn many(self: *Args, name: []const u8) ?[]const []const u8 {
        if (self.flags.get(name)) |entry| {
            switch (entry.value) {
                FlagArg.Many => |inner| {

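The Args API touched above (parse, deinit, present, single, many) is driven like this; the flag names and argument values below are illustrative assumptions, and the sketch assumes std, Allocator, Args, and Flag are in scope as in arg.zig's callers. Note that the lookup key drops the leading dashes, matching flags.single("color") against Flag.Option("--color", ...) elsewhere in this diff:

const example_spec = []Flag{
    Flag.Bool("--help"),
    Flag.Arg1("--name"),
};

fn exampleParse(allocator: *Allocator) !void {
    const argv = []const []const u8{ "--name", "demo" };
    var flags = try Args.parse(allocator, example_spec, argv);
    defer flags.deinit();

    if (flags.present("help")) return;
    if (flags.single("name")) |name| {
        std.debug.warn("name = {}\n", name);
    }
}
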
87
src-self-hosted/errmsg.zig
Normal file
@ -0,0 +1,87 @@
const std = @import("std");
const mem = std.mem;
const os = std.os;
const Token = std.zig.Token;
const ast = std.zig.ast;
const TokenIndex = std.zig.ast.TokenIndex;

pub const Color = enum {
    Auto,
    Off,
    On,
};

pub const Msg = struct {
    path: []const u8,
    text: []u8,
    first_token: TokenIndex,
    last_token: TokenIndex,
    tree: *ast.Tree,
};

/// `path` must outlive the returned Msg
/// `tree` must outlive the returned Msg
/// Caller owns returned Msg and must free with `allocator`
pub fn createFromParseError(
    allocator: *mem.Allocator,
    parse_error: *const ast.Error,
    tree: *ast.Tree,
    path: []const u8,
) !*Msg {
    const loc_token = parse_error.loc();
    var text_buf = try std.Buffer.initSize(allocator, 0);
    defer text_buf.deinit();

    var out_stream = &std.io.BufferOutStream.init(&text_buf).stream;
    try parse_error.render(&tree.tokens, out_stream);

    const msg = try allocator.construct(Msg{
        .tree = tree,
        .path = path,
        .text = text_buf.toOwnedSlice(),
        .first_token = loc_token,
        .last_token = loc_token,
    });
    errdefer allocator.destroy(msg);

    return msg;
}

pub fn printToStream(stream: var, msg: *const Msg, color_on: bool) !void {
    const first_token = msg.tree.tokens.at(msg.first_token);
    const last_token = msg.tree.tokens.at(msg.last_token);
    const start_loc = msg.tree.tokenLocationPtr(0, first_token);
    const end_loc = msg.tree.tokenLocationPtr(first_token.end, last_token);
    if (!color_on) {
        try stream.print(
            "{}:{}:{}: error: {}\n",
            msg.path,
            start_loc.line + 1,
            start_loc.column + 1,
            msg.text,
        );
        return;
    }

    try stream.print(
        "{}:{}:{}: error: {}\n{}\n",
        msg.path,
        start_loc.line + 1,
        start_loc.column + 1,
        msg.text,
        msg.tree.source[start_loc.line_start..start_loc.line_end],
    );
    try stream.writeByteNTimes(' ', start_loc.column);
    try stream.writeByteNTimes('~', last_token.end - first_token.start);
    try stream.write("\n");
}

pub fn printToFile(file: *os.File, msg: *const Msg, color: Color) !void {
    const color_on = switch (color) {
        Color.Auto => file.isTty(),
        Color.On => true,
        Color.Off => false,
    };
    var stream = &std.io.FileOutStream.init(file).stream;
    return printToStream(stream, msg, color_on);
}

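A rough sketch of how the new errmsg module is meant to be driven, mirroring its use in cmdFmt later in this diff; the wrapper function and its parameters are my own framing, not code from the commit:

// Assumes `const errmsg = @import("errmsg.zig");` plus std, mem, os, ast in scope.
fn reportParseErrors(allocator: *mem.Allocator, tree: *ast.Tree, path: []const u8, stderr_file: *os.File) !void {
    var it = tree.errors.iterator(0);
    while (it.next()) |parse_error| {
        // Build a Msg from the parse error, print it, then release it.
        const msg = try errmsg.createFromParseError(allocator, parse_error, tree, path);
        defer allocator.destroy(msg);
        try errmsg.printToFile(stderr_file, msg, errmsg.Color.Auto);
    }
}
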
@ -7,7 +7,7 @@ const os = std.os;
const warn = std.debug.warn;

/// Caller must free result
pub fn testZigInstallPrefix(allocator: &mem.Allocator, test_path: []const u8) ![]u8 {
pub fn testZigInstallPrefix(allocator: *mem.Allocator, test_path: []const u8) ![]u8 {
    const test_zig_dir = try os.path.join(allocator, test_path, "lib", "zig");
    errdefer allocator.free(test_zig_dir);

@ -21,7 +21,7 @@ pub fn testZigInstallPrefix(allocator: &mem.Allocator, test_path: []const u8) ![
}

/// Caller must free result
pub fn findZigLibDir(allocator: &mem.Allocator) ![]u8 {
pub fn findZigLibDir(allocator: *mem.Allocator) ![]u8 {
    const self_exe_path = try os.selfExeDirPath(allocator);
    defer allocator.free(self_exe_path);

@ -42,7 +42,7 @@ pub fn findZigLibDir(allocator: &mem.Allocator) ![]u8 {
    return error.FileNotFound;
}

pub fn resolveZigLibDir(allocator: &mem.Allocator) ![]u8 {
pub fn resolveZigLibDir(allocator: *mem.Allocator) ![]u8 {
    return findZigLibDir(allocator) catch |err| {
        warn(
            \\Unable to find zig lib directory: {}.

@ -2,7 +2,7 @@ const Scope = @import("scope.zig").Scope;

pub const Instruction = struct {
    id: Id,
    scope: &Scope,
    scope: *Scope,

    pub const Id = enum {
        Br,

@ -15,9 +15,11 @@ const Args = arg.Args;
|
||||
const Flag = arg.Flag;
|
||||
const Module = @import("module.zig").Module;
|
||||
const Target = @import("target.zig").Target;
|
||||
const errmsg = @import("errmsg.zig");
|
||||
|
||||
var stderr: &io.OutStream(io.FileOutStream.Error) = undefined;
|
||||
var stdout: &io.OutStream(io.FileOutStream.Error) = undefined;
|
||||
var stderr_file: os.File = undefined;
|
||||
var stderr: *io.OutStream(io.FileOutStream.Error) = undefined;
|
||||
var stdout: *io.OutStream(io.FileOutStream.Error) = undefined;
|
||||
|
||||
const usage =
|
||||
\\usage: zig [command] [options]
|
||||
@ -41,7 +43,7 @@ const usage =
|
||||
|
||||
const Command = struct {
|
||||
name: []const u8,
|
||||
exec: fn(&Allocator, []const []const u8) error!void,
|
||||
exec: fn (*Allocator, []const []const u8) error!void,
|
||||
};
|
||||
|
||||
pub fn main() !void {
|
||||
@ -51,7 +53,7 @@ pub fn main() !void {
|
||||
var stdout_out_stream = std.io.FileOutStream.init(&stdout_file);
|
||||
stdout = &stdout_out_stream.stream;
|
||||
|
||||
var stderr_file = try std.io.getStdErr();
|
||||
stderr_file = try std.io.getStdErr();
|
||||
var stderr_out_stream = std.io.FileOutStream.init(&stderr_file);
|
||||
stderr = &stderr_out_stream.stream;
|
||||
|
||||
@ -189,7 +191,7 @@ const missing_build_file =
|
||||
\\
|
||||
;
|
||||
|
||||
fn cmdBuild(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdBuild(allocator: *Allocator, args: []const []const u8) !void {
|
||||
var flags = try Args.parse(allocator, args_build_spec, args);
|
||||
defer flags.deinit();
|
||||
|
||||
@ -424,7 +426,7 @@ const args_build_generic = []Flag{
|
||||
Flag.Arg1("--ver-patch"),
|
||||
};
|
||||
|
||||
fn buildOutputType(allocator: &Allocator, args: []const []const u8, out_type: Module.Kind) !void {
|
||||
fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Module.Kind) !void {
|
||||
var flags = try Args.parse(allocator, args_build_generic, args);
|
||||
defer flags.deinit();
|
||||
|
||||
@ -440,18 +442,19 @@ fn buildOutputType(allocator: &Allocator, args: []const []const u8, out_type: Mo
        build_mode = builtin.Mode.ReleaseSafe;
    }

    var color = Module.ErrColor.Auto;
    if (flags.single("color")) |color_flag| {
        if (mem.eql(u8, color_flag, "auto")) {
            color = Module.ErrColor.Auto;
        } else if (mem.eql(u8, color_flag, "on")) {
            color = Module.ErrColor.On;
        } else if (mem.eql(u8, color_flag, "off")) {
            color = Module.ErrColor.Off;
    const color = blk: {
        if (flags.single("color")) |color_flag| {
            if (mem.eql(u8, color_flag, "auto")) {
                break :blk errmsg.Color.Auto;
            } else if (mem.eql(u8, color_flag, "on")) {
                break :blk errmsg.Color.On;
            } else if (mem.eql(u8, color_flag, "off")) {
                break :blk errmsg.Color.Off;
            } else unreachable;
        } else {
            unreachable;
            break :blk errmsg.Color.Auto;
        }
    }
    };

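The rewritten color handling above replaces a mutable variable with a labeled block, so the result can be a const initialized by a single expression. A stripped-down sketch of the same idiom; the flag name and levels here are just illustrative:

    const verbosity: u8 = blk: {
        if (flags.single("verbose")) |v| {
            if (mem.eql(u8, v, "high")) break :blk 2;
            if (mem.eql(u8, v, "low")) break :blk 1;
            break :blk 0;
        } else {
            break :blk 0; // flag absent: default
        }
    };
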
var emit_type = Module.Emit.Binary;
|
||||
if (flags.single("emit")) |emit_flag| {
|
||||
@ -658,19 +661,19 @@ fn buildOutputType(allocator: &Allocator, args: []const []const u8, out_type: Mo
|
||||
try stderr.print("building {}: {}\n", @tagName(out_type), in_file);
|
||||
}
|
||||
|
||||
fn cmdBuildExe(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdBuildExe(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try buildOutputType(allocator, args, Module.Kind.Exe);
|
||||
}
|
||||
|
||||
// cmd:build-lib ///////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn cmdBuildLib(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdBuildLib(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try buildOutputType(allocator, args, Module.Kind.Lib);
|
||||
}
|
||||
|
||||
// cmd:build-obj ///////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn cmdBuildObj(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdBuildObj(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try buildOutputType(allocator, args, Module.Kind.Obj);
|
||||
}
|
||||
|
||||
@ -683,13 +686,21 @@ const usage_fmt =
|
||||
\\
|
||||
\\Options:
|
||||
\\ --help Print this help and exit
|
||||
\\ --color [auto|off|on] Enable or disable colored error messages
|
||||
\\
|
||||
\\
|
||||
;
|
||||
|
||||
const args_fmt_spec = []Flag{Flag.Bool("--help")};
|
||||
const args_fmt_spec = []Flag{
|
||||
Flag.Bool("--help"),
|
||||
Flag.Option("--color", []const []const u8{
|
||||
"auto",
|
||||
"off",
|
||||
"on",
|
||||
}),
|
||||
};
|
||||
|
||||
fn cmdFmt(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
|
||||
var flags = try Args.parse(allocator, args_fmt_spec, args);
|
||||
defer flags.deinit();
|
||||
|
||||
@ -703,61 +714,69 @@ fn cmdFmt(allocator: &Allocator, args: []const []const u8) !void {
|
||||
os.exit(1);
|
||||
}
|
||||
|
||||
const color = blk: {
|
||||
if (flags.single("color")) |color_flag| {
|
||||
if (mem.eql(u8, color_flag, "auto")) {
|
||||
break :blk errmsg.Color.Auto;
|
||||
} else if (mem.eql(u8, color_flag, "on")) {
|
||||
break :blk errmsg.Color.On;
|
||||
} else if (mem.eql(u8, color_flag, "off")) {
|
||||
break :blk errmsg.Color.Off;
|
||||
} else unreachable;
|
||||
} else {
|
||||
break :blk errmsg.Color.Auto;
|
||||
}
|
||||
};
|
||||
|
||||
var fmt_errors = false;
|
||||
for (flags.positionals.toSliceConst()) |file_path| {
|
||||
var file = try os.File.openRead(allocator, file_path);
|
||||
defer file.close();
|
||||
|
||||
const source_code = io.readFileAlloc(allocator, file_path) catch |err| {
|
||||
try stderr.print("unable to open '{}': {}", file_path, err);
|
||||
fmt_errors = true;
|
||||
continue;
|
||||
};
|
||||
defer allocator.free(source_code);
|
||||
|
||||
var tree = std.zig.parse(allocator, source_code) catch |err| {
|
||||
try stderr.print("error parsing file '{}': {}\n", file_path, err);
|
||||
fmt_errors = true;
|
||||
continue;
|
||||
};
|
||||
defer tree.deinit();
|
||||
|
||||
var error_it = tree.errors.iterator(0);
|
||||
while (error_it.next()) |parse_error| {
|
||||
const token = tree.tokens.at(parse_error.loc());
|
||||
const loc = tree.tokenLocation(0, parse_error.loc());
|
||||
try stderr.print("{}:{}:{}: error: ", file_path, loc.line + 1, loc.column + 1);
|
||||
try tree.renderError(parse_error, stderr);
|
||||
try stderr.print("\n{}\n", source_code[loc.line_start..loc.line_end]);
|
||||
{
|
||||
var i: usize = 0;
|
||||
while (i < loc.column) : (i += 1) {
|
||||
try stderr.write(" ");
|
||||
}
|
||||
}
|
||||
{
|
||||
const caret_count = token.end - token.start;
|
||||
var i: usize = 0;
|
||||
while (i < caret_count) : (i += 1) {
|
||||
try stderr.write("~");
|
||||
}
|
||||
}
|
||||
try stderr.write("\n");
|
||||
const msg = try errmsg.createFromParseError(allocator, parse_error, &tree, file_path);
|
||||
defer allocator.destroy(msg);
|
||||
|
||||
try errmsg.printToFile(&stderr_file, msg, color);
|
||||
}
|
||||
if (tree.errors.len != 0) {
|
||||
fmt_errors = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
try stderr.print("{}\n", file_path);
|
||||
|
||||
const baf = try io.BufferedAtomicFile.create(allocator, file_path);
|
||||
defer baf.destroy();
|
||||
|
||||
try std.zig.render(allocator, baf.stream(), &tree);
|
||||
try baf.finish();
|
||||
const anything_changed = try std.zig.render(allocator, baf.stream(), &tree);
|
||||
if (anything_changed) {
|
||||
try stderr.print("{}\n", file_path);
|
||||
try baf.finish();
|
||||
}
|
||||
}
|
||||
|
||||
if (fmt_errors) {
|
||||
os.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// cmd:targets /////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn cmdTargets(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdTargets(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try stdout.write("Architectures:\n");
|
||||
{
|
||||
comptime var i: usize = 0;
|
||||
@ -799,7 +818,7 @@ fn cmdTargets(allocator: &Allocator, args: []const []const u8) !void {
|
||||
|
||||
// cmd:version /////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn cmdVersion(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdVersion(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try stdout.print("{}\n", std.cstr.toSliceConst(c.ZIG_VERSION_STRING));
|
||||
}
|
||||
|
||||
@ -816,7 +835,7 @@ const usage_test =
|
||||
|
||||
const args_test_spec = []Flag{Flag.Bool("--help")};
|
||||
|
||||
fn cmdTest(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdTest(allocator: *Allocator, args: []const []const u8) !void {
|
||||
var flags = try Args.parse(allocator, args_build_spec, args);
|
||||
defer flags.deinit();
|
||||
|
||||
@ -851,14 +870,14 @@ const usage_run =
|
||||
|
||||
const args_run_spec = []Flag{Flag.Bool("--help")};
|
||||
|
||||
fn cmdRun(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdRun(allocator: *Allocator, args: []const []const u8) !void {
|
||||
var compile_args = args;
|
||||
var runtime_args: []const []const u8 = []const []const u8{};
|
||||
|
||||
for (args) |argv, i| {
|
||||
if (mem.eql(u8, argv, "--")) {
|
||||
compile_args = args[0..i];
|
||||
runtime_args = args[i + 1..];
|
||||
runtime_args = args[i + 1 ..];
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -901,7 +920,7 @@ const args_translate_c_spec = []Flag{
|
||||
Flag.Arg1("--output"),
|
||||
};
|
||||
|
||||
fn cmdTranslateC(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdTranslateC(allocator: *Allocator, args: []const []const u8) !void {
|
||||
var flags = try Args.parse(allocator, args_translate_c_spec, args);
|
||||
defer flags.deinit();
|
||||
|
||||
@ -947,7 +966,7 @@ fn cmdTranslateC(allocator: &Allocator, args: []const []const u8) !void {
|
||||
|
||||
// cmd:help ////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
fn cmdHelp(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdHelp(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try stderr.write(usage);
|
||||
}
|
||||
|
||||
@ -970,7 +989,7 @@ const info_zen =
|
||||
\\
|
||||
;
|
||||
|
||||
fn cmdZen(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdZen(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try stdout.write(info_zen);
|
||||
}
|
||||
|
||||
@ -985,7 +1004,7 @@ const usage_internal =
|
||||
\\
|
||||
;
|
||||
|
||||
fn cmdInternal(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdInternal(allocator: *Allocator, args: []const []const u8) !void {
|
||||
if (args.len == 0) {
|
||||
try stderr.write(usage_internal);
|
||||
os.exit(1);
|
||||
@ -1007,7 +1026,7 @@ fn cmdInternal(allocator: &Allocator, args: []const []const u8) !void {
|
||||
try stderr.write(usage_internal);
|
||||
}
|
||||
|
||||
fn cmdInternalBuildInfo(allocator: &Allocator, args: []const []const u8) !void {
|
||||
fn cmdInternalBuildInfo(allocator: *Allocator, args: []const []const u8) !void {
|
||||
try stdout.print(
|
||||
\\ZIG_CMAKE_BINARY_DIR {}
|
||||
\\ZIG_CXX_COMPILER {}
|
||||
|
||||
@ -10,9 +10,10 @@ const Target = @import("target.zig").Target;
|
||||
const warn = std.debug.warn;
|
||||
const Token = std.zig.Token;
|
||||
const ArrayList = std.ArrayList;
|
||||
const errmsg = @import("errmsg.zig");
|
||||
|
||||
pub const Module = struct {
|
||||
allocator: &mem.Allocator,
|
||||
allocator: *mem.Allocator,
|
||||
name: Buffer,
|
||||
root_src_path: ?[]const u8,
|
||||
module: llvm.ModuleRef,
|
||||
@ -52,10 +53,10 @@ pub const Module = struct {
|
||||
windows_subsystem_windows: bool,
|
||||
windows_subsystem_console: bool,
|
||||
|
||||
link_libs_list: ArrayList(&LinkLib),
|
||||
libc_link_lib: ?&LinkLib,
|
||||
link_libs_list: ArrayList(*LinkLib),
|
||||
libc_link_lib: ?*LinkLib,
|
||||
|
||||
err_color: ErrColor,
|
||||
err_color: errmsg.Color,
|
||||
|
||||
verbose_tokenize: bool,
|
||||
verbose_ast_tree: bool,
|
||||
@ -87,12 +88,6 @@ pub const Module = struct {
|
||||
Obj,
|
||||
};
|
||||
|
||||
pub const ErrColor = enum {
|
||||
Auto,
|
||||
Off,
|
||||
On,
|
||||
};
|
||||
|
||||
pub const LinkLib = struct {
|
||||
name: []const u8,
|
||||
path: ?[]const u8,
|
||||
@ -111,19 +106,19 @@ pub const Module = struct {
|
||||
pub const CliPkg = struct {
|
||||
name: []const u8,
|
||||
path: []const u8,
|
||||
children: ArrayList(&CliPkg),
|
||||
parent: ?&CliPkg,
|
||||
children: ArrayList(*CliPkg),
|
||||
parent: ?*CliPkg,
|
||||
|
||||
pub fn init(allocator: &mem.Allocator, name: []const u8, path: []const u8, parent: ?&CliPkg) !&CliPkg {
|
||||
pub fn init(allocator: *mem.Allocator, name: []const u8, path: []const u8, parent: ?*CliPkg) !*CliPkg {
|
||||
var pkg = try allocator.create(CliPkg);
|
||||
pkg.name = name;
|
||||
pkg.path = path;
|
||||
pkg.children = ArrayList(&CliPkg).init(allocator);
|
||||
pkg.children = ArrayList(*CliPkg).init(allocator);
|
||||
pkg.parent = parent;
|
||||
return pkg;
|
||||
}
|
||||
|
||||
pub fn deinit(self: &CliPkg) void {
|
||||
pub fn deinit(self: *CliPkg) void {
|
||||
for (self.children.toSliceConst()) |child| {
|
||||
child.deinit();
|
||||
}
|
||||
@ -131,7 +126,7 @@ pub const Module = struct {
|
||||
}
|
||||
};
|
||||
|
||||
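A small sketch of building a CliPkg tree with the API shown above; the package names and paths are made up for illustration, and memory ownership follows what the diff shows (deinit tears down children, init allocates with the passed allocator):

fn examplePkgTree(allocator: *mem.Allocator) !void {
    const root = try CliPkg.init(allocator, "root", "src/main.zig", null);
    defer root.deinit();

    // A child package records its parent and is appended to the parent's list.
    const child = try CliPkg.init(allocator, "mylib", "lib/mylib.zig", root);
    try root.children.append(child);
}
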
pub fn create(allocator: &mem.Allocator, name: []const u8, root_src_path: ?[]const u8, target: &const Target, kind: Kind, build_mode: builtin.Mode, zig_lib_dir: []const u8, cache_dir: []const u8) !&Module {
|
||||
pub fn create(allocator: *mem.Allocator, name: []const u8, root_src_path: ?[]const u8, target: *const Target, kind: Kind, build_mode: builtin.Mode, zig_lib_dir: []const u8, cache_dir: []const u8) !*Module {
|
||||
var name_buffer = try Buffer.init(allocator, name);
|
||||
errdefer name_buffer.deinit();
|
||||
|
||||
@ -193,9 +188,9 @@ pub const Module = struct {
|
||||
.link_objects = [][]const u8{},
|
||||
.windows_subsystem_windows = false,
|
||||
.windows_subsystem_console = false,
|
||||
.link_libs_list = ArrayList(&LinkLib).init(allocator),
|
||||
.link_libs_list = ArrayList(*LinkLib).init(allocator),
|
||||
.libc_link_lib = null,
|
||||
.err_color = ErrColor.Auto,
|
||||
.err_color = errmsg.Color.Auto,
|
||||
.darwin_frameworks = [][]const u8{},
|
||||
.darwin_version_min = DarwinVersionMin.None,
|
||||
.test_filters = [][]const u8{},
|
||||
@ -205,11 +200,11 @@ pub const Module = struct {
|
||||
return module_ptr;
|
||||
}
|
||||
|
||||
fn dump(self: &Module) void {
|
||||
fn dump(self: *Module) void {
|
||||
c.LLVMDumpModule(self.module);
|
||||
}
|
||||
|
||||
pub fn destroy(self: &Module) void {
|
||||
pub fn destroy(self: *Module) void {
|
||||
c.LLVMDisposeBuilder(self.builder);
|
||||
c.LLVMDisposeModule(self.module);
|
||||
c.LLVMContextDispose(self.context);
|
||||
@ -218,7 +213,7 @@ pub const Module = struct {
|
||||
self.allocator.destroy(self);
|
||||
}
|
||||
|
||||
pub fn build(self: &Module) !void {
|
||||
pub fn build(self: *Module) !void {
|
||||
if (self.llvm_argv.len != 0) {
|
||||
var c_compatible_args = try std.cstr.NullTerminated2DArray.fromSlices(self.allocator, [][]const []const u8{
|
||||
[][]const u8{"zig (LLVM option parsing)"},
|
||||
@ -255,7 +250,7 @@ pub const Module = struct {
|
||||
const out_stream = &stderr_file_out_stream.stream;
|
||||
|
||||
warn("====fmt:====\n");
|
||||
try std.zig.render(self.allocator, out_stream, &tree);
|
||||
_ = try std.zig.render(self.allocator, out_stream, &tree);
|
||||
|
||||
warn("====ir:====\n");
|
||||
warn("TODO\n\n");
|
||||
@ -264,12 +259,12 @@ pub const Module = struct {
|
||||
self.dump();
|
||||
}
|
||||
|
||||
pub fn link(self: &Module, out_file: ?[]const u8) !void {
|
||||
pub fn link(self: *Module, out_file: ?[]const u8) !void {
|
||||
warn("TODO link");
|
||||
return error.Todo;
|
||||
}
|
||||
|
||||
pub fn addLinkLib(self: &Module, name: []const u8, provided_explicitly: bool) !&LinkLib {
|
||||
pub fn addLinkLib(self: *Module, name: []const u8, provided_explicitly: bool) !*LinkLib {
|
||||
const is_libc = mem.eql(u8, name, "c");
|
||||
|
||||
if (is_libc) {
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
pub const Scope = struct {
|
||||
id: Id,
|
||||
parent: &Scope,
|
||||
parent: *Scope,
|
||||
|
||||
pub const Id = enum {
|
||||
Decls,
|
||||
|
||||
@ -11,7 +11,7 @@ pub const Target = union(enum) {
|
||||
Native,
|
||||
Cross: CrossTarget,
|
||||
|
||||
pub fn oFileExt(self: &const Target) []const u8 {
|
||||
pub fn oFileExt(self: *const Target) []const u8 {
|
||||
const environ = switch (self.*) {
|
||||
Target.Native => builtin.environ,
|
||||
Target.Cross => |t| t.environ,
|
||||
@ -22,28 +22,28 @@ pub const Target = union(enum) {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn exeFileExt(self: &const Target) []const u8 {
|
||||
pub fn exeFileExt(self: *const Target) []const u8 {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.windows => ".exe",
|
||||
else => "",
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getOs(self: &const Target) builtin.Os {
|
||||
pub fn getOs(self: *const Target) builtin.Os {
|
||||
return switch (self.*) {
|
||||
Target.Native => builtin.os,
|
||||
Target.Cross => |t| t.os,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isDarwin(self: &const Target) bool {
|
||||
pub fn isDarwin(self: *const Target) bool {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.ios, builtin.Os.macosx => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isWindows(self: &const Target) bool {
|
||||
pub fn isWindows(self: *const Target) bool {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.windows => true,
|
||||
else => false,
|
||||
|
||||
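The Target methods whose signatures change above (getOs, isDarwin, isWindows, exeFileExt, oFileExt) are plain queries over the active or cross target. A minimal usage sketch, with the helper function being my own illustration rather than code from target.zig:

fn describe(target: *const Target) []const u8 {
    // exeFileExt() would similarly yield ".exe" only when getOs() is windows.
    if (target.isWindows()) return "windows";
    if (target.isDarwin()) return "darwin";
    return "other";
}
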
@ -374,7 +374,7 @@ enum NodeType {
|
||||
NodeTypeCharLiteral,
|
||||
NodeTypeSymbol,
|
||||
NodeTypePrefixOpExpr,
|
||||
NodeTypeAddrOfExpr,
|
||||
NodeTypePointerType,
|
||||
NodeTypeFnCallExpr,
|
||||
NodeTypeArrayAccessExpr,
|
||||
NodeTypeSliceExpr,
|
||||
@ -616,6 +616,7 @@ enum PrefixOp {
|
||||
PrefixOpNegationWrap,
|
||||
PrefixOpMaybe,
|
||||
PrefixOpUnwrapMaybe,
|
||||
PrefixOpAddrOf,
|
||||
};
|
||||
|
||||
struct AstNodePrefixOpExpr {
|
||||
@ -623,7 +624,8 @@ struct AstNodePrefixOpExpr {
|
||||
AstNode *primary_expr;
|
||||
};
|
||||
|
||||
struct AstNodeAddrOfExpr {
|
||||
struct AstNodePointerType {
|
||||
Token *star_token;
|
||||
AstNode *align_expr;
|
||||
BigInt *bit_offset_start;
|
||||
BigInt *bit_offset_end;
|
||||
@ -899,7 +901,7 @@ struct AstNode {
|
||||
AstNodeBinOpExpr bin_op_expr;
|
||||
AstNodeCatchExpr unwrap_err_expr;
|
||||
AstNodePrefixOpExpr prefix_op_expr;
|
||||
AstNodeAddrOfExpr addr_of_expr;
|
||||
AstNodePointerType pointer_type;
|
||||
AstNodeFnCallExpr fn_call_expr;
|
||||
AstNodeArrayAccessExpr array_access_expr;
|
||||
AstNodeSliceExpr slice_expr;
|
||||
@ -972,8 +974,14 @@ struct FnTypeId {
|
||||
uint32_t fn_type_id_hash(FnTypeId*);
|
||||
bool fn_type_id_eql(FnTypeId *a, FnTypeId *b);
|
||||
|
||||
enum PtrLen {
|
||||
PtrLenUnknown,
|
||||
PtrLenSingle,
|
||||
};
|
||||
|
||||
struct TypeTableEntryPointer {
|
||||
TypeTableEntry *child_type;
|
||||
PtrLen ptr_len;
|
||||
bool is_const;
|
||||
bool is_volatile;
|
||||
uint32_t alignment;
|
||||
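PtrLen records, at the type level, which of the two pointer flavors this change introduces a given pointer is: *T (single item, PtrLenSingle) or [*]T (unknown length, PtrLenUnknown). A minimal Zig sketch of the user-facing difference; the array contents and the @ptrCast are illustrative only:

const std = @import("std");

test "single-item vs unknown-length pointers" {
    var array = []i32{ 1, 2, 3, 4 };

    const single: *i32 = &array[0]; // exactly one item; no indexing
    single.* += 10;

    // Unknown-length pointers support indexing.
    const many = @ptrCast([*]i32, &array[0]);
    many[1] += 10;

    std.debug.assert(array[0] == 11 and array[1] == 12);
}
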
@ -1395,6 +1403,7 @@ struct TypeId {
|
||||
union {
|
||||
struct {
|
||||
TypeTableEntry *child_type;
|
||||
PtrLen ptr_len;
|
||||
bool is_const;
|
||||
bool is_volatile;
|
||||
uint32_t alignment;
|
||||
@ -2051,7 +2060,7 @@ enum IrInstructionId {
|
||||
IrInstructionIdTypeInfo,
|
||||
IrInstructionIdTypeId,
|
||||
IrInstructionIdSetEvalBranchQuota,
|
||||
IrInstructionIdPtrTypeOf,
|
||||
IrInstructionIdPtrType,
|
||||
IrInstructionIdAlignCast,
|
||||
IrInstructionIdOpaqueType,
|
||||
IrInstructionIdSetAlignStack,
|
||||
@ -2264,6 +2273,7 @@ struct IrInstructionElemPtr {
|
||||
|
||||
IrInstruction *array_ptr;
|
||||
IrInstruction *elem_index;
|
||||
PtrLen ptr_len;
|
||||
bool is_const;
|
||||
bool safety_check_on;
|
||||
};
|
||||
@ -2272,8 +2282,6 @@ struct IrInstructionVarPtr {
|
||||
IrInstruction base;
|
||||
|
||||
VariableTableEntry *var;
|
||||
bool is_const;
|
||||
bool is_volatile;
|
||||
};
|
||||
|
||||
struct IrInstructionCall {
|
||||
@ -2410,6 +2418,18 @@ struct IrInstructionArrayType {
|
||||
IrInstruction *child_type;
|
||||
};
|
||||
|
||||
struct IrInstructionPtrType {
|
||||
IrInstruction base;
|
||||
|
||||
IrInstruction *align_value;
|
||||
IrInstruction *child_type;
|
||||
uint32_t bit_offset_start;
|
||||
uint32_t bit_offset_end;
|
||||
PtrLen ptr_len;
|
||||
bool is_const;
|
||||
bool is_volatile;
|
||||
};
|
||||
|
||||
struct IrInstructionPromiseType {
|
||||
IrInstruction base;
|
||||
|
||||
@ -2889,17 +2909,6 @@ struct IrInstructionSetEvalBranchQuota {
|
||||
IrInstruction *new_quota;
|
||||
};
|
||||
|
||||
struct IrInstructionPtrTypeOf {
|
||||
IrInstruction base;
|
||||
|
||||
IrInstruction *align_value;
|
||||
IrInstruction *child_type;
|
||||
uint32_t bit_offset_start;
|
||||
uint32_t bit_offset_end;
|
||||
bool is_const;
|
||||
bool is_volatile;
|
||||
};
|
||||
|
||||
struct IrInstructionAlignCast {
|
||||
IrInstruction base;
|
||||
|
||||
|
||||
@ -25,6 +25,7 @@ static void resolve_struct_type(CodeGen *g, TypeTableEntry *struct_type);
|
||||
static void resolve_struct_zero_bits(CodeGen *g, TypeTableEntry *struct_type);
|
||||
static void resolve_enum_zero_bits(CodeGen *g, TypeTableEntry *enum_type);
|
||||
static void resolve_union_zero_bits(CodeGen *g, TypeTableEntry *union_type);
|
||||
static void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry);
|
||||
|
||||
ErrorMsg *add_node_error(CodeGen *g, AstNode *node, Buf *msg) {
|
||||
if (node->owner->c_import_node != nullptr) {
|
||||
@ -380,14 +381,14 @@ TypeTableEntry *get_promise_type(CodeGen *g, TypeTableEntry *result_type) {
|
||||
}
|
||||
|
||||
TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type, bool is_const,
|
||||
bool is_volatile, uint32_t byte_alignment, uint32_t bit_offset, uint32_t unaligned_bit_count)
|
||||
bool is_volatile, PtrLen ptr_len, uint32_t byte_alignment, uint32_t bit_offset, uint32_t unaligned_bit_count)
|
||||
{
|
||||
assert(!type_is_invalid(child_type));
|
||||
|
||||
TypeId type_id = {};
|
||||
TypeTableEntry **parent_pointer = nullptr;
|
||||
uint32_t abi_alignment = get_abi_alignment(g, child_type);
|
||||
if (unaligned_bit_count != 0 || is_volatile || byte_alignment != abi_alignment) {
|
||||
if (unaligned_bit_count != 0 || is_volatile || byte_alignment != abi_alignment || ptr_len != PtrLenSingle) {
|
||||
type_id.id = TypeTableEntryIdPointer;
|
||||
type_id.data.pointer.child_type = child_type;
|
||||
type_id.data.pointer.is_const = is_const;
|
||||
@ -395,6 +396,7 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type
|
||||
type_id.data.pointer.alignment = byte_alignment;
|
||||
type_id.data.pointer.bit_offset = bit_offset;
|
||||
type_id.data.pointer.unaligned_bit_count = unaligned_bit_count;
|
||||
type_id.data.pointer.ptr_len = ptr_len;
|
||||
|
||||
auto existing_entry = g->type_table.maybe_get(type_id);
|
||||
if (existing_entry)
|
||||
@ -413,16 +415,17 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type
|
||||
TypeTableEntry *entry = new_type_table_entry(TypeTableEntryIdPointer);
|
||||
entry->is_copyable = true;
|
||||
|
||||
const char *star_str = ptr_len == PtrLenSingle ? "*" : "[*]";
|
||||
const char *const_str = is_const ? "const " : "";
|
||||
const char *volatile_str = is_volatile ? "volatile " : "";
|
||||
buf_resize(&entry->name, 0);
|
||||
if (unaligned_bit_count == 0 && byte_alignment == abi_alignment) {
|
||||
buf_appendf(&entry->name, "&%s%s%s", const_str, volatile_str, buf_ptr(&child_type->name));
|
||||
buf_appendf(&entry->name, "%s%s%s%s", star_str, const_str, volatile_str, buf_ptr(&child_type->name));
|
||||
} else if (unaligned_bit_count == 0) {
|
||||
buf_appendf(&entry->name, "&align(%" PRIu32 ") %s%s%s", byte_alignment,
|
||||
buf_appendf(&entry->name, "%salign(%" PRIu32 ") %s%s%s", star_str, byte_alignment,
|
||||
const_str, volatile_str, buf_ptr(&child_type->name));
|
||||
} else {
|
||||
buf_appendf(&entry->name, "&align(%" PRIu32 ":%" PRIu32 ":%" PRIu32 ") %s%s%s", byte_alignment,
|
||||
buf_appendf(&entry->name, "%salign(%" PRIu32 ":%" PRIu32 ":%" PRIu32 ") %s%s%s", star_str, byte_alignment,
|
||||
bit_offset, bit_offset + unaligned_bit_count, const_str, volatile_str, buf_ptr(&child_type->name));
|
||||
}
|
||||
|
||||
@ -432,7 +435,9 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type
|
||||
|
||||
if (!entry->zero_bits) {
|
||||
assert(byte_alignment > 0);
|
||||
if (is_const || is_volatile || unaligned_bit_count != 0 || byte_alignment != abi_alignment) {
|
||||
if (is_const || is_volatile || unaligned_bit_count != 0 || byte_alignment != abi_alignment ||
|
||||
ptr_len != PtrLenSingle)
|
||||
{
|
||||
TypeTableEntry *peer_type = get_pointer_to_type(g, child_type, false);
|
||||
entry->type_ref = peer_type->type_ref;
|
||||
entry->di_type = peer_type->di_type;
|
||||
@ -450,6 +455,7 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type
|
||||
entry->di_type = g->builtin_types.entry_void->di_type;
|
||||
}
|
||||
|
||||
entry->data.pointer.ptr_len = ptr_len;
|
||||
entry->data.pointer.child_type = child_type;
|
||||
entry->data.pointer.is_const = is_const;
|
||||
entry->data.pointer.is_volatile = is_volatile;
|
||||
@ -466,7 +472,8 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type
|
||||
}
|
||||
|
||||
TypeTableEntry *get_pointer_to_type(CodeGen *g, TypeTableEntry *child_type, bool is_const) {
|
||||
return get_pointer_to_type_extra(g, child_type, is_const, false, get_abi_alignment(g, child_type), 0, 0);
|
||||
return get_pointer_to_type_extra(g, child_type, is_const, false, PtrLenSingle,
|
||||
get_abi_alignment(g, child_type), 0, 0);
|
||||
}
|
||||
|
||||
TypeTableEntry *get_promise_frame_type(CodeGen *g, TypeTableEntry *return_type) {
|
||||
@ -756,6 +763,7 @@ static void slice_type_common_init(CodeGen *g, TypeTableEntry *pointer_type, Typ
|
||||
|
||||
TypeTableEntry *get_slice_type(CodeGen *g, TypeTableEntry *ptr_type) {
|
||||
assert(ptr_type->id == TypeTableEntryIdPointer);
|
||||
assert(ptr_type->data.pointer.ptr_len == PtrLenUnknown);
|
||||
|
||||
TypeTableEntry **parent_pointer = &ptr_type->data.pointer.slice_parent;
|
||||
if (*parent_pointer) {
|
||||
@ -767,14 +775,16 @@ TypeTableEntry *get_slice_type(CodeGen *g, TypeTableEntry *ptr_type) {
|
||||
|
||||
// replace the & with [] to go from a ptr type name to a slice type name
|
||||
buf_resize(&entry->name, 0);
|
||||
buf_appendf(&entry->name, "[]%s", buf_ptr(&ptr_type->name) + 1);
|
||||
size_t name_offset = (ptr_type->data.pointer.ptr_len == PtrLenSingle) ? 1 : 3;
|
||||
buf_appendf(&entry->name, "[]%s", buf_ptr(&ptr_type->name) + name_offset);
|
||||
|
||||
TypeTableEntry *child_type = ptr_type->data.pointer.child_type;
|
||||
uint32_t abi_alignment;
|
||||
uint32_t abi_alignment = get_abi_alignment(g, child_type);
|
||||
if (ptr_type->data.pointer.is_const || ptr_type->data.pointer.is_volatile ||
|
||||
ptr_type->data.pointer.alignment != (abi_alignment = get_abi_alignment(g, child_type)))
|
||||
ptr_type->data.pointer.alignment != abi_alignment)
|
||||
{
|
||||
TypeTableEntry *peer_ptr_type = get_pointer_to_type(g, child_type, false);
|
||||
TypeTableEntry *peer_ptr_type = get_pointer_to_type_extra(g, child_type, false, false,
|
||||
PtrLenUnknown, abi_alignment, 0, 0);
|
||||
TypeTableEntry *peer_slice_type = get_slice_type(g, peer_ptr_type);
|
||||
|
||||
slice_type_common_init(g, ptr_type, entry);
|
||||
@ -798,9 +808,11 @@ TypeTableEntry *get_slice_type(CodeGen *g, TypeTableEntry *ptr_type) {
|
||||
if (child_ptr_type->data.pointer.is_const || child_ptr_type->data.pointer.is_volatile ||
|
||||
child_ptr_type->data.pointer.alignment != get_abi_alignment(g, grand_child_type))
|
||||
{
|
||||
TypeTableEntry *bland_child_ptr_type = get_pointer_to_type(g, grand_child_type, false);
|
||||
TypeTableEntry *bland_child_ptr_type = get_pointer_to_type_extra(g, grand_child_type, false, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, grand_child_type), 0, 0);
|
||||
TypeTableEntry *bland_child_slice = get_slice_type(g, bland_child_ptr_type);
|
||||
TypeTableEntry *peer_ptr_type = get_pointer_to_type(g, bland_child_slice, false);
|
||||
TypeTableEntry *peer_ptr_type = get_pointer_to_type_extra(g, bland_child_slice, false, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, bland_child_slice), 0, 0);
|
||||
TypeTableEntry *peer_slice_type = get_slice_type(g, peer_ptr_type);
|
||||
|
||||
entry->type_ref = peer_slice_type->type_ref;
|
||||
@ -1283,7 +1295,8 @@ static bool analyze_const_align(CodeGen *g, Scope *scope, AstNode *node, uint32_
|
||||
}
|
||||
|
||||
static bool analyze_const_string(CodeGen *g, Scope *scope, AstNode *node, Buf **out_buffer) {
|
||||
TypeTableEntry *ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *str_type = get_slice_type(g, ptr_type);
|
||||
IrInstruction *instr = analyze_const_value(g, scope, node, str_type, nullptr);
|
||||
if (type_is_invalid(instr->value.type))
|
||||
@ -2953,7 +2966,8 @@ static void typecheck_panic_fn(CodeGen *g, FnTableEntry *panic_fn) {
|
||||
if (fn_type_id->param_count != 2) {
|
||||
return wrong_panic_prototype(g, proto_node, fn_type);
|
||||
}
|
||||
TypeTableEntry *const_u8_ptr = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *const_u8_ptr = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *const_u8_slice = get_slice_type(g, const_u8_ptr);
|
||||
if (fn_type_id->param_info[0].type != const_u8_slice) {
|
||||
return wrong_panic_prototype(g, proto_node, fn_type);
|
||||
@ -3269,7 +3283,7 @@ void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
|
||||
case NodeTypeThisLiteral:
|
||||
case NodeTypeSymbol:
|
||||
case NodeTypePrefixOpExpr:
|
||||
case NodeTypeAddrOfExpr:
|
||||
case NodeTypePointerType:
|
||||
case NodeTypeIfBoolExpr:
|
||||
case NodeTypeWhileExpr:
|
||||
case NodeTypeForExpr:
|
||||
@ -3880,7 +3894,7 @@ static void define_local_param_variables(CodeGen *g, FnTableEntry *fn_table_entr
|
||||
}
|
||||
}
|
||||
|
||||
static bool analyze_resolve_inferred_error_set(CodeGen *g, TypeTableEntry *err_set_type, AstNode *source_node) {
|
||||
bool resolve_inferred_error_set(CodeGen *g, TypeTableEntry *err_set_type, AstNode *source_node) {
|
||||
FnTableEntry *infer_fn = err_set_type->data.error_set.infer_fn;
|
||||
if (infer_fn != nullptr) {
|
||||
if (infer_fn->anal_state == FnAnalStateInvalid) {
|
||||
@ -3932,7 +3946,7 @@ void analyze_fn_ir(CodeGen *g, FnTableEntry *fn_table_entry, AstNode *return_typ
|
||||
}
|
||||
|
||||
if (inferred_err_set_type->data.error_set.infer_fn != nullptr) {
|
||||
if (!analyze_resolve_inferred_error_set(g, inferred_err_set_type, return_type_node)) {
|
||||
if (!resolve_inferred_error_set(g, inferred_err_set_type, return_type_node)) {
|
||||
fn_table_entry->anal_state = FnAnalStateInvalid;
|
||||
return;
|
||||
}
|
||||
@ -3962,7 +3976,7 @@ void analyze_fn_ir(CodeGen *g, FnTableEntry *fn_table_entry, AstNode *return_typ
|
||||
fn_table_entry->anal_state = FnAnalStateComplete;
|
||||
}
|
||||
|
||||
void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry) {
|
||||
static void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry) {
|
||||
assert(fn_table_entry->anal_state != FnAnalStateProbing);
|
||||
if (fn_table_entry->anal_state != FnAnalStateReady)
|
||||
return;
|
||||
@ -4993,7 +5007,9 @@ void init_const_c_str_lit(CodeGen *g, ConstExprValue *const_val, Buf *str) {
|
||||
|
||||
// then make the pointer point to it
|
||||
const_val->special = ConstValSpecialStatic;
|
||||
const_val->type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
// TODO make this `[*]null u8` instead of `[*]u8`
|
||||
const_val->type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
const_val->data.x_ptr.special = ConstPtrSpecialBaseArray;
|
||||
const_val->data.x_ptr.data.base_array.array_val = array_val;
|
||||
const_val->data.x_ptr.data.base_array.elem_index = 0;
|
||||
@ -5134,7 +5150,9 @@ void init_const_slice(CodeGen *g, ConstExprValue *const_val, ConstExprValue *arr
|
||||
{
|
||||
assert(array_val->type->id == TypeTableEntryIdArray);
|
||||
|
||||
TypeTableEntry *ptr_type = get_pointer_to_type(g, array_val->type->data.array.child_type, is_const);
|
||||
TypeTableEntry *ptr_type = get_pointer_to_type_extra(g, array_val->type->data.array.child_type,
|
||||
is_const, false, PtrLenUnknown, get_abi_alignment(g, array_val->type->data.array.child_type),
|
||||
0, 0);
|
||||
|
||||
const_val->special = ConstValSpecialStatic;
|
||||
const_val->type = get_slice_type(g, ptr_type);
|
||||
@ -5758,6 +5776,7 @@ uint32_t type_id_hash(TypeId x) {
|
||||
return hash_ptr(x.data.error_union.err_set_type) ^ hash_ptr(x.data.error_union.payload_type);
|
||||
case TypeTableEntryIdPointer:
|
||||
return hash_ptr(x.data.pointer.child_type) +
|
||||
((x.data.pointer.ptr_len == PtrLenSingle) ? (uint32_t)1120226602 : (uint32_t)3200913342) +
|
||||
(x.data.pointer.is_const ? (uint32_t)2749109194 : (uint32_t)4047371087) +
|
||||
(x.data.pointer.is_volatile ? (uint32_t)536730450 : (uint32_t)1685612214) +
|
||||
(((uint32_t)x.data.pointer.alignment) ^ (uint32_t)0x777fbe0e) +
|
||||
@ -5806,6 +5825,7 @@ bool type_id_eql(TypeId a, TypeId b) {
|
||||
|
||||
case TypeTableEntryIdPointer:
|
||||
return a.data.pointer.child_type == b.data.pointer.child_type &&
|
||||
a.data.pointer.ptr_len == b.data.pointer.ptr_len &&
|
||||
a.data.pointer.is_const == b.data.pointer.is_const &&
|
||||
a.data.pointer.is_volatile == b.data.pointer.is_volatile &&
|
||||
a.data.pointer.alignment == b.data.pointer.alignment &&
|
||||
|
||||
@ -16,7 +16,7 @@ ErrorMsg *add_error_note(CodeGen *g, ErrorMsg *parent_msg, AstNode *node, Buf *m
|
||||
TypeTableEntry *new_type_table_entry(TypeTableEntryId id);
|
||||
TypeTableEntry *get_pointer_to_type(CodeGen *g, TypeTableEntry *child_type, bool is_const);
|
||||
TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type, bool is_const,
|
||||
bool is_volatile, uint32_t byte_alignment, uint32_t bit_offset, uint32_t unaligned_bit_count);
|
||||
bool is_volatile, PtrLen ptr_len, uint32_t byte_alignment, uint32_t bit_offset, uint32_t unaligned_bit_count);
|
||||
uint64_t type_size(CodeGen *g, TypeTableEntry *type_entry);
|
||||
uint64_t type_size_bits(CodeGen *g, TypeTableEntry *type_entry);
|
||||
TypeTableEntry **get_int_type_ptr(CodeGen *g, bool is_signed, uint32_t size_in_bits);
|
||||
@ -191,7 +191,7 @@ void add_fn_export(CodeGen *g, FnTableEntry *fn_table_entry, Buf *symbol_name, G
|
||||
|
||||
ConstExprValue *get_builtin_value(CodeGen *codegen, const char *name);
|
||||
TypeTableEntry *get_ptr_to_stack_trace_type(CodeGen *g);
|
||||
void analyze_fn_body(CodeGen *g, FnTableEntry *fn_table_entry);
|
||||
bool resolve_inferred_error_set(CodeGen *g, TypeTableEntry *err_set_type, AstNode *source_node);
|
||||
|
||||
TypeTableEntry *get_auto_err_set_type(CodeGen *g, FnTableEntry *fn_entry);
|
||||
|
||||
|
||||
@ -68,6 +68,7 @@ static const char *prefix_op_str(PrefixOp prefix_op) {
|
||||
case PrefixOpBinNot: return "~";
|
||||
case PrefixOpMaybe: return "?";
|
||||
case PrefixOpUnwrapMaybe: return "??";
|
||||
case PrefixOpAddrOf: return "&";
|
||||
}
|
||||
zig_unreachable();
|
||||
}
|
||||
@ -185,8 +186,6 @@ static const char *node_type_str(NodeType node_type) {
|
||||
return "Symbol";
|
||||
case NodeTypePrefixOpExpr:
|
||||
return "PrefixOpExpr";
|
||||
case NodeTypeAddrOfExpr:
|
||||
return "AddrOfExpr";
|
||||
case NodeTypeUse:
|
||||
return "Use";
|
||||
case NodeTypeBoolLiteral:
|
||||
@ -251,6 +250,8 @@ static const char *node_type_str(NodeType node_type) {
|
||||
return "Suspend";
|
||||
case NodeTypePromiseType:
|
||||
return "PromiseType";
|
||||
case NodeTypePointerType:
|
||||
return "PointerType";
|
||||
}
|
||||
zig_unreachable();
|
||||
}
|
||||
@ -616,41 +617,47 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
|
||||
fprintf(ar->f, "%s", prefix_op_str(op));
|
||||
|
||||
AstNode *child_node = node->data.prefix_op_expr.primary_expr;
|
||||
bool new_grouped = child_node->type == NodeTypePrefixOpExpr || child_node->type == NodeTypeAddrOfExpr;
|
||||
bool new_grouped = child_node->type == NodeTypePrefixOpExpr || child_node->type == NodeTypePointerType;
|
||||
render_node_extra(ar, child_node, new_grouped);
|
||||
if (!grouped) fprintf(ar->f, ")");
|
||||
break;
|
||||
}
|
||||
case NodeTypeAddrOfExpr:
|
||||
case NodeTypePointerType:
|
||||
{
|
||||
if (!grouped) fprintf(ar->f, "(");
|
||||
fprintf(ar->f, "&");
|
||||
if (node->data.addr_of_expr.align_expr != nullptr) {
|
||||
const char *star = "[*]";
|
||||
if (node->data.pointer_type.star_token != nullptr &&
|
||||
(node->data.pointer_type.star_token->id == TokenIdStar || node->data.pointer_type.star_token->id == TokenIdStarStar))
|
||||
{
|
||||
star = "*";
|
||||
}
|
||||
fprintf(ar->f, "%s", star);
|
||||
if (node->data.pointer_type.align_expr != nullptr) {
|
||||
fprintf(ar->f, "align(");
|
||||
render_node_grouped(ar, node->data.addr_of_expr.align_expr);
|
||||
if (node->data.addr_of_expr.bit_offset_start != nullptr) {
|
||||
assert(node->data.addr_of_expr.bit_offset_end != nullptr);
|
||||
render_node_grouped(ar, node->data.pointer_type.align_expr);
|
||||
if (node->data.pointer_type.bit_offset_start != nullptr) {
|
||||
assert(node->data.pointer_type.bit_offset_end != nullptr);
|
||||
|
||||
Buf offset_start_buf = BUF_INIT;
|
||||
buf_resize(&offset_start_buf, 0);
|
||||
bigint_append_buf(&offset_start_buf, node->data.addr_of_expr.bit_offset_start, 10);
|
||||
bigint_append_buf(&offset_start_buf, node->data.pointer_type.bit_offset_start, 10);
|
||||
|
||||
Buf offset_end_buf = BUF_INIT;
|
||||
buf_resize(&offset_end_buf, 0);
|
||||
bigint_append_buf(&offset_end_buf, node->data.addr_of_expr.bit_offset_end, 10);
|
||||
bigint_append_buf(&offset_end_buf, node->data.pointer_type.bit_offset_end, 10);
|
||||
|
||||
fprintf(ar->f, ":%s:%s ", buf_ptr(&offset_start_buf), buf_ptr(&offset_end_buf));
|
||||
}
|
||||
fprintf(ar->f, ") ");
|
||||
}
|
||||
if (node->data.addr_of_expr.is_const) {
|
||||
if (node->data.pointer_type.is_const) {
|
||||
fprintf(ar->f, "const ");
|
||||
}
|
||||
if (node->data.addr_of_expr.is_volatile) {
|
||||
if (node->data.pointer_type.is_volatile) {
|
||||
fprintf(ar->f, "volatile ");
|
||||
}
|
||||
|
||||
render_node_ungrouped(ar, node->data.addr_of_expr.op_expr);
|
||||
render_node_ungrouped(ar, node->data.pointer_type.op_expr);
|
||||
if (!grouped) fprintf(ar->f, ")");
|
||||
break;
|
||||
}
|
||||
@ -669,7 +676,7 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
|
||||
fprintf(ar->f, " ");
|
||||
}
|
||||
AstNode *fn_ref_node = node->data.fn_call_expr.fn_ref_expr;
|
||||
bool grouped = (fn_ref_node->type != NodeTypePrefixOpExpr && fn_ref_node->type != NodeTypeAddrOfExpr);
|
||||
bool grouped = (fn_ref_node->type != NodeTypePrefixOpExpr && fn_ref_node->type != NodeTypePointerType);
|
||||
render_node_extra(ar, fn_ref_node, grouped);
|
||||
fprintf(ar->f, "(");
|
||||
for (size_t i = 0; i < node->data.fn_call_expr.params.length; i += 1) {
|
||||
|
||||
@ -897,7 +897,8 @@ static LLVMValueRef get_panic_msg_ptr_val(CodeGen *g, PanicMsgId msg_id) {
|
||||
assert(val->global_refs->llvm_global);
|
||||
}
|
||||
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *str_type = get_slice_type(g, u8_ptr_type);
|
||||
return LLVMConstBitCast(val->global_refs->llvm_global, LLVMPointerType(str_type->type_ref, 0));
|
||||
}
|
||||
@ -1446,7 +1447,8 @@ static LLVMValueRef get_safety_crash_err_fn(CodeGen *g) {
|
||||
LLVMValueRef full_buf_ptr = LLVMConstInBoundsGEP(global_array, full_buf_ptr_indices, 2);
|
||||
|
||||
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *str_type = get_slice_type(g, u8_ptr_type);
|
||||
LLVMValueRef global_slice_fields[] = {
|
||||
full_buf_ptr,
|
||||
@ -2179,9 +2181,13 @@ static LLVMValueRef ir_render_bin_op(CodeGen *g, IrExecutable *executable,
|
||||
IrInstruction *op2 = bin_op_instruction->op2;
|
||||
|
||||
assert(op1->value.type == op2->value.type || op_id == IrBinOpBitShiftLeftLossy ||
|
||||
op_id == IrBinOpBitShiftLeftExact || op_id == IrBinOpBitShiftRightLossy ||
|
||||
op_id == IrBinOpBitShiftRightExact ||
|
||||
(op1->value.type->id == TypeTableEntryIdErrorSet && op2->value.type->id == TypeTableEntryIdErrorSet));
|
||||
op_id == IrBinOpBitShiftLeftExact || op_id == IrBinOpBitShiftRightLossy ||
|
||||
op_id == IrBinOpBitShiftRightExact ||
|
||||
(op1->value.type->id == TypeTableEntryIdErrorSet && op2->value.type->id == TypeTableEntryIdErrorSet) ||
|
||||
(op1->value.type->id == TypeTableEntryIdPointer &&
|
||||
(op_id == IrBinOpAdd || op_id == IrBinOpSub) &&
|
||||
op1->value.type->data.pointer.ptr_len == PtrLenUnknown)
|
||||
);
|
||||
TypeTableEntry *type_entry = op1->value.type;
|
||||
|
||||
bool want_runtime_safety = bin_op_instruction->safety_check_on &&
|
||||
@ -2189,6 +2195,8 @@ static LLVMValueRef ir_render_bin_op(CodeGen *g, IrExecutable *executable,
|
||||
|
||||
LLVMValueRef op1_value = ir_llvm_value(g, op1);
|
||||
LLVMValueRef op2_value = ir_llvm_value(g, op2);
|
||||
|
||||
|
||||
switch (op_id) {
|
||||
case IrBinOpInvalid:
|
||||
case IrBinOpArrayCat:
|
||||
@ -2227,7 +2235,11 @@ static LLVMValueRef ir_render_bin_op(CodeGen *g, IrExecutable *executable,
|
||||
}
|
||||
case IrBinOpAdd:
|
||||
case IrBinOpAddWrap:
|
||||
if (type_entry->id == TypeTableEntryIdFloat) {
|
||||
if (type_entry->id == TypeTableEntryIdPointer) {
|
||||
assert(type_entry->data.pointer.ptr_len == PtrLenUnknown);
|
||||
// TODO runtime safety
|
||||
return LLVMBuildInBoundsGEP(g->builder, op1_value, &op2_value, 1, "");
|
||||
} else if (type_entry->id == TypeTableEntryIdFloat) {
|
||||
ZigLLVMSetFastMath(g->builder, ir_want_fast_math(g, &bin_op_instruction->base));
|
||||
return LLVMBuildFAdd(g->builder, op1_value, op2_value, "");
|
||||
} else if (type_entry->id == TypeTableEntryIdInt) {
|
||||
@ -2290,7 +2302,12 @@ static LLVMValueRef ir_render_bin_op(CodeGen *g, IrExecutable *executable,
|
||||
}
|
||||
case IrBinOpSub:
|
||||
case IrBinOpSubWrap:
|
||||
if (type_entry->id == TypeTableEntryIdFloat) {
|
||||
if (type_entry->id == TypeTableEntryIdPointer) {
|
||||
assert(type_entry->data.pointer.ptr_len == PtrLenUnknown);
|
||||
// TODO runtime safety
|
||||
LLVMValueRef subscript_value = LLVMBuildNeg(g->builder, op2_value, "");
|
||||
return LLVMBuildInBoundsGEP(g->builder, op1_value, &subscript_value, 1, "");
|
||||
} else if (type_entry->id == TypeTableEntryIdFloat) {
|
||||
ZigLLVMSetFastMath(g->builder, ir_want_fast_math(g, &bin_op_instruction->base));
|
||||
return LLVMBuildFSub(g->builder, op1_value, op2_value, "");
|
||||
} else if (type_entry->id == TypeTableEntryIdInt) {
|
||||
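The codegen change above lowers + and - on unknown-length pointers to an in-bounds GEP (runtime safety for this path is still a TODO in the diff). In Zig terms, this is roughly what it enables; the snippet is a sketch under that assumption, not a test from the repository:

const std = @import("std");

test "pointer arithmetic on [*]T" {
    var array = []u8{ 'a', 'b', 'c', 'd' };
    const p = @ptrCast([*]u8, &array[0]);
    const q = p + 2; // lowered to an in-bounds GEP by the change above
    std.debug.assert(q[0] == 'c');
}
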
@ -2718,7 +2735,7 @@ static LLVMValueRef ir_render_decl_var(CodeGen *g, IrExecutable *executable,
|
||||
if (have_init_expr) {
|
||||
assert(var->value->type == init_value->value.type);
|
||||
TypeTableEntry *var_ptr_type = get_pointer_to_type_extra(g, var->value->type, false, false,
|
||||
var->align_bytes, 0, 0);
|
||||
PtrLenSingle, var->align_bytes, 0, 0);
|
||||
gen_assign_raw(g, var->value_ref, var_ptr_type, ir_llvm_value(g, init_value));
|
||||
} else {
|
||||
bool want_safe = ir_want_runtime_safety(g, &decl_var_instruction->base);
|
||||
@ -4087,7 +4104,7 @@ static LLVMValueRef ir_render_struct_init(CodeGen *g, IrExecutable *executable,
|
||||
uint32_t field_align_bytes = get_abi_alignment(g, type_struct_field->type_entry);
|
||||
|
||||
TypeTableEntry *ptr_type = get_pointer_to_type_extra(g, type_struct_field->type_entry,
|
||||
false, false, field_align_bytes,
|
||||
false, false, PtrLenSingle, field_align_bytes,
|
||||
(uint32_t)type_struct_field->packed_bits_offset, (uint32_t)type_struct_field->unaligned_bit_count);
|
||||
|
||||
gen_assign_raw(g, field_ptr, ptr_type, value);
|
||||
@ -4103,7 +4120,7 @@ static LLVMValueRef ir_render_union_init(CodeGen *g, IrExecutable *executable, I
|
||||
|
||||
uint32_t field_align_bytes = get_abi_alignment(g, type_union_field->type_entry);
|
||||
TypeTableEntry *ptr_type = get_pointer_to_type_extra(g, type_union_field->type_entry,
|
||||
false, false, field_align_bytes,
|
||||
false, false, PtrLenSingle, field_align_bytes,
|
||||
0, 0);
|
||||
|
||||
LLVMValueRef uncasted_union_ptr;
|
||||
@ -4350,7 +4367,8 @@ static LLVMValueRef get_coro_alloc_helper_fn_val(CodeGen *g, LLVMTypeRef alloc_f
|
||||
|
||||
LLVMPositionBuilderAtEnd(g->builder, ok_block);
|
||||
LLVMValueRef payload_ptr = LLVMBuildStructGEP(g->builder, sret_ptr, err_union_payload_index, "");
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, false);
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, false, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *slice_type = get_slice_type(g, u8_ptr_type);
|
||||
size_t ptr_field_index = slice_type->data.structure.fields[slice_ptr_index].gen_index;
|
||||
LLVMValueRef ptr_field_ptr = LLVMBuildStructGEP(g->builder, payload_ptr, ptr_field_index, "");
|
||||
@ -4515,7 +4533,7 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
|
||||
case IrInstructionIdTypeInfo:
|
||||
case IrInstructionIdTypeId:
|
||||
case IrInstructionIdSetEvalBranchQuota:
|
||||
case IrInstructionIdPtrTypeOf:
|
||||
case IrInstructionIdPtrType:
|
||||
case IrInstructionIdOpaqueType:
|
||||
case IrInstructionIdSetAlignStack:
|
||||
case IrInstructionIdArgType:
|
||||
@ -5292,7 +5310,8 @@ static void generate_error_name_table(CodeGen *g) {
|
||||
|
||||
assert(g->errors_by_index.length > 0);
|
||||
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *str_type = get_slice_type(g, u8_ptr_type);
|
||||
|
||||
LLVMValueRef *values = allocate<LLVMValueRef>(g->errors_by_index.length);
|
||||
@ -5330,7 +5349,8 @@ static void generate_error_name_table(CodeGen *g) {
|
||||
}
|
||||
|
||||
static void generate_enum_name_tables(CodeGen *g) {
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *str_type = get_slice_type(g, u8_ptr_type);
|
||||
|
||||
TypeTableEntry *usize = g->builtin_types.entry_usize;
|
||||
@ -6784,7 +6804,8 @@ static void create_test_compile_var_and_add_test_runner(CodeGen *g) {
|
||||
exit(0);
|
||||
}
|
||||
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type(g, g->builtin_types.entry_u8, true);
|
||||
TypeTableEntry *u8_ptr_type = get_pointer_to_type_extra(g, g->builtin_types.entry_u8, true, false,
|
||||
PtrLenUnknown, get_abi_alignment(g, g->builtin_types.entry_u8), 0, 0);
|
||||
TypeTableEntry *str_type = get_slice_type(g, u8_ptr_type);
|
||||
TypeTableEntry *fn_type = get_test_fn_type(g);
|
||||
|
||||
|
||||
598
src/ir.cpp
File diff suppressed because it is too large
@ -921,7 +921,7 @@ static void ir_print_can_implicit_cast(IrPrint *irp, IrInstructionCanImplicitCas
|
||||
fprintf(irp->f, ")");
|
||||
}
|
||||
|
||||
static void ir_print_ptr_type_of(IrPrint *irp, IrInstructionPtrTypeOf *instruction) {
|
||||
static void ir_print_ptr_type(IrPrint *irp, IrInstructionPtrType *instruction) {
|
||||
fprintf(irp->f, "&");
|
||||
if (instruction->align_value != nullptr) {
|
||||
fprintf(irp->f, "align(");
|
||||
@ -1527,8 +1527,8 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
|
||||
case IrInstructionIdCanImplicitCast:
|
||||
ir_print_can_implicit_cast(irp, (IrInstructionCanImplicitCast *)instruction);
|
||||
break;
|
||||
case IrInstructionIdPtrTypeOf:
|
||||
ir_print_ptr_type_of(irp, (IrInstructionPtrTypeOf *)instruction);
|
||||
case IrInstructionIdPtrType:
|
||||
ir_print_ptr_type(irp, (IrInstructionPtrType *)instruction);
|
||||
break;
|
||||
case IrInstructionIdDeclRef:
|
||||
ir_print_decl_ref(irp, (IrInstructionDeclRef *)instruction);
|
||||
|
||||
@ -1167,20 +1167,20 @@ static PrefixOp tok_to_prefix_op(Token *token) {
|
||||
case TokenIdTilde: return PrefixOpBinNot;
|
||||
case TokenIdMaybe: return PrefixOpMaybe;
|
||||
case TokenIdDoubleQuestion: return PrefixOpUnwrapMaybe;
|
||||
case TokenIdAmpersand: return PrefixOpAddrOf;
|
||||
default: return PrefixOpInvalid;
|
||||
}
|
||||
}
|
||||
|
||||
static AstNode *ast_parse_addr_of(ParseContext *pc, size_t *token_index) {
|
||||
Token *ampersand_tok = ast_eat_token(pc, token_index, TokenIdAmpersand);
|
||||
|
||||
AstNode *node = ast_create_node(pc, NodeTypeAddrOfExpr, ampersand_tok);
|
||||
static AstNode *ast_parse_pointer_type(ParseContext *pc, size_t *token_index, Token *star_tok) {
|
||||
AstNode *node = ast_create_node(pc, NodeTypePointerType, star_tok);
|
||||
node->data.pointer_type.star_token = star_tok;
|
||||
|
||||
Token *token = &pc->tokens->at(*token_index);
|
||||
if (token->id == TokenIdKeywordAlign) {
|
||||
*token_index += 1;
|
||||
ast_eat_token(pc, token_index, TokenIdLParen);
|
||||
node->data.addr_of_expr.align_expr = ast_parse_expression(pc, token_index, true);
|
||||
node->data.pointer_type.align_expr = ast_parse_expression(pc, token_index, true);
|
||||
|
||||
token = &pc->tokens->at(*token_index);
|
||||
if (token->id == TokenIdColon) {
|
||||
@ -1189,35 +1189,45 @@ static AstNode *ast_parse_addr_of(ParseContext *pc, size_t *token_index) {
|
||||
ast_eat_token(pc, token_index, TokenIdColon);
|
||||
Token *bit_offset_end_tok = ast_eat_token(pc, token_index, TokenIdIntLiteral);
|
||||
|
||||
node->data.addr_of_expr.bit_offset_start = token_bigint(bit_offset_start_tok);
|
||||
node->data.addr_of_expr.bit_offset_end = token_bigint(bit_offset_end_tok);
|
||||
node->data.pointer_type.bit_offset_start = token_bigint(bit_offset_start_tok);
|
||||
node->data.pointer_type.bit_offset_end = token_bigint(bit_offset_end_tok);
|
||||
}
|
||||
ast_eat_token(pc, token_index, TokenIdRParen);
|
||||
token = &pc->tokens->at(*token_index);
|
||||
}
|
||||
if (token->id == TokenIdKeywordConst) {
|
||||
*token_index += 1;
|
||||
node->data.addr_of_expr.is_const = true;
|
||||
node->data.pointer_type.is_const = true;
|
||||
|
||||
token = &pc->tokens->at(*token_index);
|
||||
}
|
||||
if (token->id == TokenIdKeywordVolatile) {
|
||||
*token_index += 1;
|
||||
node->data.addr_of_expr.is_volatile = true;
|
||||
node->data.pointer_type.is_volatile = true;
|
||||
}
|
||||
|
||||
node->data.addr_of_expr.op_expr = ast_parse_prefix_op_expr(pc, token_index, true);
|
||||
node->data.pointer_type.op_expr = ast_parse_prefix_op_expr(pc, token_index, true);
|
||||
return node;
|
||||
}
|
||||
|
||||
/*
PrefixOpExpression = PrefixOp ErrorSetExpr | SuffixOpExpression
PrefixOp = "!" | "-" | "~" | ("*" option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try" | "await"
PrefixOp = "!" | "-" | "~" | (("*" | "[*]") option("align" "(" Expression option(":" Integer ":" Integer) ")" ) option("const") option("volatile")) | "?" | "??" | "-%" | "try" | "await"
*/
static AstNode *ast_parse_prefix_op_expr(ParseContext *pc, size_t *token_index, bool mandatory) {
Token *token = &pc->tokens->at(*token_index);
if (token->id == TokenIdAmpersand) {
return ast_parse_addr_of(pc, token_index);
if (token->id == TokenIdStar || token->id == TokenIdBracketStarBracket) {
*token_index += 1;
return ast_parse_pointer_type(pc, token_index, token);
}
if (token->id == TokenIdStarStar) {
*token_index += 1;
AstNode *child_node = ast_parse_pointer_type(pc, token_index, token);
child_node->column += 1;
AstNode *parent_node = ast_create_node(pc, NodeTypePointerType, token);
parent_node->data.pointer_type.star_token = token;
parent_node->data.pointer_type.op_expr = child_node;
return parent_node;
}
if (token->id == TokenIdKeywordTry) {
return ast_parse_try_expr(pc, token_index);
@ -1234,13 +1244,12 @@ static AstNode *ast_parse_prefix_op_expr(ParseContext *pc, size_t *token_index,

AstNode *node = ast_create_node(pc, NodeTypePrefixOpExpr, token);
AstNode *parent_node = node;

AstNode *prefix_op_expr = ast_parse_error_set_expr(pc, token_index, true);
node->data.prefix_op_expr.primary_expr = prefix_op_expr;
node->data.prefix_op_expr.prefix_op = prefix_op;

return parent_node;
return node;
}
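For reference, a small Zig sketch of the prefix pointer syntax that ast_parse_pointer_type now accepts; this is an illustration written for this note, not part of the commit, and it assumes this branch's compiler and test runner.

    const std = @import("std");

    test "pointer type syntax handled by ast_parse_pointer_type" {
        var x: i32 = 1234;
        var single: *i32 = &x; // a single-item pointer type is now spelled *T instead of &T
        const double: **i32 = &single; // TokenIdStarStar is split into two nested pointer types
        const many: [*]const u8 = undefined; // the new [*] token yields an unknown-length pointer type
        std.debug.assert(double.*.* == 1234);
    }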
@ -3121,9 +3130,9 @@ void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *cont
|
||||
case NodeTypeErrorType:
|
||||
// none
|
||||
break;
|
||||
case NodeTypeAddrOfExpr:
|
||||
visit_field(&node->data.addr_of_expr.align_expr, visit, context);
|
||||
visit_field(&node->data.addr_of_expr.op_expr, visit, context);
|
||||
case NodeTypePointerType:
|
||||
visit_field(&node->data.pointer_type.align_expr, visit, context);
|
||||
visit_field(&node->data.pointer_type.op_expr, visit, context);
|
||||
break;
|
||||
case NodeTypeErrorSetDecl:
|
||||
visit_node_list(&node->data.err_set_decl.decls, visit, context);
|
||||
|
||||
@ -219,6 +219,8 @@ enum TokenizeState {
|
||||
TokenizeStateSawAtSign,
|
||||
TokenizeStateCharCode,
|
||||
TokenizeStateError,
|
||||
TokenizeStateLBracket,
|
||||
TokenizeStateLBracketStar,
|
||||
};
|
||||
|
||||
|
||||
@ -539,8 +541,8 @@ void tokenize(Buf *buf, Tokenization *out) {
|
||||
end_token(&t);
|
||||
break;
|
||||
case '[':
|
||||
t.state = TokenizeStateLBracket;
|
||||
begin_token(&t, TokenIdLBracket);
|
||||
end_token(&t);
|
||||
break;
|
||||
case ']':
|
||||
begin_token(&t, TokenIdRBracket);
|
||||
@ -852,6 +854,30 @@ void tokenize(Buf *buf, Tokenization *out) {
continue;
}
break;
case TokenizeStateLBracket:
switch (c) {
case '*':
t.state = TokenizeStateLBracketStar;
set_token_id(&t, t.cur_tok, TokenIdBracketStarBracket);
break;
default:
// reinterpret as just an lbracket
t.pos -= 1;
end_token(&t);
t.state = TokenizeStateStart;
continue;
}
break;
case TokenizeStateLBracketStar:
switch (c) {
case ']':
end_token(&t);
t.state = TokenizeStateStart;
break;
default:
invalid_char_error(&t, c);
}
break;
case TokenizeStateSawPlusPercent:
switch (c) {
case '=':
@ -1467,12 +1493,14 @@ void tokenize(Buf *buf, Tokenization *out) {
|
||||
case TokenizeStateLineString:
|
||||
case TokenizeStateLineStringEnd:
|
||||
case TokenizeStateSawBarBar:
|
||||
case TokenizeStateLBracket:
|
||||
end_token(&t);
|
||||
break;
|
||||
case TokenizeStateSawDotDot:
|
||||
case TokenizeStateSawBackslash:
|
||||
case TokenizeStateLineStringContinue:
|
||||
case TokenizeStateLineStringContinueC:
|
||||
case TokenizeStateLBracketStar:
|
||||
tokenize_error(&t, "unexpected EOF");
|
||||
break;
|
||||
case TokenizeStateLineComment:
|
||||
@ -1509,6 +1537,7 @@ const char * token_name(TokenId id) {
|
||||
case TokenIdBitShiftRight: return ">>";
|
||||
case TokenIdBitShiftRightEq: return ">>=";
|
||||
case TokenIdBitXorEq: return "^=";
|
||||
case TokenIdBracketStarBracket: return "[*]";
|
||||
case TokenIdCharLiteral: return "CharLiteral";
|
||||
case TokenIdCmpEq: return "==";
|
||||
case TokenIdCmpGreaterOrEq: return ">=";
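The TokenIdBracketStarBracket entries above are what let "[*]" appear in type positions. A hedged sketch of the kind of declaration this enables; the extern name here is hypothetical, not from this commit:

    // hypothetical extern prototype: [*]T is a natural spelling for a C pointer parameter
    extern fn my_c_fill(dest: [*]u8, value: u8, len: usize) void;

    // a plain unknown-length pointer declaration, using only the token and parse support added here
    var bytes: [*]const u8 = undefined;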
|
||||
|
||||
@ -28,6 +28,7 @@ enum TokenId {
|
||||
TokenIdBitShiftRight,
|
||||
TokenIdBitShiftRightEq,
|
||||
TokenIdBitXorEq,
|
||||
TokenIdBracketStarBracket,
|
||||
TokenIdCharLiteral,
|
||||
TokenIdCmpEq,
|
||||
TokenIdCmpGreaterOrEq,
|
||||
|
||||
@ -276,11 +276,18 @@ static AstNode *maybe_suppress_result(Context *c, ResultUsed result_used, AstNod
|
||||
node);
|
||||
}
|
||||
|
||||
static AstNode *trans_create_node_addr_of(Context *c, bool is_const, bool is_volatile, AstNode *child_node) {
|
||||
AstNode *node = trans_create_node(c, NodeTypeAddrOfExpr);
|
||||
node->data.addr_of_expr.is_const = is_const;
|
||||
node->data.addr_of_expr.is_volatile = is_volatile;
|
||||
node->data.addr_of_expr.op_expr = child_node;
|
||||
static AstNode *trans_create_node_ptr_type(Context *c, bool is_const, bool is_volatile, AstNode *child_node) {
|
||||
AstNode *node = trans_create_node(c, NodeTypePointerType);
|
||||
node->data.pointer_type.is_const = is_const;
|
||||
node->data.pointer_type.is_volatile = is_volatile;
|
||||
node->data.pointer_type.op_expr = child_node;
|
||||
return node;
|
||||
}
|
||||
|
||||
static AstNode *trans_create_node_addr_of(Context *c, AstNode *child_node) {
|
||||
AstNode *node = trans_create_node(c, NodeTypePrefixOpExpr);
|
||||
node->data.prefix_op_expr.prefix_op = PrefixOpAddrOf;
|
||||
node->data.prefix_op_expr.primary_expr = child_node;
|
||||
return node;
|
||||
}
|
||||
|
||||
@ -849,7 +856,7 @@ static AstNode *trans_type(Context *c, const Type *ty, const SourceLocation &sou
|
||||
return trans_create_node_prefix_op(c, PrefixOpMaybe, child_node);
|
||||
}
|
||||
|
||||
AstNode *pointer_node = trans_create_node_addr_of(c, child_qt.isConstQualified(),
|
||||
AstNode *pointer_node = trans_create_node_ptr_type(c, child_qt.isConstQualified(),
|
||||
child_qt.isVolatileQualified(), child_node);
|
||||
return trans_create_node_prefix_op(c, PrefixOpMaybe, pointer_node);
|
||||
}
|
||||
@ -1034,7 +1041,7 @@ static AstNode *trans_type(Context *c, const Type *ty, const SourceLocation &sou
|
||||
emit_warning(c, source_loc, "unresolved array element type");
|
||||
return nullptr;
|
||||
}
|
||||
AstNode *pointer_node = trans_create_node_addr_of(c, child_qt.isConstQualified(),
|
||||
AstNode *pointer_node = trans_create_node_ptr_type(c, child_qt.isConstQualified(),
|
||||
child_qt.isVolatileQualified(), child_type_node);
|
||||
return pointer_node;
|
||||
}
|
||||
@ -1403,7 +1410,7 @@ static AstNode *trans_create_compound_assign_shift(Context *c, ResultUsed result
|
||||
// const _ref = &lhs;
|
||||
AstNode *lhs = trans_expr(c, ResultUsedYes, &child_scope->base, stmt->getLHS(), TransLValue);
|
||||
if (lhs == nullptr) return nullptr;
|
||||
AstNode *addr_of_lhs = trans_create_node_addr_of(c, false, false, lhs);
|
||||
AstNode *addr_of_lhs = trans_create_node_addr_of(c, lhs);
|
||||
// TODO: avoid name collisions with generated variable names
|
||||
Buf* tmp_var_name = buf_create_from_str("_ref");
|
||||
AstNode *tmp_var_decl = trans_create_node_var_decl_local(c, true, tmp_var_name, nullptr, addr_of_lhs);
|
||||
@ -1477,7 +1484,7 @@ static AstNode *trans_create_compound_assign(Context *c, ResultUsed result_used,
|
||||
// const _ref = &lhs;
|
||||
AstNode *lhs = trans_expr(c, ResultUsedYes, &child_scope->base, stmt->getLHS(), TransLValue);
|
||||
if (lhs == nullptr) return nullptr;
|
||||
AstNode *addr_of_lhs = trans_create_node_addr_of(c, false, false, lhs);
|
||||
AstNode *addr_of_lhs = trans_create_node_addr_of(c, lhs);
|
||||
// TODO: avoid name collisions with generated variable names
|
||||
Buf* tmp_var_name = buf_create_from_str("_ref");
|
||||
AstNode *tmp_var_decl = trans_create_node_var_decl_local(c, true, tmp_var_name, nullptr, addr_of_lhs);
|
||||
@ -1814,7 +1821,7 @@ static AstNode *trans_create_post_crement(Context *c, ResultUsed result_used, Tr
|
||||
// const _ref = &expr;
|
||||
AstNode *expr = trans_expr(c, ResultUsedYes, &child_scope->base, op_expr, TransLValue);
|
||||
if (expr == nullptr) return nullptr;
|
||||
AstNode *addr_of_expr = trans_create_node_addr_of(c, false, false, expr);
|
||||
AstNode *addr_of_expr = trans_create_node_addr_of(c, expr);
|
||||
// TODO: avoid name collisions with generated variable names
|
||||
Buf* ref_var_name = buf_create_from_str("_ref");
|
||||
AstNode *ref_var_decl = trans_create_node_var_decl_local(c, true, ref_var_name, nullptr, addr_of_expr);
|
||||
@ -1869,7 +1876,7 @@ static AstNode *trans_create_pre_crement(Context *c, ResultUsed result_used, Tra
|
||||
// const _ref = &expr;
|
||||
AstNode *expr = trans_expr(c, ResultUsedYes, &child_scope->base, op_expr, TransLValue);
|
||||
if (expr == nullptr) return nullptr;
|
||||
AstNode *addr_of_expr = trans_create_node_addr_of(c, false, false, expr);
|
||||
AstNode *addr_of_expr = trans_create_node_addr_of(c, expr);
|
||||
// TODO: avoid name collisions with generated variable names
|
||||
Buf* ref_var_name = buf_create_from_str("_ref");
|
||||
AstNode *ref_var_decl = trans_create_node_var_decl_local(c, true, ref_var_name, nullptr, addr_of_expr);
|
||||
@ -1918,7 +1925,7 @@ static AstNode *trans_unary_operator(Context *c, ResultUsed result_used, TransSc
|
||||
AstNode *value_node = trans_expr(c, result_used, scope, stmt->getSubExpr(), TransLValue);
|
||||
if (value_node == nullptr)
|
||||
return value_node;
|
||||
return trans_create_node_addr_of(c, false, false, value_node);
|
||||
return trans_create_node_addr_of(c, value_node);
|
||||
}
|
||||
case UO_Deref:
|
||||
{
|
||||
@ -4443,7 +4450,7 @@ static AstNode *parse_ctok_suffix_op_expr(Context *c, CTokenize *ctok, size_t *t
|
||||
} else if (first_tok->id == CTokIdAsterisk) {
|
||||
*tok_i += 1;
|
||||
|
||||
node = trans_create_node_addr_of(c, false, false, node);
|
||||
node = trans_create_node_ptr_type(c, false, false, node);
|
||||
} else {
|
||||
return node;
|
||||
}
|
||||
|
||||
10 src/util.hpp
@ -38,11 +38,11 @@ ATTRIBUTE_NORETURN
|
||||
ATTRIBUTE_PRINTF(1, 2)
|
||||
void zig_panic(const char *format, ...);
|
||||
|
||||
ATTRIBUTE_COLD
|
||||
ATTRIBUTE_NORETURN
|
||||
static inline void zig_unreachable(void) {
|
||||
zig_panic("unreachable");
|
||||
}
|
||||
#ifdef WIN32
|
||||
#define __func__ __FUNCTION__
|
||||
#endif
|
||||
|
||||
#define zig_unreachable() zig_panic("unreachable: %s:%s:%d", __FILE__, __func__, __LINE__)
|
||||
|
||||
#if defined(_MSC_VER)
|
||||
static inline int clzll(unsigned long long mask) {
|
||||
|
||||
@ -17,10 +17,10 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
|
||||
/// you uninitialized memory.
|
||||
items: []align(A) T,
|
||||
len: usize,
|
||||
allocator: &Allocator,
|
||||
allocator: *Allocator,
|
||||
|
||||
/// Deinitialize with `deinit` or use `toOwnedSlice`.
|
||||
pub fn init(allocator: &Allocator) Self {
|
||||
pub fn init(allocator: *Allocator) Self {
|
||||
return Self{
|
||||
.items = []align(A) T{},
|
||||
.len = 0,
|
||||
@ -28,30 +28,30 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(l: &const Self) void {
|
||||
pub fn deinit(l: *const Self) void {
|
||||
l.allocator.free(l.items);
|
||||
}
|
||||
|
||||
pub fn toSlice(l: &const Self) []align(A) T {
|
||||
pub fn toSlice(l: *const Self) []align(A) T {
|
||||
return l.items[0..l.len];
|
||||
}
|
||||
|
||||
pub fn toSliceConst(l: &const Self) []align(A) const T {
|
||||
pub fn toSliceConst(l: *const Self) []align(A) const T {
|
||||
return l.items[0..l.len];
|
||||
}
|
||||
|
||||
pub fn at(l: &const Self, n: usize) T {
|
||||
pub fn at(l: *const Self, n: usize) T {
|
||||
return l.toSliceConst()[n];
|
||||
}
|
||||
|
||||
pub fn count(self: &const Self) usize {
|
||||
pub fn count(self: *const Self) usize {
|
||||
return self.len;
|
||||
}
|
||||
|
||||
/// ArrayList takes ownership of the passed in slice. The slice must have been
|
||||
/// allocated with `allocator`.
|
||||
/// Deinitialize with `deinit` or use `toOwnedSlice`.
|
||||
pub fn fromOwnedSlice(allocator: &Allocator, slice: []align(A) T) Self {
|
||||
pub fn fromOwnedSlice(allocator: *Allocator, slice: []align(A) T) Self {
|
||||
return Self{
|
||||
.items = slice,
|
||||
.len = slice.len,
|
||||
@ -60,51 +60,51 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
|
||||
}
|
||||
|
||||
/// The caller owns the returned memory. ArrayList becomes empty.
|
||||
pub fn toOwnedSlice(self: &Self) []align(A) T {
|
||||
pub fn toOwnedSlice(self: *Self) []align(A) T {
|
||||
const allocator = self.allocator;
|
||||
const result = allocator.alignedShrink(T, A, self.items, self.len);
|
||||
self.* = init(allocator);
|
||||
return result;
|
||||
}
|
||||
|
||||
pub fn insert(l: &Self, n: usize, item: &const T) !void {
|
||||
pub fn insert(l: *Self, n: usize, item: *const T) !void {
|
||||
try l.ensureCapacity(l.len + 1);
|
||||
l.len += 1;
|
||||
|
||||
mem.copy(T, l.items[n + 1..l.len], l.items[n..l.len - 1]);
|
||||
mem.copy(T, l.items[n + 1 .. l.len], l.items[n .. l.len - 1]);
|
||||
l.items[n] = item.*;
|
||||
}
|
||||
|
||||
pub fn insertSlice(l: &Self, n: usize, items: []align(A) const T) !void {
|
||||
pub fn insertSlice(l: *Self, n: usize, items: []align(A) const T) !void {
|
||||
try l.ensureCapacity(l.len + items.len);
|
||||
l.len += items.len;
|
||||
|
||||
mem.copy(T, l.items[n + items.len..l.len], l.items[n..l.len - items.len]);
|
||||
mem.copy(T, l.items[n..n + items.len], items);
|
||||
mem.copy(T, l.items[n + items.len .. l.len], l.items[n .. l.len - items.len]);
|
||||
mem.copy(T, l.items[n .. n + items.len], items);
|
||||
}
|
||||
|
||||
pub fn append(l: &Self, item: &const T) !void {
|
||||
pub fn append(l: *Self, item: *const T) !void {
|
||||
const new_item_ptr = try l.addOne();
|
||||
new_item_ptr.* = item.*;
|
||||
}
|
||||
|
||||
pub fn appendSlice(l: &Self, items: []align(A) const T) !void {
|
||||
pub fn appendSlice(l: *Self, items: []align(A) const T) !void {
|
||||
try l.ensureCapacity(l.len + items.len);
|
||||
mem.copy(T, l.items[l.len..], items);
|
||||
l.len += items.len;
|
||||
}
|
||||
|
||||
pub fn resize(l: &Self, new_len: usize) !void {
|
||||
pub fn resize(l: *Self, new_len: usize) !void {
|
||||
try l.ensureCapacity(new_len);
|
||||
l.len = new_len;
|
||||
}
|
||||
|
||||
pub fn shrink(l: &Self, new_len: usize) void {
|
||||
pub fn shrink(l: *Self, new_len: usize) void {
|
||||
assert(new_len <= l.len);
|
||||
l.len = new_len;
|
||||
}
|
||||
|
||||
pub fn ensureCapacity(l: &Self, new_capacity: usize) !void {
|
||||
pub fn ensureCapacity(l: *Self, new_capacity: usize) !void {
|
||||
var better_capacity = l.items.len;
|
||||
if (better_capacity >= new_capacity) return;
|
||||
while (true) {
|
||||
@ -114,7 +114,7 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
|
||||
l.items = try l.allocator.alignedRealloc(T, A, l.items, better_capacity);
|
||||
}
|
||||
|
||||
pub fn addOne(l: &Self) !&T {
|
||||
pub fn addOne(l: *Self) !*T {
|
||||
const new_length = l.len + 1;
|
||||
try l.ensureCapacity(new_length);
|
||||
const result = &l.items[l.len];
|
||||
@ -122,34 +122,34 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
|
||||
return result;
|
||||
}
|
||||
|
||||
pub fn pop(self: &Self) T {
|
||||
pub fn pop(self: *Self) T {
|
||||
self.len -= 1;
|
||||
return self.items[self.len];
|
||||
}
|
||||
|
||||
pub fn popOrNull(self: &Self) ?T {
|
||||
pub fn popOrNull(self: *Self) ?T {
|
||||
if (self.len == 0) return null;
|
||||
return self.pop();
|
||||
}
|
||||
|
||||
pub const Iterator = struct {
|
||||
list: &const Self,
|
||||
list: *const Self,
|
||||
// how many items have we returned
|
||||
count: usize,
|
||||
|
||||
pub fn next(it: &Iterator) ?T {
|
||||
pub fn next(it: *Iterator) ?T {
|
||||
if (it.count >= it.list.len) return null;
|
||||
const val = it.list.at(it.count);
|
||||
it.count += 1;
|
||||
return val;
|
||||
}
|
||||
|
||||
pub fn reset(it: &Iterator) void {
|
||||
pub fn reset(it: *Iterator) void {
|
||||
it.count = 0;
|
||||
}
|
||||
};
|
||||
|
||||
pub fn iterator(self: &const Self) Iterator {
|
||||
pub fn iterator(self: *const Self) Iterator {
|
||||
return Iterator{
|
||||
.list = self,
|
||||
.count = 0,
|
||||
|
||||
@ -5,36 +5,36 @@ const AtomicRmwOp = builtin.AtomicRmwOp;
|
||||
/// Many reader, many writer, non-allocating, thread-safe, lock-free
|
||||
pub fn Queue(comptime T: type) type {
|
||||
return struct {
|
||||
head: &Node,
|
||||
tail: &Node,
|
||||
head: *Node,
|
||||
tail: *Node,
|
||||
root: Node,
|
||||
|
||||
pub const Self = this;
|
||||
|
||||
pub const Node = struct {
|
||||
next: ?&Node,
|
||||
next: ?*Node,
|
||||
data: T,
|
||||
};
|
||||
|
||||
// TODO: well defined copy elision: https://github.com/ziglang/zig/issues/287
|
||||
pub fn init(self: &Self) void {
|
||||
pub fn init(self: *Self) void {
|
||||
self.root.next = null;
|
||||
self.head = &self.root;
|
||||
self.tail = &self.root;
|
||||
}
|
||||
|
||||
pub fn put(self: &Self, node: &Node) void {
|
||||
pub fn put(self: *Self, node: *Node) void {
|
||||
node.next = null;
|
||||
|
||||
const tail = @atomicRmw(&Node, &self.tail, AtomicRmwOp.Xchg, node, AtomicOrder.SeqCst);
|
||||
_ = @atomicRmw(?&Node, &tail.next, AtomicRmwOp.Xchg, node, AtomicOrder.SeqCst);
|
||||
const tail = @atomicRmw(*Node, &self.tail, AtomicRmwOp.Xchg, node, AtomicOrder.SeqCst);
|
||||
_ = @atomicRmw(?*Node, &tail.next, AtomicRmwOp.Xchg, node, AtomicOrder.SeqCst);
|
||||
}
|
||||
|
||||
pub fn get(self: &Self) ?&Node {
|
||||
var head = @atomicLoad(&Node, &self.head, AtomicOrder.SeqCst);
|
||||
pub fn get(self: *Self) ?*Node {
|
||||
var head = @atomicLoad(*Node, &self.head, AtomicOrder.SeqCst);
|
||||
while (true) {
|
||||
const node = head.next ?? return null;
|
||||
head = @cmpxchgWeak(&Node, &self.head, head, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return node;
|
||||
head = @cmpxchgWeak(*Node, &self.head, head, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return node;
|
||||
}
|
||||
}
|
||||
};
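As a reading aid, a single-threaded sketch of the Queue API above with the new pointer spellings; the std.atomic.Queue import path and the test itself are assumptions made for this illustration:

    const std = @import("std");
    const Queue = std.atomic.Queue; // assumed public path for this file

    test "single-threaded Queue smoke test" {
        var queue: Queue(i32) = undefined;
        queue.init(); // points head and tail at the embedded root node

        var node = Queue(i32).Node{
            .next = null,
            .data = 42,
        };
        queue.put(&node);

        const popped = queue.get() ?? unreachable;
        std.debug.assert(popped.data == 42);
    }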
|
||||
@ -42,8 +42,8 @@ pub fn Queue(comptime T: type) type {
|
||||
|
||||
const std = @import("std");
|
||||
const Context = struct {
|
||||
allocator: &std.mem.Allocator,
|
||||
queue: &Queue(i32),
|
||||
allocator: *std.mem.Allocator,
|
||||
queue: *Queue(i32),
|
||||
put_sum: isize,
|
||||
get_sum: isize,
|
||||
get_count: usize,
|
||||
@ -79,11 +79,11 @@ test "std.atomic.queue" {
|
||||
.get_count = 0,
|
||||
};
|
||||
|
||||
var putters: [put_thread_count]&std.os.Thread = undefined;
|
||||
var putters: [put_thread_count]*std.os.Thread = undefined;
|
||||
for (putters) |*t| {
|
||||
t.* = try std.os.spawnThread(&context, startPuts);
|
||||
}
|
||||
var getters: [put_thread_count]&std.os.Thread = undefined;
|
||||
var getters: [put_thread_count]*std.os.Thread = undefined;
|
||||
for (getters) |*t| {
|
||||
t.* = try std.os.spawnThread(&context, startGets);
|
||||
}
|
||||
@ -98,7 +98,7 @@ test "std.atomic.queue" {
|
||||
std.debug.assert(context.get_count == puts_per_thread * put_thread_count);
|
||||
}
|
||||
|
||||
fn startPuts(ctx: &Context) u8 {
|
||||
fn startPuts(ctx: *Context) u8 {
|
||||
var put_count: usize = puts_per_thread;
|
||||
var r = std.rand.DefaultPrng.init(0xdeadbeef);
|
||||
while (put_count != 0) : (put_count -= 1) {
|
||||
@ -112,7 +112,7 @@ fn startPuts(ctx: &Context) u8 {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn startGets(ctx: &Context) u8 {
|
||||
fn startGets(ctx: *Context) u8 {
|
||||
while (true) {
|
||||
while (ctx.queue.get()) |node| {
|
||||
std.os.time.sleep(0, 1); // let the os scheduler be our fuzz
|
||||
|
||||
@ -4,12 +4,12 @@ const AtomicOrder = builtin.AtomicOrder;
|
||||
/// Many reader, many writer, non-allocating, thread-safe, lock-free
|
||||
pub fn Stack(comptime T: type) type {
|
||||
return struct {
|
||||
root: ?&Node,
|
||||
root: ?*Node,
|
||||
|
||||
pub const Self = this;
|
||||
|
||||
pub const Node = struct {
|
||||
next: ?&Node,
|
||||
next: ?*Node,
|
||||
data: T,
|
||||
};
|
||||
|
||||
@ -19,36 +19,36 @@ pub fn Stack(comptime T: type) type {
|
||||
|
||||
/// push operation, but only if you are the first item in the stack. if you did not succeed in
|
||||
/// being the first item in the stack, returns the other item that was there.
|
||||
pub fn pushFirst(self: &Self, node: &Node) ?&Node {
|
||||
pub fn pushFirst(self: *Self, node: *Node) ?*Node {
|
||||
node.next = null;
|
||||
return @cmpxchgStrong(?&Node, &self.root, null, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst);
|
||||
return @cmpxchgStrong(?*Node, &self.root, null, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst);
|
||||
}
|
||||
|
||||
pub fn push(self: &Self, node: &Node) void {
|
||||
var root = @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst);
|
||||
pub fn push(self: *Self, node: *Node) void {
|
||||
var root = @atomicLoad(?*Node, &self.root, AtomicOrder.SeqCst);
|
||||
while (true) {
|
||||
node.next = root;
|
||||
root = @cmpxchgWeak(?&Node, &self.root, root, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? break;
|
||||
root = @cmpxchgWeak(?*Node, &self.root, root, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? break;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pop(self: &Self) ?&Node {
|
||||
var root = @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst);
|
||||
pub fn pop(self: *Self) ?*Node {
|
||||
var root = @atomicLoad(?*Node, &self.root, AtomicOrder.SeqCst);
|
||||
while (true) {
|
||||
root = @cmpxchgWeak(?&Node, &self.root, root, (root ?? return null).next, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return root;
|
||||
root = @cmpxchgWeak(?*Node, &self.root, root, (root ?? return null).next, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return root;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn isEmpty(self: &Self) bool {
|
||||
return @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst) == null;
|
||||
pub fn isEmpty(self: *Self) bool {
|
||||
return @atomicLoad(?*Node, &self.root, AtomicOrder.SeqCst) == null;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
const Context = struct {
|
||||
allocator: &std.mem.Allocator,
|
||||
stack: &Stack(i32),
|
||||
allocator: *std.mem.Allocator,
|
||||
stack: *Stack(i32),
|
||||
put_sum: isize,
|
||||
get_sum: isize,
|
||||
get_count: usize,
|
||||
@ -82,11 +82,11 @@ test "std.atomic.stack" {
|
||||
.get_count = 0,
|
||||
};
|
||||
|
||||
var putters: [put_thread_count]&std.os.Thread = undefined;
|
||||
var putters: [put_thread_count]*std.os.Thread = undefined;
|
||||
for (putters) |*t| {
|
||||
t.* = try std.os.spawnThread(&context, startPuts);
|
||||
}
|
||||
var getters: [put_thread_count]&std.os.Thread = undefined;
|
||||
var getters: [put_thread_count]*std.os.Thread = undefined;
|
||||
for (getters) |*t| {
|
||||
t.* = try std.os.spawnThread(&context, startGets);
|
||||
}
|
||||
@ -101,7 +101,7 @@ test "std.atomic.stack" {
|
||||
std.debug.assert(context.get_count == puts_per_thread * put_thread_count);
|
||||
}
|
||||
|
||||
fn startPuts(ctx: &Context) u8 {
|
||||
fn startPuts(ctx: *Context) u8 {
|
||||
var put_count: usize = puts_per_thread;
|
||||
var r = std.rand.DefaultPrng.init(0xdeadbeef);
|
||||
while (put_count != 0) : (put_count -= 1) {
|
||||
@ -115,7 +115,7 @@ fn startPuts(ctx: &Context) u8 {
|
||||
return 0;
|
||||
}
|
||||
|
||||
fn startGets(ctx: &Context) u8 {
|
||||
fn startGets(ctx: *Context) u8 {
|
||||
while (true) {
|
||||
while (ctx.stack.pop()) |node| {
|
||||
std.os.time.sleep(0, 1); // let the os scheduler be our fuzz
|
||||
|
||||
@ -32,7 +32,7 @@ pub const Base64Encoder = struct {
|
||||
}
|
||||
|
||||
/// dest.len must be what you get from ::calcSize.
|
||||
pub fn encode(encoder: &const Base64Encoder, dest: []u8, source: []const u8) void {
|
||||
pub fn encode(encoder: *const Base64Encoder, dest: []u8, source: []const u8) void {
|
||||
assert(dest.len == Base64Encoder.calcSize(source.len));
|
||||
|
||||
var i: usize = 0;
|
||||
@ -81,6 +81,7 @@ pub const Base64Decoder = struct {
|
||||
/// e.g. 'A' => 0.
|
||||
/// undefined for any value not in the 64 alphabet chars.
|
||||
char_to_index: [256]u8,
|
||||
|
||||
/// true only for the 64 chars in the alphabet, not the pad char.
|
||||
char_in_alphabet: [256]bool,
|
||||
pad_char: u8,
|
||||
@ -106,7 +107,7 @@ pub const Base64Decoder = struct {
|
||||
}
|
||||
|
||||
/// If the encoded buffer is detected to be invalid, returns error.InvalidPadding.
|
||||
pub fn calcSize(decoder: &const Base64Decoder, source: []const u8) !usize {
|
||||
pub fn calcSize(decoder: *const Base64Decoder, source: []const u8) !usize {
|
||||
if (source.len % 4 != 0) return error.InvalidPadding;
|
||||
return calcDecodedSizeExactUnsafe(source, decoder.pad_char);
|
||||
}
|
||||
@ -114,7 +115,7 @@ pub const Base64Decoder = struct {
|
||||
/// dest.len must be what you get from ::calcSize.
|
||||
/// invalid characters result in error.InvalidCharacter.
|
||||
/// invalid padding results in error.InvalidPadding.
|
||||
pub fn decode(decoder: &const Base64Decoder, dest: []u8, source: []const u8) !void {
|
||||
pub fn decode(decoder: *const Base64Decoder, dest: []u8, source: []const u8) !void {
|
||||
assert(dest.len == (decoder.calcSize(source) catch unreachable));
|
||||
assert(source.len % 4 == 0);
|
||||
|
||||
@ -180,7 +181,7 @@ pub const Base64DecoderWithIgnore = struct {
|
||||
/// Invalid padding results in error.InvalidPadding.
|
||||
/// Decoding more data than can fit in dest results in error.OutputTooSmall. See also ::calcSizeUpperBound.
|
||||
/// Returns the number of bytes written to dest.
|
||||
pub fn decode(decoder_with_ignore: &const Base64DecoderWithIgnore, dest: []u8, source: []const u8) !usize {
|
||||
pub fn decode(decoder_with_ignore: *const Base64DecoderWithIgnore, dest: []u8, source: []const u8) !usize {
|
||||
const decoder = &decoder_with_ignore.decoder;
|
||||
|
||||
var src_cursor: usize = 0;
|
||||
@ -289,13 +290,13 @@ pub const Base64DecoderUnsafe = struct {
|
||||
}
|
||||
|
||||
/// The source buffer must be valid.
|
||||
pub fn calcSize(decoder: &const Base64DecoderUnsafe, source: []const u8) usize {
|
||||
pub fn calcSize(decoder: *const Base64DecoderUnsafe, source: []const u8) usize {
|
||||
return calcDecodedSizeExactUnsafe(source, decoder.pad_char);
|
||||
}
|
||||
|
||||
/// dest.len must be what you get from ::calcDecodedSizeExactUnsafe.
|
||||
/// invalid characters or padding will result in undefined values.
|
||||
pub fn decode(decoder: &const Base64DecoderUnsafe, dest: []u8, source: []const u8) void {
|
||||
pub fn decode(decoder: *const Base64DecoderUnsafe, dest: []u8, source: []const u8) void {
|
||||
assert(dest.len == decoder.calcSize(source));
|
||||
|
||||
var src_index: usize = 0;
|
||||
@ -449,7 +450,7 @@ fn testError(encoded: []const u8, expected_err: error) !void {
|
||||
fn testOutputTooSmallError(encoded: []const u8) !void {
|
||||
const standard_decoder_ignore_space = Base64DecoderWithIgnore.init(standard_alphabet_chars, standard_pad_char, " ");
|
||||
var buffer: [0x100]u8 = undefined;
|
||||
var decoded = buffer[0..calcDecodedSizeExactUnsafe(encoded, standard_pad_char) - 1];
|
||||
var decoded = buffer[0 .. calcDecodedSizeExactUnsafe(encoded, standard_pad_char) - 1];
|
||||
if (standard_decoder_ignore_space.decode(decoded, encoded)) |_| {
|
||||
return error.ExpectedError;
|
||||
} else |err| if (err != error.OutputTooSmall) return err;
|
||||
|
||||
@ -11,12 +11,12 @@ pub const BufMap = struct {
|
||||
|
||||
const BufMapHashMap = HashMap([]const u8, []const u8, mem.hash_slice_u8, mem.eql_slice_u8);
|
||||
|
||||
pub fn init(allocator: &Allocator) BufMap {
|
||||
pub fn init(allocator: *Allocator) BufMap {
|
||||
var self = BufMap{ .hash_map = BufMapHashMap.init(allocator) };
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: &const BufMap) void {
|
||||
pub fn deinit(self: *const BufMap) void {
|
||||
var it = self.hash_map.iterator();
|
||||
while (true) {
|
||||
const entry = it.next() ?? break;
|
||||
@ -27,7 +27,7 @@ pub const BufMap = struct {
|
||||
self.hash_map.deinit();
|
||||
}
|
||||
|
||||
pub fn set(self: &BufMap, key: []const u8, value: []const u8) !void {
|
||||
pub fn set(self: *BufMap, key: []const u8, value: []const u8) !void {
|
||||
self.delete(key);
|
||||
const key_copy = try self.copy(key);
|
||||
errdefer self.free(key_copy);
|
||||
@ -36,30 +36,30 @@ pub const BufMap = struct {
|
||||
_ = try self.hash_map.put(key_copy, value_copy);
|
||||
}
|
||||
|
||||
pub fn get(self: &const BufMap, key: []const u8) ?[]const u8 {
|
||||
pub fn get(self: *const BufMap, key: []const u8) ?[]const u8 {
|
||||
const entry = self.hash_map.get(key) ?? return null;
|
||||
return entry.value;
|
||||
}
|
||||
|
||||
pub fn delete(self: &BufMap, key: []const u8) void {
|
||||
pub fn delete(self: *BufMap, key: []const u8) void {
|
||||
const entry = self.hash_map.remove(key) ?? return;
|
||||
self.free(entry.key);
|
||||
self.free(entry.value);
|
||||
}
|
||||
|
||||
pub fn count(self: &const BufMap) usize {
|
||||
pub fn count(self: *const BufMap) usize {
|
||||
return self.hash_map.count();
|
||||
}
|
||||
|
||||
pub fn iterator(self: &const BufMap) BufMapHashMap.Iterator {
|
||||
pub fn iterator(self: *const BufMap) BufMapHashMap.Iterator {
|
||||
return self.hash_map.iterator();
|
||||
}
|
||||
|
||||
fn free(self: &const BufMap, value: []const u8) void {
|
||||
fn free(self: *const BufMap, value: []const u8) void {
|
||||
self.hash_map.allocator.free(value);
|
||||
}
|
||||
|
||||
fn copy(self: &const BufMap, value: []const u8) ![]const u8 {
|
||||
fn copy(self: *const BufMap, value: []const u8) ![]const u8 {
|
||||
return mem.dupe(self.hash_map.allocator, u8, value);
|
||||
}
|
||||
};
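A short usage sketch against the BufMap API above; written for this note, assuming std.BufMap and std.debug.global_allocator are available as elsewhere in this tree:

    const std = @import("std");

    test "BufMap stores copies of its keys and values" {
        var map = std.BufMap.init(std.debug.global_allocator);
        defer map.deinit();

        try map.set("greeting", "hello");
        const value = map.get("greeting") ?? unreachable;
        std.debug.assert(std.mem.eql(u8, value, "hello"));
        std.debug.assert(map.count() == 1);

        map.delete("greeting");
        std.debug.assert(map.count() == 0);
    }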
|
||||
|
||||
@ -9,12 +9,12 @@ pub const BufSet = struct {
|
||||
|
||||
const BufSetHashMap = HashMap([]const u8, void, mem.hash_slice_u8, mem.eql_slice_u8);
|
||||
|
||||
pub fn init(a: &Allocator) BufSet {
|
||||
pub fn init(a: *Allocator) BufSet {
|
||||
var self = BufSet{ .hash_map = BufSetHashMap.init(a) };
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: &const BufSet) void {
|
||||
pub fn deinit(self: *const BufSet) void {
|
||||
var it = self.hash_map.iterator();
|
||||
while (true) {
|
||||
const entry = it.next() ?? break;
|
||||
@ -24,7 +24,7 @@ pub const BufSet = struct {
|
||||
self.hash_map.deinit();
|
||||
}
|
||||
|
||||
pub fn put(self: &BufSet, key: []const u8) !void {
|
||||
pub fn put(self: *BufSet, key: []const u8) !void {
|
||||
if (self.hash_map.get(key) == null) {
|
||||
const key_copy = try self.copy(key);
|
||||
errdefer self.free(key_copy);
|
||||
@ -32,28 +32,28 @@ pub const BufSet = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delete(self: &BufSet, key: []const u8) void {
|
||||
pub fn delete(self: *BufSet, key: []const u8) void {
|
||||
const entry = self.hash_map.remove(key) ?? return;
|
||||
self.free(entry.key);
|
||||
}
|
||||
|
||||
pub fn count(self: &const BufSet) usize {
|
||||
pub fn count(self: *const BufSet) usize {
|
||||
return self.hash_map.count();
|
||||
}
|
||||
|
||||
pub fn iterator(self: &const BufSet) BufSetHashMap.Iterator {
|
||||
pub fn iterator(self: *const BufSet) BufSetHashMap.Iterator {
|
||||
return self.hash_map.iterator();
|
||||
}
|
||||
|
||||
pub fn allocator(self: &const BufSet) &Allocator {
|
||||
pub fn allocator(self: *const BufSet) *Allocator {
|
||||
return self.hash_map.allocator;
|
||||
}
|
||||
|
||||
fn free(self: &const BufSet, value: []const u8) void {
|
||||
fn free(self: *const BufSet, value: []const u8) void {
|
||||
self.hash_map.allocator.free(value);
|
||||
}
|
||||
|
||||
fn copy(self: &const BufSet, value: []const u8) ![]const u8 {
|
||||
fn copy(self: *const BufSet, value: []const u8) ![]const u8 {
|
||||
const result = try self.hash_map.allocator.alloc(u8, value.len);
|
||||
mem.copy(u8, result, value);
|
||||
return result;
|
||||
|
||||
@ -12,14 +12,14 @@ pub const Buffer = struct {
|
||||
list: ArrayList(u8),
|
||||
|
||||
/// Must deinitialize with deinit.
|
||||
pub fn init(allocator: &Allocator, m: []const u8) !Buffer {
|
||||
pub fn init(allocator: *Allocator, m: []const u8) !Buffer {
|
||||
var self = try initSize(allocator, m.len);
|
||||
mem.copy(u8, self.list.items, m);
|
||||
return self;
|
||||
}
|
||||
|
||||
/// Must deinitialize with deinit.
|
||||
pub fn initSize(allocator: &Allocator, size: usize) !Buffer {
|
||||
pub fn initSize(allocator: *Allocator, size: usize) !Buffer {
|
||||
var self = initNull(allocator);
|
||||
try self.resize(size);
|
||||
return self;
|
||||
@ -30,19 +30,19 @@ pub const Buffer = struct {
|
||||
/// * ::replaceContents
|
||||
/// * ::replaceContentsBuffer
|
||||
/// * ::resize
|
||||
pub fn initNull(allocator: &Allocator) Buffer {
|
||||
pub fn initNull(allocator: *Allocator) Buffer {
|
||||
return Buffer{ .list = ArrayList(u8).init(allocator) };
|
||||
}
|
||||
|
||||
/// Must deinitialize with deinit.
|
||||
pub fn initFromBuffer(buffer: &const Buffer) !Buffer {
|
||||
pub fn initFromBuffer(buffer: *const Buffer) !Buffer {
|
||||
return Buffer.init(buffer.list.allocator, buffer.toSliceConst());
|
||||
}
|
||||
|
||||
/// Buffer takes ownership of the passed in slice. The slice must have been
|
||||
/// allocated with `allocator`.
|
||||
/// Must deinitialize with deinit.
|
||||
pub fn fromOwnedSlice(allocator: &Allocator, slice: []u8) Buffer {
|
||||
pub fn fromOwnedSlice(allocator: *Allocator, slice: []u8) Buffer {
|
||||
var self = Buffer{ .list = ArrayList(u8).fromOwnedSlice(allocator, slice) };
|
||||
self.list.append(0);
|
||||
return self;
|
||||
@ -50,79 +50,79 @@ pub const Buffer = struct {
|
||||
|
||||
/// The caller owns the returned memory. The Buffer becomes null and
|
||||
/// is safe to `deinit`.
|
||||
pub fn toOwnedSlice(self: &Buffer) []u8 {
|
||||
pub fn toOwnedSlice(self: *Buffer) []u8 {
|
||||
const allocator = self.list.allocator;
|
||||
const result = allocator.shrink(u8, self.list.items, self.len());
|
||||
self.* = initNull(allocator);
|
||||
return result;
|
||||
}
|
||||
|
||||
pub fn deinit(self: &Buffer) void {
|
||||
pub fn deinit(self: *Buffer) void {
|
||||
self.list.deinit();
|
||||
}
|
||||
|
||||
pub fn toSlice(self: &const Buffer) []u8 {
|
||||
pub fn toSlice(self: *const Buffer) []u8 {
|
||||
return self.list.toSlice()[0..self.len()];
|
||||
}
|
||||
|
||||
pub fn toSliceConst(self: &const Buffer) []const u8 {
|
||||
pub fn toSliceConst(self: *const Buffer) []const u8 {
|
||||
return self.list.toSliceConst()[0..self.len()];
|
||||
}
|
||||
|
||||
pub fn shrink(self: &Buffer, new_len: usize) void {
|
||||
pub fn shrink(self: *Buffer, new_len: usize) void {
|
||||
assert(new_len <= self.len());
|
||||
self.list.shrink(new_len + 1);
|
||||
self.list.items[self.len()] = 0;
|
||||
}
|
||||
|
||||
pub fn resize(self: &Buffer, new_len: usize) !void {
|
||||
pub fn resize(self: *Buffer, new_len: usize) !void {
|
||||
try self.list.resize(new_len + 1);
|
||||
self.list.items[self.len()] = 0;
|
||||
}
|
||||
|
||||
pub fn isNull(self: &const Buffer) bool {
|
||||
pub fn isNull(self: *const Buffer) bool {
|
||||
return self.list.len == 0;
|
||||
}
|
||||
|
||||
pub fn len(self: &const Buffer) usize {
|
||||
pub fn len(self: *const Buffer) usize {
|
||||
return self.list.len - 1;
|
||||
}
|
||||
|
||||
pub fn append(self: &Buffer, m: []const u8) !void {
|
||||
pub fn append(self: *Buffer, m: []const u8) !void {
|
||||
const old_len = self.len();
|
||||
try self.resize(old_len + m.len);
|
||||
mem.copy(u8, self.list.toSlice()[old_len..], m);
|
||||
}
|
||||
|
||||
pub fn appendByte(self: &Buffer, byte: u8) !void {
|
||||
pub fn appendByte(self: *Buffer, byte: u8) !void {
|
||||
const old_len = self.len();
|
||||
try self.resize(old_len + 1);
|
||||
self.list.toSlice()[old_len] = byte;
|
||||
}
|
||||
|
||||
pub fn eql(self: &const Buffer, m: []const u8) bool {
|
||||
pub fn eql(self: *const Buffer, m: []const u8) bool {
|
||||
return mem.eql(u8, self.toSliceConst(), m);
|
||||
}
|
||||
|
||||
pub fn startsWith(self: &const Buffer, m: []const u8) bool {
|
||||
pub fn startsWith(self: *const Buffer, m: []const u8) bool {
|
||||
if (self.len() < m.len) return false;
|
||||
return mem.eql(u8, self.list.items[0..m.len], m);
|
||||
}
|
||||
|
||||
pub fn endsWith(self: &const Buffer, m: []const u8) bool {
|
||||
pub fn endsWith(self: *const Buffer, m: []const u8) bool {
|
||||
const l = self.len();
|
||||
if (l < m.len) return false;
|
||||
const start = l - m.len;
|
||||
return mem.eql(u8, self.list.items[start..l], m);
|
||||
}
|
||||
|
||||
pub fn replaceContents(self: &const Buffer, m: []const u8) !void {
|
||||
pub fn replaceContents(self: *const Buffer, m: []const u8) !void {
|
||||
try self.resize(m.len);
|
||||
mem.copy(u8, self.list.toSlice(), m);
|
||||
}
|
||||
|
||||
/// For passing to C functions.
|
||||
pub fn ptr(self: &const Buffer) &u8 {
|
||||
pub fn ptr(self: *const Buffer) [*]u8 {
|
||||
return self.list.items.ptr;
|
||||
}
|
||||
};
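A matching sketch for Buffer, again an illustration that assumes std.Buffer and the debug allocator:

    const std = @import("std");

    test "Buffer append and queries" {
        var buf = try std.Buffer.init(std.debug.global_allocator, "hello");
        defer buf.deinit();

        try buf.append(", world");
        std.debug.assert(buf.eql("hello, world"));
        std.debug.assert(buf.startsWith("hello"));
        std.debug.assert(buf.endsWith("world"));
    }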
|
||||
|
||||
278 std/build.zig
@ -20,7 +20,7 @@ pub const Builder = struct {
|
||||
install_tls: TopLevelStep,
|
||||
have_uninstall_step: bool,
|
||||
have_install_step: bool,
|
||||
allocator: &Allocator,
|
||||
allocator: *Allocator,
|
||||
lib_paths: ArrayList([]const u8),
|
||||
include_paths: ArrayList([]const u8),
|
||||
rpaths: ArrayList([]const u8),
|
||||
@ -36,9 +36,9 @@ pub const Builder = struct {
|
||||
verbose_cimport: bool,
|
||||
invalid_user_input: bool,
|
||||
zig_exe: []const u8,
|
||||
default_step: &Step,
|
||||
default_step: *Step,
|
||||
env_map: BufMap,
|
||||
top_level_steps: ArrayList(&TopLevelStep),
|
||||
top_level_steps: ArrayList(*TopLevelStep),
|
||||
prefix: []const u8,
|
||||
search_prefixes: ArrayList([]const u8),
|
||||
lib_dir: []const u8,
|
||||
@ -82,7 +82,7 @@ pub const Builder = struct {
|
||||
description: []const u8,
|
||||
};
|
||||
|
||||
pub fn init(allocator: &Allocator, zig_exe: []const u8, build_root: []const u8, cache_root: []const u8) Builder {
|
||||
pub fn init(allocator: *Allocator, zig_exe: []const u8, build_root: []const u8, cache_root: []const u8) Builder {
|
||||
var self = Builder{
|
||||
.zig_exe = zig_exe,
|
||||
.build_root = build_root,
|
||||
@ -102,7 +102,7 @@ pub const Builder = struct {
|
||||
.user_input_options = UserInputOptionsMap.init(allocator),
|
||||
.available_options_map = AvailableOptionsMap.init(allocator),
|
||||
.available_options_list = ArrayList(AvailableOption).init(allocator),
|
||||
.top_level_steps = ArrayList(&TopLevelStep).init(allocator),
|
||||
.top_level_steps = ArrayList(*TopLevelStep).init(allocator),
|
||||
.default_step = undefined,
|
||||
.env_map = os.getEnvMap(allocator) catch unreachable,
|
||||
.prefix = undefined,
|
||||
@ -127,7 +127,7 @@ pub const Builder = struct {
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn deinit(self: &Builder) void {
|
||||
pub fn deinit(self: *Builder) void {
|
||||
self.lib_paths.deinit();
|
||||
self.include_paths.deinit();
|
||||
self.rpaths.deinit();
|
||||
@ -135,81 +135,81 @@ pub const Builder = struct {
|
||||
self.top_level_steps.deinit();
|
||||
}
|
||||
|
||||
pub fn setInstallPrefix(self: &Builder, maybe_prefix: ?[]const u8) void {
|
||||
pub fn setInstallPrefix(self: *Builder, maybe_prefix: ?[]const u8) void {
|
||||
self.prefix = maybe_prefix ?? "/usr/local"; // TODO better default
|
||||
self.lib_dir = os.path.join(self.allocator, self.prefix, "lib") catch unreachable;
|
||||
self.exe_dir = os.path.join(self.allocator, self.prefix, "bin") catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addExecutable(self: &Builder, name: []const u8, root_src: ?[]const u8) &LibExeObjStep {
|
||||
pub fn addExecutable(self: *Builder, name: []const u8, root_src: ?[]const u8) *LibExeObjStep {
|
||||
return LibExeObjStep.createExecutable(self, name, root_src);
|
||||
}
|
||||
|
||||
pub fn addObject(self: &Builder, name: []const u8, root_src: []const u8) &LibExeObjStep {
|
||||
pub fn addObject(self: *Builder, name: []const u8, root_src: []const u8) *LibExeObjStep {
|
||||
return LibExeObjStep.createObject(self, name, root_src);
|
||||
}
|
||||
|
||||
pub fn addSharedLibrary(self: &Builder, name: []const u8, root_src: ?[]const u8, ver: &const Version) &LibExeObjStep {
|
||||
pub fn addSharedLibrary(self: *Builder, name: []const u8, root_src: ?[]const u8, ver: *const Version) *LibExeObjStep {
|
||||
return LibExeObjStep.createSharedLibrary(self, name, root_src, ver);
|
||||
}
|
||||
|
||||
pub fn addStaticLibrary(self: &Builder, name: []const u8, root_src: ?[]const u8) &LibExeObjStep {
|
||||
pub fn addStaticLibrary(self: *Builder, name: []const u8, root_src: ?[]const u8) *LibExeObjStep {
|
||||
return LibExeObjStep.createStaticLibrary(self, name, root_src);
|
||||
}
|
||||
|
||||
pub fn addTest(self: &Builder, root_src: []const u8) &TestStep {
|
||||
pub fn addTest(self: *Builder, root_src: []const u8) *TestStep {
|
||||
const test_step = self.allocator.create(TestStep) catch unreachable;
|
||||
test_step.* = TestStep.init(self, root_src);
|
||||
return test_step;
|
||||
}
|
||||
|
||||
pub fn addAssemble(self: &Builder, name: []const u8, src: []const u8) &LibExeObjStep {
|
||||
pub fn addAssemble(self: *Builder, name: []const u8, src: []const u8) *LibExeObjStep {
|
||||
const obj_step = LibExeObjStep.createObject(self, name, null);
|
||||
obj_step.addAssemblyFile(src);
|
||||
return obj_step;
|
||||
}
|
||||
|
||||
pub fn addCStaticLibrary(self: &Builder, name: []const u8) &LibExeObjStep {
|
||||
pub fn addCStaticLibrary(self: *Builder, name: []const u8) *LibExeObjStep {
|
||||
return LibExeObjStep.createCStaticLibrary(self, name);
|
||||
}
|
||||
|
||||
pub fn addCSharedLibrary(self: &Builder, name: []const u8, ver: &const Version) &LibExeObjStep {
|
||||
pub fn addCSharedLibrary(self: *Builder, name: []const u8, ver: *const Version) *LibExeObjStep {
|
||||
return LibExeObjStep.createCSharedLibrary(self, name, ver);
|
||||
}
|
||||
|
||||
pub fn addCExecutable(self: &Builder, name: []const u8) &LibExeObjStep {
|
||||
pub fn addCExecutable(self: *Builder, name: []const u8) *LibExeObjStep {
|
||||
return LibExeObjStep.createCExecutable(self, name);
|
||||
}
|
||||
|
||||
pub fn addCObject(self: &Builder, name: []const u8, src: []const u8) &LibExeObjStep {
|
||||
pub fn addCObject(self: *Builder, name: []const u8, src: []const u8) *LibExeObjStep {
|
||||
return LibExeObjStep.createCObject(self, name, src);
|
||||
}
|
||||
|
||||
/// ::argv is copied.
|
||||
pub fn addCommand(self: &Builder, cwd: ?[]const u8, env_map: &const BufMap, argv: []const []const u8) &CommandStep {
|
||||
pub fn addCommand(self: *Builder, cwd: ?[]const u8, env_map: *const BufMap, argv: []const []const u8) *CommandStep {
|
||||
return CommandStep.create(self, cwd, env_map, argv);
|
||||
}
|
||||
|
||||
pub fn addWriteFile(self: &Builder, file_path: []const u8, data: []const u8) &WriteFileStep {
|
||||
pub fn addWriteFile(self: *Builder, file_path: []const u8, data: []const u8) *WriteFileStep {
|
||||
const write_file_step = self.allocator.create(WriteFileStep) catch unreachable;
|
||||
write_file_step.* = WriteFileStep.init(self, file_path, data);
|
||||
return write_file_step;
|
||||
}
|
||||
|
||||
pub fn addLog(self: &Builder, comptime format: []const u8, args: ...) &LogStep {
|
||||
pub fn addLog(self: *Builder, comptime format: []const u8, args: ...) *LogStep {
|
||||
const data = self.fmt(format, args);
|
||||
const log_step = self.allocator.create(LogStep) catch unreachable;
|
||||
log_step.* = LogStep.init(self, data);
|
||||
return log_step;
|
||||
}
|
||||
|
||||
pub fn addRemoveDirTree(self: &Builder, dir_path: []const u8) &RemoveDirStep {
|
||||
pub fn addRemoveDirTree(self: *Builder, dir_path: []const u8) *RemoveDirStep {
|
||||
const remove_dir_step = self.allocator.create(RemoveDirStep) catch unreachable;
|
||||
remove_dir_step.* = RemoveDirStep.init(self, dir_path);
|
||||
return remove_dir_step;
|
||||
}
|
||||
|
||||
pub fn version(self: &const Builder, major: u32, minor: u32, patch: u32) Version {
|
||||
pub fn version(self: *const Builder, major: u32, minor: u32, patch: u32) Version {
|
||||
return Version{
|
||||
.major = major,
|
||||
.minor = minor,
|
||||
@ -217,20 +217,20 @@ pub const Builder = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn addCIncludePath(self: &Builder, path: []const u8) void {
|
||||
pub fn addCIncludePath(self: *Builder, path: []const u8) void {
|
||||
self.include_paths.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addRPath(self: &Builder, path: []const u8) void {
|
||||
pub fn addRPath(self: *Builder, path: []const u8) void {
|
||||
self.rpaths.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addLibPath(self: &Builder, path: []const u8) void {
|
||||
pub fn addLibPath(self: *Builder, path: []const u8) void {
|
||||
self.lib_paths.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn make(self: &Builder, step_names: []const []const u8) !void {
|
||||
var wanted_steps = ArrayList(&Step).init(self.allocator);
|
||||
pub fn make(self: *Builder, step_names: []const []const u8) !void {
|
||||
var wanted_steps = ArrayList(*Step).init(self.allocator);
|
||||
defer wanted_steps.deinit();
|
||||
|
||||
if (step_names.len == 0) {
|
||||
@ -247,7 +247,7 @@ pub const Builder = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getInstallStep(self: &Builder) &Step {
|
||||
pub fn getInstallStep(self: *Builder) *Step {
|
||||
if (self.have_install_step) return &self.install_tls.step;
|
||||
|
||||
self.top_level_steps.append(&self.install_tls) catch unreachable;
|
||||
@ -255,7 +255,7 @@ pub const Builder = struct {
|
||||
return &self.install_tls.step;
|
||||
}
|
||||
|
||||
pub fn getUninstallStep(self: &Builder) &Step {
|
||||
pub fn getUninstallStep(self: *Builder) *Step {
|
||||
if (self.have_uninstall_step) return &self.uninstall_tls.step;
|
||||
|
||||
self.top_level_steps.append(&self.uninstall_tls) catch unreachable;
|
||||
@ -263,7 +263,7 @@ pub const Builder = struct {
|
||||
return &self.uninstall_tls.step;
|
||||
}
|
||||
|
||||
fn makeUninstall(uninstall_step: &Step) error!void {
|
||||
fn makeUninstall(uninstall_step: *Step) error!void {
|
||||
const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
|
||||
const self = @fieldParentPtr(Builder, "uninstall_tls", uninstall_tls);
|
||||
|
||||
@ -277,7 +277,7 @@ pub const Builder = struct {
|
||||
// TODO remove empty directories
|
||||
}
|
||||
|
||||
fn makeOneStep(self: &Builder, s: &Step) error!void {
|
||||
fn makeOneStep(self: *Builder, s: *Step) error!void {
|
||||
if (s.loop_flag) {
|
||||
warn("Dependency loop detected:\n {}\n", s.name);
|
||||
return error.DependencyLoopDetected;
|
||||
@ -298,7 +298,7 @@ pub const Builder = struct {
|
||||
try s.make();
|
||||
}
|
||||
|
||||
fn getTopLevelStepByName(self: &Builder, name: []const u8) !&Step {
|
||||
fn getTopLevelStepByName(self: *Builder, name: []const u8) !*Step {
|
||||
for (self.top_level_steps.toSliceConst()) |top_level_step| {
|
||||
if (mem.eql(u8, top_level_step.step.name, name)) {
|
||||
return &top_level_step.step;
|
||||
@ -308,7 +308,7 @@ pub const Builder = struct {
|
||||
return error.InvalidStepName;
|
||||
}
|
||||
|
||||
fn processNixOSEnvVars(self: &Builder) void {
|
||||
fn processNixOSEnvVars(self: *Builder) void {
|
||||
if (os.getEnvVarOwned(self.allocator, "NIX_CFLAGS_COMPILE")) |nix_cflags_compile| {
|
||||
var it = mem.split(nix_cflags_compile, " ");
|
||||
while (true) {
|
||||
@ -350,7 +350,7 @@ pub const Builder = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn option(self: &Builder, comptime T: type, name: []const u8, description: []const u8) ?T {
|
||||
pub fn option(self: *Builder, comptime T: type, name: []const u8, description: []const u8) ?T {
|
||||
const type_id = comptime typeToEnum(T);
|
||||
const available_option = AvailableOption{
|
||||
.name = name,
|
||||
@ -403,7 +403,7 @@ pub const Builder = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn step(self: &Builder, name: []const u8, description: []const u8) &Step {
|
||||
pub fn step(self: *Builder, name: []const u8, description: []const u8) *Step {
|
||||
const step_info = self.allocator.create(TopLevelStep) catch unreachable;
|
||||
step_info.* = TopLevelStep{
|
||||
.step = Step.initNoOp(name, self.allocator),
|
||||
@ -413,7 +413,7 @@ pub const Builder = struct {
|
||||
return &step_info.step;
|
||||
}
|
||||
|
||||
pub fn standardReleaseOptions(self: &Builder) builtin.Mode {
|
||||
pub fn standardReleaseOptions(self: *Builder) builtin.Mode {
|
||||
if (self.release_mode) |mode| return mode;
|
||||
|
||||
const release_safe = self.option(bool, "release-safe", "optimizations on and safety on") ?? false;
|
||||
@ -429,7 +429,7 @@ pub const Builder = struct {
|
||||
return mode;
|
||||
}
|
||||
|
||||
pub fn addUserInputOption(self: &Builder, name: []const u8, value: []const u8) bool {
|
||||
pub fn addUserInputOption(self: *Builder, name: []const u8, value: []const u8) bool {
|
||||
if (self.user_input_options.put(name, UserInputOption{
|
||||
.name = name,
|
||||
.value = UserValue{ .Scalar = value },
|
||||
@ -466,7 +466,7 @@ pub const Builder = struct {
|
||||
return false;
|
||||
}
|
||||
|
||||
pub fn addUserInputFlag(self: &Builder, name: []const u8) bool {
|
||||
pub fn addUserInputFlag(self: *Builder, name: []const u8) bool {
|
||||
if (self.user_input_options.put(name, UserInputOption{
|
||||
.name = name,
|
||||
.value = UserValue{ .Flag = {} },
|
||||
@ -500,7 +500,7 @@ pub const Builder = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn markInvalidUserInput(self: &Builder) void {
|
||||
fn markInvalidUserInput(self: *Builder) void {
|
||||
self.invalid_user_input = true;
|
||||
}
|
||||
|
||||
@ -514,7 +514,7 @@ pub const Builder = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn validateUserInputDidItFail(self: &Builder) bool {
|
||||
pub fn validateUserInputDidItFail(self: *Builder) bool {
|
||||
// make sure all args are used
|
||||
var it = self.user_input_options.iterator();
|
||||
while (true) {
|
||||
@ -528,7 +528,7 @@ pub const Builder = struct {
|
||||
return self.invalid_user_input;
|
||||
}
|
||||
|
||||
fn spawnChild(self: &Builder, argv: []const []const u8) !void {
|
||||
fn spawnChild(self: *Builder, argv: []const []const u8) !void {
|
||||
return self.spawnChildEnvMap(null, &self.env_map, argv);
|
||||
}
|
||||
|
||||
@ -540,7 +540,7 @@ pub const Builder = struct {
|
||||
warn("\n");
|
||||
}
|
||||
|
||||
fn spawnChildEnvMap(self: &Builder, cwd: ?[]const u8, env_map: &const BufMap, argv: []const []const u8) !void {
|
||||
fn spawnChildEnvMap(self: *Builder, cwd: ?[]const u8, env_map: *const BufMap, argv: []const []const u8) !void {
|
||||
if (self.verbose) {
|
||||
printCmd(cwd, argv);
|
||||
}
|
||||
@ -573,28 +573,28 @@ pub const Builder = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn makePath(self: &Builder, path: []const u8) !void {
|
||||
pub fn makePath(self: *Builder, path: []const u8) !void {
|
||||
os.makePath(self.allocator, self.pathFromRoot(path)) catch |err| {
|
||||
warn("Unable to create path {}: {}\n", path, @errorName(err));
|
||||
return err;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn installArtifact(self: &Builder, artifact: &LibExeObjStep) void {
|
||||
pub fn installArtifact(self: *Builder, artifact: *LibExeObjStep) void {
|
||||
self.getInstallStep().dependOn(&self.addInstallArtifact(artifact).step);
|
||||
}
|
||||
|
||||
pub fn addInstallArtifact(self: &Builder, artifact: &LibExeObjStep) &InstallArtifactStep {
|
||||
pub fn addInstallArtifact(self: *Builder, artifact: *LibExeObjStep) *InstallArtifactStep {
|
||||
return InstallArtifactStep.create(self, artifact);
|
||||
}
|
||||
|
||||
///::dest_rel_path is relative to prefix path or it can be an absolute path
|
||||
pub fn installFile(self: &Builder, src_path: []const u8, dest_rel_path: []const u8) void {
|
||||
pub fn installFile(self: *Builder, src_path: []const u8, dest_rel_path: []const u8) void {
|
||||
self.getInstallStep().dependOn(&self.addInstallFile(src_path, dest_rel_path).step);
|
||||
}
|
||||
|
||||
///::dest_rel_path is relative to prefix path or it can be an absolute path
|
||||
pub fn addInstallFile(self: &Builder, src_path: []const u8, dest_rel_path: []const u8) &InstallFileStep {
|
||||
pub fn addInstallFile(self: *Builder, src_path: []const u8, dest_rel_path: []const u8) *InstallFileStep {
|
||||
const full_dest_path = os.path.resolve(self.allocator, self.prefix, dest_rel_path) catch unreachable;
|
||||
self.pushInstalledFile(full_dest_path);
|
||||
|
||||
@ -603,16 +603,16 @@ pub const Builder = struct {
|
||||
return install_step;
|
||||
}
|
||||
|
||||
pub fn pushInstalledFile(self: &Builder, full_path: []const u8) void {
|
||||
pub fn pushInstalledFile(self: *Builder, full_path: []const u8) void {
|
||||
_ = self.getUninstallStep();
|
||||
self.installed_files.append(full_path) catch unreachable;
|
||||
}
|
||||
|
||||
fn copyFile(self: &Builder, source_path: []const u8, dest_path: []const u8) !void {
|
||||
fn copyFile(self: *Builder, source_path: []const u8, dest_path: []const u8) !void {
|
||||
return self.copyFileMode(source_path, dest_path, os.default_file_mode);
|
||||
}
|
||||
|
||||
fn copyFileMode(self: &Builder, source_path: []const u8, dest_path: []const u8, mode: os.FileMode) !void {
|
||||
fn copyFileMode(self: *Builder, source_path: []const u8, dest_path: []const u8, mode: os.FileMode) !void {
|
||||
if (self.verbose) {
|
||||
warn("cp {} {}\n", source_path, dest_path);
|
||||
}
|
||||
@ -629,15 +629,15 @@ pub const Builder = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn pathFromRoot(self: &Builder, rel_path: []const u8) []u8 {
|
||||
fn pathFromRoot(self: *Builder, rel_path: []const u8) []u8 {
|
||||
return os.path.resolve(self.allocator, self.build_root, rel_path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn fmt(self: &Builder, comptime format: []const u8, args: ...) []u8 {
|
||||
pub fn fmt(self: *Builder, comptime format: []const u8, args: ...) []u8 {
|
||||
return fmt_lib.allocPrint(self.allocator, format, args) catch unreachable;
|
||||
}
|
||||
|
||||
fn getCCExe(self: &Builder) []const u8 {
|
||||
fn getCCExe(self: *Builder) []const u8 {
|
||||
if (builtin.environ == builtin.Environ.msvc) {
|
||||
return "cl.exe";
|
||||
} else {
|
||||
@ -645,7 +645,7 @@ pub const Builder = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn findProgram(self: &Builder, names: []const []const u8, paths: []const []const u8) ![]const u8 {
|
||||
pub fn findProgram(self: *Builder, names: []const []const u8, paths: []const []const u8) ![]const u8 {
|
||||
// TODO report error for ambiguous situations
|
||||
const exe_extension = (Target{ .Native = {} }).exeFileExt();
|
||||
for (self.search_prefixes.toSliceConst()) |search_prefix| {
|
||||
@ -693,7 +693,7 @@ pub const Builder = struct {
|
||||
return error.FileNotFound;
|
||||
}
|
||||
|
||||
pub fn exec(self: &Builder, argv: []const []const u8) ![]u8 {
|
||||
pub fn exec(self: *Builder, argv: []const []const u8) ![]u8 {
|
||||
const max_output_size = 100 * 1024;
|
||||
const result = try os.ChildProcess.exec(self.allocator, argv, null, null, max_output_size);
|
||||
switch (result.term) {
|
||||
@ -715,7 +715,7 @@ pub const Builder = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn addSearchPrefix(self: &Builder, search_prefix: []const u8) void {
|
||||
pub fn addSearchPrefix(self: *Builder, search_prefix: []const u8) void {
|
||||
self.search_prefixes.append(search_prefix) catch unreachable;
|
||||
}
|
||||
};
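For orientation, roughly what a user-facing build.zig looks like against the updated Builder API above; setBuildMode is assumed from the wider std.build API and does not appear in this excerpt:

    const Builder = @import("std").build.Builder;

    pub fn build(b: *Builder) void {
        const mode = b.standardReleaseOptions();

        const exe = b.addExecutable("example", "src/main.zig");
        exe.setBuildMode(mode); // assumed LibExeObjStep method, not shown in this diff

        b.installArtifact(exe);
    }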
|
||||
@ -736,7 +736,7 @@ pub const Target = union(enum) {
|
||||
Native: void,
|
||||
Cross: CrossTarget,
|
||||
|
||||
pub fn oFileExt(self: &const Target) []const u8 {
|
||||
pub fn oFileExt(self: *const Target) []const u8 {
|
||||
const environ = switch (self.*) {
|
||||
Target.Native => builtin.environ,
|
||||
Target.Cross => |t| t.environ,
|
||||
@ -747,49 +747,49 @@ pub const Target = union(enum) {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn exeFileExt(self: &const Target) []const u8 {
|
||||
pub fn exeFileExt(self: *const Target) []const u8 {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.windows => ".exe",
|
||||
else => "",
|
||||
};
|
||||
}
|
||||
|
||||
pub fn libFileExt(self: &const Target) []const u8 {
|
||||
pub fn libFileExt(self: *const Target) []const u8 {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.windows => ".lib",
|
||||
else => ".a",
|
||||
};
|
||||
}
|
||||
|
||||
pub fn getOs(self: &const Target) builtin.Os {
|
||||
pub fn getOs(self: *const Target) builtin.Os {
|
||||
return switch (self.*) {
|
||||
Target.Native => builtin.os,
|
||||
Target.Cross => |t| t.os,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isDarwin(self: &const Target) bool {
|
||||
pub fn isDarwin(self: *const Target) bool {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.ios, builtin.Os.macosx => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isWindows(self: &const Target) bool {
|
||||
pub fn isWindows(self: *const Target) bool {
|
||||
return switch (self.getOs()) {
|
||||
builtin.Os.windows => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn wantSharedLibSymLinks(self: &const Target) bool {
|
||||
pub fn wantSharedLibSymLinks(self: *const Target) bool {
|
||||
return !self.isWindows();
|
||||
}
|
||||
};
|
||||
|
||||
pub const LibExeObjStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
name: []const u8,
|
||||
target: Target,
|
||||
link_libs: BufSet,
|
||||
@ -836,56 +836,56 @@ pub const LibExeObjStep = struct {
|
||||
Obj,
|
||||
};
|
||||
|
||||
pub fn createSharedLibrary(builder: &Builder, name: []const u8, root_src: ?[]const u8, ver: &const Version) &LibExeObjStep {
|
||||
pub fn createSharedLibrary(builder: *Builder, name: []const u8, root_src: ?[]const u8, ver: *const Version) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initExtraArgs(builder, name, root_src, Kind.Lib, false, ver);
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createCSharedLibrary(builder: &Builder, name: []const u8, version: &const Version) &LibExeObjStep {
|
||||
pub fn createCSharedLibrary(builder: *Builder, name: []const u8, version: *const Version) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initC(builder, name, Kind.Lib, version, false);
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createStaticLibrary(builder: &Builder, name: []const u8, root_src: ?[]const u8) &LibExeObjStep {
|
||||
pub fn createStaticLibrary(builder: *Builder, name: []const u8, root_src: ?[]const u8) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initExtraArgs(builder, name, root_src, Kind.Lib, true, builder.version(0, 0, 0));
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createCStaticLibrary(builder: &Builder, name: []const u8) &LibExeObjStep {
|
||||
pub fn createCStaticLibrary(builder: *Builder, name: []const u8) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initC(builder, name, Kind.Lib, builder.version(0, 0, 0), true);
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createObject(builder: &Builder, name: []const u8, root_src: []const u8) &LibExeObjStep {
|
||||
pub fn createObject(builder: *Builder, name: []const u8, root_src: []const u8) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initExtraArgs(builder, name, root_src, Kind.Obj, false, builder.version(0, 0, 0));
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createCObject(builder: &Builder, name: []const u8, src: []const u8) &LibExeObjStep {
|
||||
pub fn createCObject(builder: *Builder, name: []const u8, src: []const u8) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initC(builder, name, Kind.Obj, builder.version(0, 0, 0), false);
|
||||
self.object_src = src;
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createExecutable(builder: &Builder, name: []const u8, root_src: ?[]const u8) &LibExeObjStep {
|
||||
pub fn createExecutable(builder: *Builder, name: []const u8, root_src: ?[]const u8) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initExtraArgs(builder, name, root_src, Kind.Exe, false, builder.version(0, 0, 0));
|
||||
return self;
|
||||
}
|
||||
|
||||
pub fn createCExecutable(builder: &Builder, name: []const u8) &LibExeObjStep {
|
||||
pub fn createCExecutable(builder: *Builder, name: []const u8) *LibExeObjStep {
|
||||
const self = builder.allocator.create(LibExeObjStep) catch unreachable;
|
||||
self.* = initC(builder, name, Kind.Exe, builder.version(0, 0, 0), false);
|
||||
return self;
|
||||
}
|
||||
|
||||
fn initExtraArgs(builder: &Builder, name: []const u8, root_src: ?[]const u8, kind: Kind, static: bool, ver: &const Version) LibExeObjStep {
|
||||
fn initExtraArgs(builder: *Builder, name: []const u8, root_src: ?[]const u8, kind: Kind, static: bool, ver: *const Version) LibExeObjStep {
|
||||
var self = LibExeObjStep{
|
||||
.strip = false,
|
||||
.builder = builder,
|
||||
@ -924,7 +924,7 @@ pub const LibExeObjStep = struct {
|
||||
return self;
|
||||
}
|
||||
|
||||
fn initC(builder: &Builder, name: []const u8, kind: Kind, version: &const Version, static: bool) LibExeObjStep {
|
||||
fn initC(builder: *Builder, name: []const u8, kind: Kind, version: *const Version, static: bool) LibExeObjStep {
|
||||
var self = LibExeObjStep{
|
||||
.builder = builder,
|
||||
.name = name,
|
||||
@ -964,7 +964,7 @@ pub const LibExeObjStep = struct {
|
||||
return self;
|
||||
}
|
||||
|
||||
fn computeOutFileNames(self: &LibExeObjStep) void {
|
||||
fn computeOutFileNames(self: *LibExeObjStep) void {
|
||||
switch (self.kind) {
|
||||
Kind.Obj => {
|
||||
self.out_filename = self.builder.fmt("{}{}", self.name, self.target.oFileExt());
|
||||
@ -996,7 +996,7 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setTarget(self: &LibExeObjStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
|
||||
pub fn setTarget(self: *LibExeObjStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
|
||||
self.target = Target{
|
||||
.Cross = CrossTarget{
|
||||
.arch = target_arch,
|
||||
@ -1008,16 +1008,16 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
|
||||
// TODO respect this in the C args
|
||||
pub fn setLinkerScriptPath(self: &LibExeObjStep, path: []const u8) void {
|
||||
pub fn setLinkerScriptPath(self: *LibExeObjStep, path: []const u8) void {
|
||||
self.linker_script = path;
|
||||
}
|
||||
|
||||
pub fn linkFramework(self: &LibExeObjStep, framework_name: []const u8) void {
|
||||
pub fn linkFramework(self: *LibExeObjStep, framework_name: []const u8) void {
|
||||
assert(self.target.isDarwin());
|
||||
self.frameworks.put(framework_name) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn linkLibrary(self: &LibExeObjStep, lib: &LibExeObjStep) void {
|
||||
pub fn linkLibrary(self: *LibExeObjStep, lib: *LibExeObjStep) void {
|
||||
assert(self.kind != Kind.Obj);
|
||||
assert(lib.kind == Kind.Lib);
|
||||
|
||||
@ -1038,26 +1038,26 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn linkSystemLibrary(self: &LibExeObjStep, name: []const u8) void {
|
||||
pub fn linkSystemLibrary(self: *LibExeObjStep, name: []const u8) void {
|
||||
assert(self.kind != Kind.Obj);
|
||||
self.link_libs.put(name) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addSourceFile(self: &LibExeObjStep, file: []const u8) void {
|
||||
pub fn addSourceFile(self: *LibExeObjStep, file: []const u8) void {
|
||||
assert(self.kind != Kind.Obj);
|
||||
assert(!self.is_zig);
|
||||
self.source_files.append(file) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn setVerboseLink(self: &LibExeObjStep, value: bool) void {
|
||||
pub fn setVerboseLink(self: *LibExeObjStep, value: bool) void {
|
||||
self.verbose_link = value;
|
||||
}
|
||||
|
||||
pub fn setBuildMode(self: &LibExeObjStep, mode: builtin.Mode) void {
|
||||
pub fn setBuildMode(self: *LibExeObjStep, mode: builtin.Mode) void {
|
||||
self.build_mode = mode;
|
||||
}
|
||||
|
||||
pub fn setOutputPath(self: &LibExeObjStep, file_path: []const u8) void {
|
||||
pub fn setOutputPath(self: *LibExeObjStep, file_path: []const u8) void {
|
||||
self.output_path = file_path;
|
||||
|
||||
// catch a common mistake
|
||||
@ -1066,11 +1066,11 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getOutputPath(self: &LibExeObjStep) []const u8 {
|
||||
pub fn getOutputPath(self: *LibExeObjStep) []const u8 {
|
||||
return if (self.output_path) |output_path| output_path else os.path.join(self.builder.allocator, self.builder.cache_root, self.out_filename) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn setOutputHPath(self: &LibExeObjStep, file_path: []const u8) void {
|
||||
pub fn setOutputHPath(self: *LibExeObjStep, file_path: []const u8) void {
|
||||
self.output_h_path = file_path;
|
||||
|
||||
// catch a common mistake
|
||||
@ -1079,21 +1079,21 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getOutputHPath(self: &LibExeObjStep) []const u8 {
|
||||
pub fn getOutputHPath(self: *LibExeObjStep) []const u8 {
|
||||
return if (self.output_h_path) |output_h_path| output_h_path else os.path.join(self.builder.allocator, self.builder.cache_root, self.out_h_filename) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addAssemblyFile(self: &LibExeObjStep, path: []const u8) void {
|
||||
pub fn addAssemblyFile(self: *LibExeObjStep, path: []const u8) void {
|
||||
self.assembly_files.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addObjectFile(self: &LibExeObjStep, path: []const u8) void {
|
||||
pub fn addObjectFile(self: *LibExeObjStep, path: []const u8) void {
|
||||
assert(self.kind != Kind.Obj);
|
||||
|
||||
self.object_files.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addObject(self: &LibExeObjStep, obj: &LibExeObjStep) void {
|
||||
pub fn addObject(self: *LibExeObjStep, obj: *LibExeObjStep) void {
|
||||
assert(obj.kind == Kind.Obj);
|
||||
assert(self.kind != Kind.Obj);
|
||||
|
||||
@ -1110,15 +1110,15 @@ pub const LibExeObjStep = struct {
|
||||
self.include_dirs.append(self.builder.cache_root) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addIncludeDir(self: &LibExeObjStep, path: []const u8) void {
|
||||
pub fn addIncludeDir(self: *LibExeObjStep, path: []const u8) void {
|
||||
self.include_dirs.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addLibPath(self: &LibExeObjStep, path: []const u8) void {
|
||||
pub fn addLibPath(self: *LibExeObjStep, path: []const u8) void {
|
||||
self.lib_paths.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addPackagePath(self: &LibExeObjStep, name: []const u8, pkg_index_path: []const u8) void {
|
||||
pub fn addPackagePath(self: *LibExeObjStep, name: []const u8, pkg_index_path: []const u8) void {
|
||||
assert(self.is_zig);
|
||||
|
||||
self.packages.append(Pkg{
|
||||
@ -1127,23 +1127,23 @@ pub const LibExeObjStep = struct {
|
||||
}) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn addCompileFlags(self: &LibExeObjStep, flags: []const []const u8) void {
|
||||
pub fn addCompileFlags(self: *LibExeObjStep, flags: []const []const u8) void {
|
||||
for (flags) |flag| {
|
||||
self.cflags.append(flag) catch unreachable;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setNoStdLib(self: &LibExeObjStep, disable: bool) void {
|
||||
pub fn setNoStdLib(self: *LibExeObjStep, disable: bool) void {
|
||||
assert(!self.is_zig);
|
||||
self.disable_libc = disable;
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(LibExeObjStep, "step", step);
|
||||
return if (self.is_zig) self.makeZig() else self.makeC();
|
||||
}
|
||||
|
||||
fn makeZig(self: &LibExeObjStep) !void {
|
||||
fn makeZig(self: *LibExeObjStep) !void {
|
||||
const builder = self.builder;
|
||||
|
||||
assert(self.is_zig);
|
||||
@ -1309,7 +1309,7 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
}
|
||||
|
||||
fn appendCompileFlags(self: &LibExeObjStep, args: &ArrayList([]const u8)) void {
|
||||
fn appendCompileFlags(self: *LibExeObjStep, args: *ArrayList([]const u8)) void {
|
||||
if (!self.strip) {
|
||||
args.append("-g") catch unreachable;
|
||||
}
|
||||
@ -1354,7 +1354,7 @@ pub const LibExeObjStep = struct {
|
||||
}
|
||||
}
|
||||
|
||||
fn makeC(self: &LibExeObjStep) !void {
|
||||
fn makeC(self: *LibExeObjStep) !void {
|
||||
const builder = self.builder;
|
||||
|
||||
const cc = builder.getCCExe();
|
||||
@ -1580,7 +1580,7 @@ pub const LibExeObjStep = struct {
|
||||
|
||||
pub const TestStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
root_src: []const u8,
|
||||
build_mode: builtin.Mode,
|
||||
verbose: bool,
|
||||
@ -1591,7 +1591,7 @@ pub const TestStep = struct {
|
||||
exec_cmd_args: ?[]const ?[]const u8,
|
||||
include_dirs: ArrayList([]const u8),
|
||||
|
||||
pub fn init(builder: &Builder, root_src: []const u8) TestStep {
|
||||
pub fn init(builder: *Builder, root_src: []const u8) TestStep {
|
||||
const step_name = builder.fmt("test {}", root_src);
|
||||
return TestStep{
|
||||
.step = Step.init(step_name, builder.allocator, make),
|
||||
@ -1608,31 +1608,31 @@ pub const TestStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn setVerbose(self: &TestStep, value: bool) void {
|
||||
pub fn setVerbose(self: *TestStep, value: bool) void {
|
||||
self.verbose = value;
|
||||
}
|
||||
|
||||
pub fn addIncludeDir(self: &TestStep, path: []const u8) void {
|
||||
pub fn addIncludeDir(self: *TestStep, path: []const u8) void {
|
||||
self.include_dirs.append(path) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn setBuildMode(self: &TestStep, mode: builtin.Mode) void {
|
||||
pub fn setBuildMode(self: *TestStep, mode: builtin.Mode) void {
|
||||
self.build_mode = mode;
|
||||
}
|
||||
|
||||
pub fn linkSystemLibrary(self: &TestStep, name: []const u8) void {
|
||||
pub fn linkSystemLibrary(self: *TestStep, name: []const u8) void {
|
||||
self.link_libs.put(name) catch unreachable;
|
||||
}
|
||||
|
||||
pub fn setNamePrefix(self: &TestStep, text: []const u8) void {
|
||||
pub fn setNamePrefix(self: *TestStep, text: []const u8) void {
|
||||
self.name_prefix = text;
|
||||
}
|
||||
|
||||
pub fn setFilter(self: &TestStep, text: ?[]const u8) void {
|
||||
pub fn setFilter(self: *TestStep, text: ?[]const u8) void {
|
||||
self.filter = text;
|
||||
}
|
||||
|
||||
pub fn setTarget(self: &TestStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
|
||||
pub fn setTarget(self: *TestStep, target_arch: builtin.Arch, target_os: builtin.Os, target_environ: builtin.Environ) void {
|
||||
self.target = Target{
|
||||
.Cross = CrossTarget{
|
||||
.arch = target_arch,
|
||||
@ -1642,11 +1642,11 @@ pub const TestStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn setExecCmd(self: &TestStep, args: []const ?[]const u8) void {
|
||||
pub fn setExecCmd(self: *TestStep, args: []const ?[]const u8) void {
|
||||
self.exec_cmd_args = args;
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(TestStep, "step", step);
|
||||
const builder = self.builder;
|
||||
|
||||
@ -1739,13 +1739,13 @@ pub const TestStep = struct {
|
||||
|
||||
pub const CommandStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
argv: [][]const u8,
|
||||
cwd: ?[]const u8,
|
||||
env_map: &const BufMap,
|
||||
env_map: *const BufMap,
|
||||
|
||||
/// ::argv is copied.
|
||||
pub fn create(builder: &Builder, cwd: ?[]const u8, env_map: &const BufMap, argv: []const []const u8) &CommandStep {
|
||||
pub fn create(builder: *Builder, cwd: ?[]const u8, env_map: *const BufMap, argv: []const []const u8) *CommandStep {
|
||||
const self = builder.allocator.create(CommandStep) catch unreachable;
|
||||
self.* = CommandStep{
|
||||
.builder = builder,
|
||||
@ -1759,7 +1759,7 @@ pub const CommandStep = struct {
|
||||
return self;
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(CommandStep, "step", step);
|
||||
|
||||
const cwd = if (self.cwd) |cwd| self.builder.pathFromRoot(cwd) else self.builder.build_root;
|
||||
@ -1769,13 +1769,13 @@ pub const CommandStep = struct {
|
||||
|
||||
const InstallArtifactStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
artifact: &LibExeObjStep,
|
||||
builder: *Builder,
|
||||
artifact: *LibExeObjStep,
|
||||
dest_file: []const u8,
|
||||
|
||||
const Self = this;
|
||||
|
||||
pub fn create(builder: &Builder, artifact: &LibExeObjStep) &Self {
|
||||
pub fn create(builder: *Builder, artifact: *LibExeObjStep) *Self {
|
||||
const self = builder.allocator.create(Self) catch unreachable;
|
||||
const dest_dir = switch (artifact.kind) {
|
||||
LibExeObjStep.Kind.Obj => unreachable,
|
||||
@ -1797,7 +1797,7 @@ const InstallArtifactStep = struct {
|
||||
return self;
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(Self, "step", step);
|
||||
const builder = self.builder;
|
||||
|
||||
@ -1818,11 +1818,11 @@ const InstallArtifactStep = struct {
|
||||
|
||||
pub const InstallFileStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
src_path: []const u8,
|
||||
dest_path: []const u8,
|
||||
|
||||
pub fn init(builder: &Builder, src_path: []const u8, dest_path: []const u8) InstallFileStep {
|
||||
pub fn init(builder: *Builder, src_path: []const u8, dest_path: []const u8) InstallFileStep {
|
||||
return InstallFileStep{
|
||||
.builder = builder,
|
||||
.step = Step.init(builder.fmt("install {}", src_path), builder.allocator, make),
|
||||
@ -1831,7 +1831,7 @@ pub const InstallFileStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(InstallFileStep, "step", step);
|
||||
try self.builder.copyFile(self.src_path, self.dest_path);
|
||||
}
|
||||
@ -1839,11 +1839,11 @@ pub const InstallFileStep = struct {
|
||||
|
||||
pub const WriteFileStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
file_path: []const u8,
|
||||
data: []const u8,
|
||||
|
||||
pub fn init(builder: &Builder, file_path: []const u8, data: []const u8) WriteFileStep {
|
||||
pub fn init(builder: *Builder, file_path: []const u8, data: []const u8) WriteFileStep {
|
||||
return WriteFileStep{
|
||||
.builder = builder,
|
||||
.step = Step.init(builder.fmt("writefile {}", file_path), builder.allocator, make),
|
||||
@ -1852,7 +1852,7 @@ pub const WriteFileStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(WriteFileStep, "step", step);
|
||||
const full_path = self.builder.pathFromRoot(self.file_path);
|
||||
const full_path_dir = os.path.dirname(full_path);
|
||||
@ -1869,10 +1869,10 @@ pub const WriteFileStep = struct {
|
||||
|
||||
pub const LogStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
data: []const u8,
|
||||
|
||||
pub fn init(builder: &Builder, data: []const u8) LogStep {
|
||||
pub fn init(builder: *Builder, data: []const u8) LogStep {
|
||||
return LogStep{
|
||||
.builder = builder,
|
||||
.step = Step.init(builder.fmt("log {}", data), builder.allocator, make),
|
||||
@ -1880,7 +1880,7 @@ pub const LogStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn make(step: &Step) error!void {
|
||||
fn make(step: *Step) error!void {
|
||||
const self = @fieldParentPtr(LogStep, "step", step);
|
||||
warn("{}", self.data);
|
||||
}
|
||||
@ -1888,10 +1888,10 @@ pub const LogStep = struct {
|
||||
|
||||
pub const RemoveDirStep = struct {
|
||||
step: Step,
|
||||
builder: &Builder,
|
||||
builder: *Builder,
|
||||
dir_path: []const u8,
|
||||
|
||||
pub fn init(builder: &Builder, dir_path: []const u8) RemoveDirStep {
|
||||
pub fn init(builder: *Builder, dir_path: []const u8) RemoveDirStep {
|
||||
return RemoveDirStep{
|
||||
.builder = builder,
|
||||
.step = Step.init(builder.fmt("RemoveDir {}", dir_path), builder.allocator, make),
|
||||
@ -1899,7 +1899,7 @@ pub const RemoveDirStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: *Step) !void {
|
||||
const self = @fieldParentPtr(RemoveDirStep, "step", step);
|
||||
|
||||
const full_path = self.builder.pathFromRoot(self.dir_path);
|
||||
@ -1912,39 +1912,39 @@ pub const RemoveDirStep = struct {
|
||||
|
||||
pub const Step = struct {
|
||||
name: []const u8,
|
||||
makeFn: fn(self: &Step) error!void,
|
||||
dependencies: ArrayList(&Step),
|
||||
makeFn: fn (self: *Step) error!void,
|
||||
dependencies: ArrayList(*Step),
|
||||
loop_flag: bool,
|
||||
done_flag: bool,
|
||||
|
||||
pub fn init(name: []const u8, allocator: &Allocator, makeFn: fn(&Step) error!void) Step {
|
||||
pub fn init(name: []const u8, allocator: *Allocator, makeFn: fn (*Step) error!void) Step {
|
||||
return Step{
|
||||
.name = name,
|
||||
.makeFn = makeFn,
|
||||
.dependencies = ArrayList(&Step).init(allocator),
|
||||
.dependencies = ArrayList(*Step).init(allocator),
|
||||
.loop_flag = false,
|
||||
.done_flag = false,
|
||||
};
|
||||
}
|
||||
pub fn initNoOp(name: []const u8, allocator: &Allocator) Step {
|
||||
pub fn initNoOp(name: []const u8, allocator: *Allocator) Step {
|
||||
return init(name, allocator, makeNoOp);
|
||||
}
|
||||
|
||||
pub fn make(self: &Step) !void {
|
||||
pub fn make(self: *Step) !void {
|
||||
if (self.done_flag) return;
|
||||
|
||||
try self.makeFn(self);
|
||||
self.done_flag = true;
|
||||
}
|
||||
|
||||
pub fn dependOn(self: &Step, other: &Step) void {
|
||||
pub fn dependOn(self: *Step, other: *Step) void {
|
||||
self.dependencies.append(other) catch unreachable;
|
||||
}
|
||||
|
||||
fn makeNoOp(self: &Step) error!void {}
|
||||
fn makeNoOp(self: *Step) error!void {}
|
||||
};
|
||||
|
||||
fn doAtomicSymLinks(allocator: &Allocator, output_path: []const u8, filename_major_only: []const u8, filename_name_only: []const u8) !void {
|
||||
fn doAtomicSymLinks(allocator: *Allocator, output_path: []const u8, filename_major_only: []const u8, filename_name_only: []const u8) !void {
|
||||
const out_dir = os.path.dirname(output_path);
|
||||
const out_basename = os.path.basename(output_path);
|
||||
// sym link for libfoo.so.1 to libfoo.so.1.2.3
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
extern "c" fn __error() &c_int;
|
||||
pub extern "c" fn _NSGetExecutablePath(buf: &u8, bufsize: &u32) c_int;
|
||||
extern "c" fn __error() *c_int;
|
||||
pub extern "c" fn _NSGetExecutablePath(buf: [*]u8, bufsize: *u32) c_int;
|
||||
|
||||
pub extern "c" fn __getdirentries64(fd: c_int, buf_ptr: &u8, buf_len: usize, basep: &i64) usize;
|
||||
pub extern "c" fn __getdirentries64(fd: c_int, buf_ptr: [*]u8, buf_len: usize, basep: *i64) usize;
|
||||
|
||||
pub extern "c" fn mach_absolute_time() u64;
|
||||
pub extern "c" fn mach_timebase_info(tinfo: ?&mach_timebase_info_data) void;
|
||||
pub extern "c" fn mach_timebase_info(tinfo: ?*mach_timebase_info_data) void;
|
||||
|
||||
pub use @import("../os/darwin_errno.zig");
|
||||
|
||||
@ -60,7 +60,7 @@ pub const sigset_t = u32;
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with function name.
|
||||
pub const Sigaction = extern struct {
|
||||
handler: extern fn(c_int) void,
|
||||
handler: extern fn (c_int) void,
|
||||
sa_mask: sigset_t,
|
||||
sa_flags: c_int,
|
||||
};
|
||||
|
||||
@ -9,53 +9,55 @@ pub use switch (builtin.os) {
|
||||
};
|
||||
const empty_import = @import("../empty.zig");
|
||||
|
||||
// TODO https://github.com/ziglang/zig/issues/265 on this whole file
|
||||
|
||||
pub extern "c" fn abort() noreturn;
|
||||
pub extern "c" fn exit(code: c_int) noreturn;
|
||||
pub extern "c" fn isatty(fd: c_int) c_int;
|
||||
pub extern "c" fn close(fd: c_int) c_int;
|
||||
pub extern "c" fn fstat(fd: c_int, buf: &Stat) c_int;
|
||||
pub extern "c" fn @"fstat$INODE64"(fd: c_int, buf: &Stat) c_int;
|
||||
pub extern "c" fn fstat(fd: c_int, buf: *Stat) c_int;
|
||||
pub extern "c" fn @"fstat$INODE64"(fd: c_int, buf: *Stat) c_int;
|
||||
pub extern "c" fn lseek(fd: c_int, offset: isize, whence: c_int) isize;
|
||||
pub extern "c" fn open(path: &const u8, oflag: c_int, ...) c_int;
|
||||
pub extern "c" fn open(path: [*]const u8, oflag: c_int, ...) c_int;
|
||||
pub extern "c" fn raise(sig: c_int) c_int;
|
||||
pub extern "c" fn read(fd: c_int, buf: &c_void, nbyte: usize) isize;
|
||||
pub extern "c" fn stat(noalias path: &const u8, noalias buf: &Stat) c_int;
|
||||
pub extern "c" fn write(fd: c_int, buf: &const c_void, nbyte: usize) isize;
|
||||
pub extern "c" fn mmap(addr: ?&c_void, len: usize, prot: c_int, flags: c_int, fd: c_int, offset: isize) ?&c_void;
|
||||
pub extern "c" fn munmap(addr: &c_void, len: usize) c_int;
|
||||
pub extern "c" fn unlink(path: &const u8) c_int;
|
||||
pub extern "c" fn getcwd(buf: &u8, size: usize) ?&u8;
|
||||
pub extern "c" fn waitpid(pid: c_int, stat_loc: &c_int, options: c_int) c_int;
|
||||
pub extern "c" fn read(fd: c_int, buf: [*]c_void, nbyte: usize) isize;
|
||||
pub extern "c" fn stat(noalias path: [*]const u8, noalias buf: *Stat) c_int;
|
||||
pub extern "c" fn write(fd: c_int, buf: [*]const c_void, nbyte: usize) isize;
|
||||
pub extern "c" fn mmap(addr: ?[*]c_void, len: usize, prot: c_int, flags: c_int, fd: c_int, offset: isize) ?[*]c_void;
|
||||
pub extern "c" fn munmap(addr: [*]c_void, len: usize) c_int;
|
||||
pub extern "c" fn unlink(path: [*]const u8) c_int;
|
||||
pub extern "c" fn getcwd(buf: [*]u8, size: usize) ?[*]u8;
|
||||
pub extern "c" fn waitpid(pid: c_int, stat_loc: *c_int, options: c_int) c_int;
|
||||
pub extern "c" fn fork() c_int;
|
||||
pub extern "c" fn access(path: &const u8, mode: c_uint) c_int;
|
||||
pub extern "c" fn pipe(fds: &c_int) c_int;
|
||||
pub extern "c" fn mkdir(path: &const u8, mode: c_uint) c_int;
|
||||
pub extern "c" fn symlink(existing: &const u8, new: &const u8) c_int;
|
||||
pub extern "c" fn rename(old: &const u8, new: &const u8) c_int;
|
||||
pub extern "c" fn chdir(path: &const u8) c_int;
|
||||
pub extern "c" fn execve(path: &const u8, argv: &const ?&const u8, envp: &const ?&const u8) c_int;
|
||||
pub extern "c" fn access(path: [*]const u8, mode: c_uint) c_int;
|
||||
pub extern "c" fn pipe(fds: *[2]c_int) c_int;
|
||||
pub extern "c" fn mkdir(path: [*]const u8, mode: c_uint) c_int;
|
||||
pub extern "c" fn symlink(existing: [*]const u8, new: [*]const u8) c_int;
|
||||
pub extern "c" fn rename(old: [*]const u8, new: [*]const u8) c_int;
|
||||
pub extern "c" fn chdir(path: [*]const u8) c_int;
|
||||
pub extern "c" fn execve(path: [*]const u8, argv: [*]const ?[*]const u8, envp: [*]const ?[*]const u8) c_int;
|
||||
pub extern "c" fn dup(fd: c_int) c_int;
|
||||
pub extern "c" fn dup2(old_fd: c_int, new_fd: c_int) c_int;
|
||||
pub extern "c" fn readlink(noalias path: &const u8, noalias buf: &u8, bufsize: usize) isize;
|
||||
pub extern "c" fn realpath(noalias file_name: &const u8, noalias resolved_name: &u8) ?&u8;
|
||||
pub extern "c" fn sigprocmask(how: c_int, noalias set: &const sigset_t, noalias oset: ?&sigset_t) c_int;
|
||||
pub extern "c" fn gettimeofday(tv: ?&timeval, tz: ?&timezone) c_int;
|
||||
pub extern "c" fn sigaction(sig: c_int, noalias act: &const Sigaction, noalias oact: ?&Sigaction) c_int;
|
||||
pub extern "c" fn nanosleep(rqtp: &const timespec, rmtp: ?×pec) c_int;
|
||||
pub extern "c" fn readlink(noalias path: [*]const u8, noalias buf: [*]u8, bufsize: usize) isize;
|
||||
pub extern "c" fn realpath(noalias file_name: [*]const u8, noalias resolved_name: [*]u8) ?[*]u8;
|
||||
pub extern "c" fn sigprocmask(how: c_int, noalias set: *const sigset_t, noalias oset: ?*sigset_t) c_int;
|
||||
pub extern "c" fn gettimeofday(tv: ?*timeval, tz: ?*timezone) c_int;
|
||||
pub extern "c" fn sigaction(sig: c_int, noalias act: *const Sigaction, noalias oact: ?*Sigaction) c_int;
|
||||
pub extern "c" fn nanosleep(rqtp: *const timespec, rmtp: ?*timespec) c_int;
|
||||
pub extern "c" fn setreuid(ruid: c_uint, euid: c_uint) c_int;
|
||||
pub extern "c" fn setregid(rgid: c_uint, egid: c_uint) c_int;
|
||||
pub extern "c" fn rmdir(path: &const u8) c_int;
|
||||
pub extern "c" fn rmdir(path: [*]const u8) c_int;
|
||||
|
||||
pub extern "c" fn aligned_alloc(alignment: usize, size: usize) ?&c_void;
|
||||
pub extern "c" fn malloc(usize) ?&c_void;
|
||||
pub extern "c" fn realloc(&c_void, usize) ?&c_void;
|
||||
pub extern "c" fn free(&c_void) void;
|
||||
pub extern "c" fn posix_memalign(memptr: &&c_void, alignment: usize, size: usize) c_int;
|
||||
pub extern "c" fn aligned_alloc(alignment: usize, size: usize) ?[*]c_void;
|
||||
pub extern "c" fn malloc(usize) ?[*]c_void;
|
||||
pub extern "c" fn realloc([*]c_void, usize) ?[*]c_void;
|
||||
pub extern "c" fn free([*]c_void) void;
|
||||
pub extern "c" fn posix_memalign(memptr: *[*]c_void, alignment: usize, size: usize) c_int;
|
||||
|
||||
pub extern "pthread" fn pthread_create(noalias newthread: &pthread_t, noalias attr: ?&const pthread_attr_t, start_routine: extern fn(?&c_void) ?&c_void, noalias arg: ?&c_void) c_int;
|
||||
pub extern "pthread" fn pthread_attr_init(attr: &pthread_attr_t) c_int;
|
||||
pub extern "pthread" fn pthread_attr_setstack(attr: &pthread_attr_t, stackaddr: &c_void, stacksize: usize) c_int;
|
||||
pub extern "pthread" fn pthread_attr_destroy(attr: &pthread_attr_t) c_int;
|
||||
pub extern "pthread" fn pthread_join(thread: pthread_t, arg_return: ?&?&c_void) c_int;
|
||||
pub extern "pthread" fn pthread_create(noalias newthread: *pthread_t, noalias attr: ?*const pthread_attr_t, start_routine: extern fn (?*c_void) ?*c_void, noalias arg: ?*c_void) c_int;
|
||||
pub extern "pthread" fn pthread_attr_init(attr: *pthread_attr_t) c_int;
|
||||
pub extern "pthread" fn pthread_attr_setstack(attr: *pthread_attr_t, stackaddr: [*]c_void, stacksize: usize) c_int;
|
||||
pub extern "pthread" fn pthread_attr_destroy(attr: *pthread_attr_t) c_int;
|
||||
pub extern "pthread" fn pthread_join(thread: pthread_t, arg_return: ?*?*c_void) c_int;
|
||||
|
||||
pub const pthread_t = &@OpaqueType();
|
||||
pub const pthread_t = *@OpaqueType();
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
pub use @import("../os/linux/errno.zig");
|
||||
|
||||
pub extern "c" fn getrandom(buf_ptr: &u8, buf_len: usize, flags: c_uint) c_int;
|
||||
extern "c" fn __errno_location() &c_int;
|
||||
pub extern "c" fn getrandom(buf_ptr: [*]u8, buf_len: usize, flags: c_uint) c_int;
|
||||
extern "c" fn __errno_location() *c_int;
|
||||
pub const _errno = __errno_location;
|
||||
|
||||
pub const pthread_attr_t = extern struct {
|
||||
|
||||
@ -1 +1 @@
|
||||
pub extern "c" fn _errno() &c_int;
|
||||
pub extern "c" fn _errno() *c_int;
|
||||
|
||||
@ -49,16 +49,16 @@ fn Blake2s(comptime out_len: usize) type {
|
||||
};
|
||||
|
||||
const sigma = [10][16]u8{
|
||||
[]const u8 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
|
||||
[]const u8 { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
|
||||
[]const u8 { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
|
||||
[]const u8 { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
|
||||
[]const u8 { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
|
||||
[]const u8 { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
|
||||
[]const u8 { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
|
||||
[]const u8 { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
|
||||
[]const u8 { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
|
||||
[]const u8 { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
|
||||
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
|
||||
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
|
||||
[]const u8{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
|
||||
[]const u8{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
|
||||
[]const u8{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
|
||||
[]const u8{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
|
||||
[]const u8{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
|
||||
[]const u8{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
|
||||
[]const u8{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
|
||||
[]const u8{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
|
||||
};
|
||||
|
||||
h: [8]u32,
|
||||
@ -75,7 +75,7 @@ fn Blake2s(comptime out_len: usize) type {
|
||||
return s;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
mem.copy(u32, d.h[0..], iv[0..]);
|
||||
|
||||
// No key plus default parameters
|
||||
@ -90,7 +90,7 @@ fn Blake2s(comptime out_len: usize) type {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial buffer exists from previous update. Copy into buffer then hash.
|
||||
@ -105,7 +105,7 @@ fn Blake2s(comptime out_len: usize) type {
|
||||
// Full middle blocks.
|
||||
while (off + 64 <= b.len) : (off += 64) {
|
||||
d.t += 64;
|
||||
d.round(b[off..off + 64], false);
|
||||
d.round(b[off .. off + 64], false);
|
||||
}
|
||||
|
||||
// Copy any remainder for next pass.
|
||||
@ -113,28 +113,28 @@ fn Blake2s(comptime out_len: usize) type {
|
||||
d.buf_len += u8(b[off..].len);
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
debug.assert(out.len >= out_len / 8);
|
||||
|
||||
mem.set(u8, d.buf[d.buf_len..], 0);
|
||||
d.t += d.buf_len;
|
||||
d.round(d.buf[0..], true);
|
||||
|
||||
const rr = d.h[0..out_len / 32];
|
||||
const rr = d.h[0 .. out_len / 32];
|
||||
|
||||
for (rr) |s, j| {
|
||||
mem.writeInt(out[4 * j..4 * j + 4], s, builtin.Endian.Little);
|
||||
mem.writeInt(out[4 * j .. 4 * j + 4], s, builtin.Endian.Little);
|
||||
}
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8, last: bool) void {
|
||||
fn round(d: *Self, b: []const u8, last: bool) void {
|
||||
debug.assert(b.len == 64);
|
||||
|
||||
var m: [16]u32 = undefined;
|
||||
var v: [16]u32 = undefined;
|
||||
|
||||
for (m) |*r, i| {
|
||||
r.* = mem.readIntLE(u32, b[4 * i..4 * i + 4]);
|
||||
r.* = mem.readIntLE(u32, b[4 * i .. 4 * i + 4]);
|
||||
}
|
||||
|
||||
var k: usize = 0;
|
||||
@ -282,222 +282,18 @@ fn Blake2b(comptime out_len: usize) type {
|
||||
};
|
||||
|
||||
const sigma = [12][16]u8{
|
||||
[]const u8{
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
},
|
||||
[]const u8{
|
||||
14,
|
||||
10,
|
||||
4,
|
||||
8,
|
||||
9,
|
||||
15,
|
||||
13,
|
||||
6,
|
||||
1,
|
||||
12,
|
||||
0,
|
||||
2,
|
||||
11,
|
||||
7,
|
||||
5,
|
||||
3,
|
||||
},
|
||||
[]const u8{
|
||||
11,
|
||||
8,
|
||||
12,
|
||||
0,
|
||||
5,
|
||||
2,
|
||||
15,
|
||||
13,
|
||||
10,
|
||||
14,
|
||||
3,
|
||||
6,
|
||||
7,
|
||||
1,
|
||||
9,
|
||||
4,
|
||||
},
|
||||
[]const u8{
|
||||
7,
|
||||
9,
|
||||
3,
|
||||
1,
|
||||
13,
|
||||
12,
|
||||
11,
|
||||
14,
|
||||
2,
|
||||
6,
|
||||
5,
|
||||
10,
|
||||
4,
|
||||
0,
|
||||
15,
|
||||
8,
|
||||
},
|
||||
[]const u8{
|
||||
9,
|
||||
0,
|
||||
5,
|
||||
7,
|
||||
2,
|
||||
4,
|
||||
10,
|
||||
15,
|
||||
14,
|
||||
1,
|
||||
11,
|
||||
12,
|
||||
6,
|
||||
8,
|
||||
3,
|
||||
13,
|
||||
},
|
||||
[]const u8{
|
||||
2,
|
||||
12,
|
||||
6,
|
||||
10,
|
||||
0,
|
||||
11,
|
||||
8,
|
||||
3,
|
||||
4,
|
||||
13,
|
||||
7,
|
||||
5,
|
||||
15,
|
||||
14,
|
||||
1,
|
||||
9,
|
||||
},
|
||||
[]const u8{
|
||||
12,
|
||||
5,
|
||||
1,
|
||||
15,
|
||||
14,
|
||||
13,
|
||||
4,
|
||||
10,
|
||||
0,
|
||||
7,
|
||||
6,
|
||||
3,
|
||||
9,
|
||||
2,
|
||||
8,
|
||||
11,
|
||||
},
|
||||
[]const u8{
|
||||
13,
|
||||
11,
|
||||
7,
|
||||
14,
|
||||
12,
|
||||
1,
|
||||
3,
|
||||
9,
|
||||
5,
|
||||
0,
|
||||
15,
|
||||
4,
|
||||
8,
|
||||
6,
|
||||
2,
|
||||
10,
|
||||
},
|
||||
[]const u8{
|
||||
6,
|
||||
15,
|
||||
14,
|
||||
9,
|
||||
11,
|
||||
3,
|
||||
0,
|
||||
8,
|
||||
12,
|
||||
2,
|
||||
13,
|
||||
7,
|
||||
1,
|
||||
4,
|
||||
10,
|
||||
5,
|
||||
},
|
||||
[]const u8{
|
||||
10,
|
||||
2,
|
||||
8,
|
||||
4,
|
||||
7,
|
||||
6,
|
||||
1,
|
||||
5,
|
||||
15,
|
||||
11,
|
||||
9,
|
||||
14,
|
||||
3,
|
||||
12,
|
||||
13,
|
||||
0,
|
||||
},
|
||||
[]const u8{
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
},
|
||||
[]const u8{
|
||||
14,
|
||||
10,
|
||||
4,
|
||||
8,
|
||||
9,
|
||||
15,
|
||||
13,
|
||||
6,
|
||||
1,
|
||||
12,
|
||||
0,
|
||||
2,
|
||||
11,
|
||||
7,
|
||||
5,
|
||||
3,
|
||||
},
|
||||
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
|
||||
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
|
||||
[]const u8{ 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
|
||||
[]const u8{ 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
|
||||
[]const u8{ 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
|
||||
[]const u8{ 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
|
||||
[]const u8{ 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
|
||||
[]const u8{ 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
|
||||
[]const u8{ 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
|
||||
[]const u8{ 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
|
||||
[]const u8{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
|
||||
[]const u8{ 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
|
||||
};
|
||||
|
||||
h: [8]u64,
|
||||
@ -514,7 +310,7 @@ fn Blake2b(comptime out_len: usize) type {
|
||||
return s;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
mem.copy(u64, d.h[0..], iv[0..]);
|
||||
|
||||
// No key plus default parameters
|
||||
@ -529,7 +325,7 @@ fn Blake2b(comptime out_len: usize) type {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial buffer exists from previous update. Copy into buffer then hash.
|
||||
@ -544,7 +340,7 @@ fn Blake2b(comptime out_len: usize) type {
|
||||
// Full middle blocks.
|
||||
while (off + 128 <= b.len) : (off += 128) {
|
||||
d.t += 128;
|
||||
d.round(b[off..off + 128], false);
|
||||
d.round(b[off .. off + 128], false);
|
||||
}
|
||||
|
||||
// Copy any remainder for next pass.
|
||||
@ -552,26 +348,26 @@ fn Blake2b(comptime out_len: usize) type {
|
||||
d.buf_len += u8(b[off..].len);
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
mem.set(u8, d.buf[d.buf_len..], 0);
|
||||
d.t += d.buf_len;
|
||||
d.round(d.buf[0..], true);
|
||||
|
||||
const rr = d.h[0..out_len / 64];
|
||||
const rr = d.h[0 .. out_len / 64];
|
||||
|
||||
for (rr) |s, j| {
|
||||
mem.writeInt(out[8 * j..8 * j + 8], s, builtin.Endian.Little);
|
||||
mem.writeInt(out[8 * j .. 8 * j + 8], s, builtin.Endian.Little);
|
||||
}
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8, last: bool) void {
|
||||
fn round(d: *Self, b: []const u8, last: bool) void {
|
||||
debug.assert(b.len == 128);
|
||||
|
||||
var m: [16]u64 = undefined;
|
||||
var v: [16]u64 = undefined;
|
||||
|
||||
for (m) |*r, i| {
|
||||
r.* = mem.readIntLE(u64, b[8 * i..8 * i + 8]);
|
||||
r.* = mem.readIntLE(u64, b[8 * i .. 8 * i + 8]);
|
||||
}
|
||||
|
||||
var k: usize = 0;
|
||||
|
||||
@ -44,7 +44,7 @@ pub const Md5 = struct {
|
||||
return d;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
d.s[0] = 0x67452301;
|
||||
d.s[1] = 0xEFCDAB89;
|
||||
d.s[2] = 0x98BADCFE;
|
||||
@ -59,7 +59,7 @@ pub const Md5 = struct {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial buffer exists from previous update. Copy into buffer then hash.
|
||||
@ -73,7 +73,7 @@ pub const Md5 = struct {
|
||||
|
||||
// Full middle blocks.
|
||||
while (off + 64 <= b.len) : (off += 64) {
|
||||
d.round(b[off..off + 64]);
|
||||
d.round(b[off .. off + 64]);
|
||||
}
|
||||
|
||||
// Copy any remainder for next pass.
|
||||
@ -84,7 +84,7 @@ pub const Md5 = struct {
|
||||
d.total_len +%= b.len;
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
debug.assert(out.len >= 16);
|
||||
|
||||
// The buffer here will never be completely full.
|
||||
@ -112,11 +112,11 @@ pub const Md5 = struct {
|
||||
d.round(d.buf[0..]);
|
||||
|
||||
for (d.s) |s, j| {
|
||||
mem.writeInt(out[4 * j..4 * j + 4], s, builtin.Endian.Little);
|
||||
mem.writeInt(out[4 * j .. 4 * j + 4], s, builtin.Endian.Little);
|
||||
}
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8) void {
|
||||
fn round(d: *Self, b: []const u8) void {
|
||||
debug.assert(b.len == 64);
|
||||
|
||||
var s: [16]u32 = undefined;
|
||||
|
||||
@ -43,7 +43,7 @@ pub const Sha1 = struct {
|
||||
return d;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
d.s[0] = 0x67452301;
|
||||
d.s[1] = 0xEFCDAB89;
|
||||
d.s[2] = 0x98BADCFE;
|
||||
@ -59,7 +59,7 @@ pub const Sha1 = struct {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial buffer exists from previous update. Copy into buffer then hash.
|
||||
@ -73,7 +73,7 @@ pub const Sha1 = struct {
|
||||
|
||||
// Full middle blocks.
|
||||
while (off + 64 <= b.len) : (off += 64) {
|
||||
d.round(b[off..off + 64]);
|
||||
d.round(b[off .. off + 64]);
|
||||
}
|
||||
|
||||
// Copy any remainder for next pass.
|
||||
@ -83,7 +83,7 @@ pub const Sha1 = struct {
|
||||
d.total_len += b.len;
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
debug.assert(out.len >= 20);
|
||||
|
||||
// The buffer here will never be completely full.
|
||||
@ -111,11 +111,11 @@ pub const Sha1 = struct {
|
||||
d.round(d.buf[0..]);
|
||||
|
||||
for (d.s) |s, j| {
|
||||
mem.writeInt(out[4 * j..4 * j + 4], s, builtin.Endian.Big);
|
||||
mem.writeInt(out[4 * j .. 4 * j + 4], s, builtin.Endian.Big);
|
||||
}
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8) void {
|
||||
fn round(d: *Self, b: []const u8) void {
|
||||
debug.assert(b.len == 64);
|
||||
|
||||
var s: [16]u32 = undefined;
|
||||
|
||||
@ -93,7 +93,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
|
||||
return d;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
d.s[0] = params.iv0;
|
||||
d.s[1] = params.iv1;
|
||||
d.s[2] = params.iv2;
|
||||
@ -112,7 +112,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial buffer exists from previous update. Copy into buffer then hash.
|
||||
@ -126,7 +126,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
|
||||
|
||||
// Full middle blocks.
|
||||
while (off + 64 <= b.len) : (off += 64) {
|
||||
d.round(b[off..off + 64]);
|
||||
d.round(b[off .. off + 64]);
|
||||
}
|
||||
|
||||
// Copy any remainder for next pass.
|
||||
@ -136,7 +136,7 @@ fn Sha2_32(comptime params: Sha2Params32) type {
|
||||
d.total_len += b.len;
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
debug.assert(out.len >= params.out_len / 8);
|
||||
|
||||
// The buffer here will never be completely full.
|
||||
@ -164,14 +164,14 @@ fn Sha2_32(comptime params: Sha2Params32) type {
|
||||
d.round(d.buf[0..]);
|
||||
|
||||
// May truncate for possible 224 output
|
||||
const rr = d.s[0..params.out_len / 32];
|
||||
const rr = d.s[0 .. params.out_len / 32];
|
||||
|
||||
for (rr) |s, j| {
|
||||
mem.writeInt(out[4 * j..4 * j + 4], s, builtin.Endian.Big);
|
||||
mem.writeInt(out[4 * j .. 4 * j + 4], s, builtin.Endian.Big);
|
||||
}
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8) void {
|
||||
fn round(d: *Self, b: []const u8) void {
|
||||
debug.assert(b.len == 64);
|
||||
|
||||
var s: [64]u32 = undefined;
|
||||
@ -434,7 +434,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
|
||||
return d;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
d.s[0] = params.iv0;
|
||||
d.s[1] = params.iv1;
|
||||
d.s[2] = params.iv2;
|
||||
@ -453,7 +453,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial buffer exists from previous update. Copy into buffer then hash.
|
||||
@ -467,7 +467,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
|
||||
|
||||
// Full middle blocks.
|
||||
while (off + 128 <= b.len) : (off += 128) {
|
||||
d.round(b[off..off + 128]);
|
||||
d.round(b[off .. off + 128]);
|
||||
}
|
||||
|
||||
// Copy any remainder for next pass.
|
||||
@ -477,7 +477,7 @@ fn Sha2_64(comptime params: Sha2Params64) type {
|
||||
d.total_len += b.len;
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
debug.assert(out.len >= params.out_len / 8);
|
||||
|
||||
// The buffer here will never be completely full.
|
||||
@ -505,14 +505,14 @@ fn Sha2_64(comptime params: Sha2Params64) type {
|
||||
d.round(d.buf[0..]);
|
||||
|
||||
// May truncate for possible 384 output
|
||||
const rr = d.s[0..params.out_len / 64];
|
||||
const rr = d.s[0 .. params.out_len / 64];
|
||||
|
||||
for (rr) |s, j| {
|
||||
mem.writeInt(out[8 * j..8 * j + 8], s, builtin.Endian.Big);
|
||||
mem.writeInt(out[8 * j .. 8 * j + 8], s, builtin.Endian.Big);
|
||||
}
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8) void {
|
||||
fn round(d: *Self, b: []const u8) void {
|
||||
debug.assert(b.len == 128);
|
||||
|
||||
var s: [80]u64 = undefined;
|
||||
|
||||
@ -26,7 +26,7 @@ fn Keccak(comptime bits: usize, comptime delim: u8) type {
|
||||
return d;
|
||||
}
|
||||
|
||||
pub fn reset(d: &Self) void {
|
||||
pub fn reset(d: *Self) void {
|
||||
mem.set(u8, d.s[0..], 0);
|
||||
d.offset = 0;
|
||||
d.rate = 200 - (bits / 4);
|
||||
@ -38,7 +38,7 @@ fn Keccak(comptime bits: usize, comptime delim: u8) type {
|
||||
d.final(out);
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var ip: usize = 0;
|
||||
var len = b.len;
|
||||
var rate = d.rate - d.offset;
|
||||
@ -46,7 +46,7 @@ fn Keccak(comptime bits: usize, comptime delim: u8) type {
|
||||
|
||||
// absorb
|
||||
while (len >= rate) {
|
||||
for (d.s[offset..offset + rate]) |*r, i|
|
||||
for (d.s[offset .. offset + rate]) |*r, i|
|
||||
r.* ^= b[ip..][i];
|
||||
|
||||
keccak_f(1600, d.s[0..]);
|
||||
@ -57,13 +57,13 @@ fn Keccak(comptime bits: usize, comptime delim: u8) type {
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
for (d.s[offset..offset + len]) |*r, i|
|
||||
for (d.s[offset .. offset + len]) |*r, i|
|
||||
r.* ^= b[ip..][i];
|
||||
|
||||
d.offset = offset + len;
|
||||
}
|
||||
|
||||
pub fn final(d: &Self, out: []u8) void {
|
||||
pub fn final(d: *Self, out: []u8) void {
|
||||
// padding
|
||||
d.s[d.offset] ^= delim;
|
||||
d.s[d.rate - 1] ^= 0x80;
|
||||
@ -193,7 +193,7 @@ fn keccak_f(comptime F: usize, d: []u8) void {
|
||||
var c = []const u64{0} ** 5;
|
||||
|
||||
for (s) |*r, i| {
|
||||
r.* = mem.readIntLE(u64, d[8 * i..8 * i + 8]);
|
||||
r.* = mem.readIntLE(u64, d[8 * i .. 8 * i + 8]);
|
||||
}
|
||||
|
||||
comptime var x: usize = 0;
|
||||
@ -240,7 +240,7 @@ fn keccak_f(comptime F: usize, d: []u8) void {
|
||||
}
|
||||
|
||||
for (s) |r, i| {
|
||||
mem.writeInt(d[8 * i..8 * i + 8], r, builtin.Endian.Little);
|
||||
mem.writeInt(d[8 * i .. 8 * i + 8], r, builtin.Endian.Little);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -14,7 +14,7 @@ pub fn assertEqualHash(comptime Hasher: var, comptime expected: []const u8, inpu
|
||||
pub fn assertEqual(comptime expected: []const u8, input: []const u8) void {
|
||||
var expected_bytes: [expected.len / 2]u8 = undefined;
|
||||
for (expected_bytes) |*r, i| {
|
||||
r.* = fmt.parseInt(u8, expected[2 * i..2 * i + 2], 16) catch unreachable;
|
||||
r.* = fmt.parseInt(u8, expected[2 * i .. 2 * i + 2], 16) catch unreachable;
|
||||
}
|
||||
|
||||
debug.assert(mem.eql(u8, expected_bytes, input));
|
||||
|
||||
@ -15,8 +15,8 @@ const BytesToHash = 1024 * MiB;
|
||||
|
||||
pub fn main() !void {
|
||||
var stdout_file = try std.io.getStdOut();
|
||||
var stdout_out_stream = std.io.FileOutStream.init(&stdout_file);
|
||||
const stdout = &stdout_out_stream.stream;
|
||||
var stdout_out_stream = std.io.FileOutStream.init(*stdout_file);
|
||||
const stdout = *stdout_out_stream.stream;
|
||||
|
||||
var block: [HashFunction.block_size]u8 = undefined;
|
||||
std.mem.set(u8, block[0..], 0);
|
||||
|
||||
28
std/cstr.zig
28
std/cstr.zig
@ -9,13 +9,13 @@ pub const line_sep = switch (builtin.os) {
|
||||
else => "\n",
|
||||
};
|
||||
|
||||
pub fn len(ptr: &const u8) usize {
|
||||
pub fn len(ptr: [*]const u8) usize {
|
||||
var count: usize = 0;
|
||||
while (ptr[count] != 0) : (count += 1) {}
|
||||
return count;
|
||||
}
|
||||
|
||||
pub fn cmp(a: &const u8, b: &const u8) i8 {
|
||||
pub fn cmp(a: [*]const u8, b: [*]const u8) i8 {
|
||||
var index: usize = 0;
|
||||
while (a[index] == b[index] and a[index] != 0) : (index += 1) {}
|
||||
if (a[index] > b[index]) {
|
||||
@ -27,11 +27,11 @@ pub fn cmp(a: &const u8, b: &const u8) i8 {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn toSliceConst(str: &const u8) []const u8 {
|
||||
pub fn toSliceConst(str: [*]const u8) []const u8 {
|
||||
return str[0..len(str)];
|
||||
}
|
||||
|
||||
pub fn toSlice(str: &u8) []u8 {
|
||||
pub fn toSlice(str: [*]u8) []u8 {
|
||||
return str[0..len(str)];
|
||||
}
|
||||
|
||||
@ -47,7 +47,7 @@ fn testCStrFnsImpl() void {
|
||||
|
||||
/// Returns a mutable slice with 1 more byte of length which is a null byte.
|
||||
/// Caller owns the returned memory.
|
||||
pub fn addNullByte(allocator: &mem.Allocator, slice: []const u8) ![]u8 {
|
||||
pub fn addNullByte(allocator: *mem.Allocator, slice: []const u8) ![]u8 {
|
||||
const result = try allocator.alloc(u8, slice.len + 1);
|
||||
mem.copy(u8, result, slice);
|
||||
result[slice.len] = 0;
|
||||
@ -55,13 +55,13 @@ pub fn addNullByte(allocator: &mem.Allocator, slice: []const u8) ![]u8 {
|
||||
}
|
||||
|
||||
pub const NullTerminated2DArray = struct {
|
||||
allocator: &mem.Allocator,
|
||||
allocator: *mem.Allocator,
|
||||
byte_count: usize,
|
||||
ptr: ?&?&u8,
|
||||
ptr: ?[*]?[*]u8,
|
||||
|
||||
/// Takes N lists of strings, concatenates the lists together, and adds a null terminator
|
||||
/// Caller must deinit result
|
||||
pub fn fromSlices(allocator: &mem.Allocator, slices: []const []const []const u8) !NullTerminated2DArray {
|
||||
pub fn fromSlices(allocator: *mem.Allocator, slices: []const []const []const u8) !NullTerminated2DArray {
|
||||
var new_len: usize = 1; // 1 for the list null
|
||||
var byte_count: usize = 0;
|
||||
for (slices) |slice| {
|
||||
@ -75,16 +75,16 @@ pub const NullTerminated2DArray = struct {
|
||||
const index_size = @sizeOf(usize) * new_len; // size of the ptrs
|
||||
byte_count += index_size;
|
||||
|
||||
const buf = try allocator.alignedAlloc(u8, @alignOf(?&u8), byte_count);
|
||||
const buf = try allocator.alignedAlloc(u8, @alignOf(?*u8), byte_count);
|
||||
errdefer allocator.free(buf);
|
||||
|
||||
var write_index = index_size;
|
||||
const index_buf = ([]?&u8)(buf);
|
||||
const index_buf = ([]?[*]u8)(buf);
|
||||
|
||||
var i: usize = 0;
|
||||
for (slices) |slice| {
|
||||
for (slice) |inner| {
|
||||
index_buf[i] = &buf[write_index];
|
||||
index_buf[i] = buf.ptr + write_index;
|
||||
i += 1;
|
||||
mem.copy(u8, buf[write_index..], inner);
|
||||
write_index += inner.len;
|
||||
@ -97,12 +97,12 @@ pub const NullTerminated2DArray = struct {
|
||||
return NullTerminated2DArray{
|
||||
.allocator = allocator,
|
||||
.byte_count = byte_count,
|
||||
.ptr = @ptrCast(?&?&u8, buf.ptr),
|
||||
.ptr = @ptrCast(?[*]?[*]u8, buf.ptr),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(self: &NullTerminated2DArray) void {
|
||||
const buf = @ptrCast(&u8, self.ptr);
|
||||
pub fn deinit(self: *NullTerminated2DArray) void {
|
||||
const buf = @ptrCast([*]u8, self.ptr);
|
||||
self.allocator.free(buf[0..self.byte_count]);
|
||||
}
|
||||
};
|
||||
|
||||
@ -7,12 +7,12 @@ pub const FailingAllocator = struct {
|
||||
allocator: mem.Allocator,
|
||||
index: usize,
|
||||
fail_index: usize,
|
||||
internal_allocator: &mem.Allocator,
|
||||
internal_allocator: *mem.Allocator,
|
||||
allocated_bytes: usize,
|
||||
freed_bytes: usize,
|
||||
deallocations: usize,
|
||||
|
||||
pub fn init(allocator: &mem.Allocator, fail_index: usize) FailingAllocator {
|
||||
pub fn init(allocator: *mem.Allocator, fail_index: usize) FailingAllocator {
|
||||
return FailingAllocator{
|
||||
.internal_allocator = allocator,
|
||||
.fail_index = fail_index,
|
||||
@ -28,7 +28,7 @@ pub const FailingAllocator = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn alloc(allocator: &mem.Allocator, n: usize, alignment: u29) ![]u8 {
|
||||
fn alloc(allocator: *mem.Allocator, n: usize, alignment: u29) ![]u8 {
|
||||
const self = @fieldParentPtr(FailingAllocator, "allocator", allocator);
|
||||
if (self.index == self.fail_index) {
|
||||
return error.OutOfMemory;
|
||||
@ -39,7 +39,7 @@ pub const FailingAllocator = struct {
|
||||
return result;
|
||||
}
|
||||
|
||||
fn realloc(allocator: &mem.Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
|
||||
fn realloc(allocator: *mem.Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
|
||||
const self = @fieldParentPtr(FailingAllocator, "allocator", allocator);
|
||||
if (new_size <= old_mem.len) {
|
||||
self.freed_bytes += old_mem.len - new_size;
|
||||
@ -55,7 +55,7 @@ pub const FailingAllocator = struct {
|
||||
return result;
|
||||
}
|
||||
|
||||
fn free(allocator: &mem.Allocator, bytes: []u8) void {
|
||||
fn free(allocator: *mem.Allocator, bytes: []u8) void {
|
||||
const self = @fieldParentPtr(FailingAllocator, "allocator", allocator);
|
||||
self.freed_bytes += bytes.len;
|
||||
self.deallocations += 1;
|
||||
|
||||
@ -16,12 +16,12 @@ pub const FailingAllocator = @import("failing_allocator.zig").FailingAllocator;
|
||||
/// TODO atomic/multithread support
|
||||
var stderr_file: os.File = undefined;
|
||||
var stderr_file_out_stream: io.FileOutStream = undefined;
|
||||
var stderr_stream: ?&io.OutStream(io.FileOutStream.Error) = null;
|
||||
var stderr_stream: ?*io.OutStream(io.FileOutStream.Error) = null;
|
||||
pub fn warn(comptime fmt: []const u8, args: ...) void {
|
||||
const stderr = getStderrStream() catch return;
|
||||
stderr.print(fmt, args) catch return;
|
||||
}
|
||||
fn getStderrStream() !&io.OutStream(io.FileOutStream.Error) {
|
||||
fn getStderrStream() !*io.OutStream(io.FileOutStream.Error) {
|
||||
if (stderr_stream) |st| {
|
||||
return st;
|
||||
} else {
|
||||
@ -33,8 +33,8 @@ fn getStderrStream() !&io.OutStream(io.FileOutStream.Error) {
|
||||
}
|
||||
}
|
||||
|
||||
var self_debug_info: ?&ElfStackTrace = null;
|
||||
pub fn getSelfDebugInfo() !&ElfStackTrace {
|
||||
var self_debug_info: ?*ElfStackTrace = null;
|
||||
pub fn getSelfDebugInfo() !*ElfStackTrace {
|
||||
if (self_debug_info) |info| {
|
||||
return info;
|
||||
} else {
|
||||
@ -58,7 +58,7 @@ pub fn dumpCurrentStackTrace(start_addr: ?usize) void {
|
||||
}
|
||||
|
||||
/// Tries to print a stack trace to stderr, unbuffered, and ignores any error returned.
|
||||
pub fn dumpStackTrace(stack_trace: &const builtin.StackTrace) void {
|
||||
pub fn dumpStackTrace(stack_trace: *const builtin.StackTrace) void {
|
||||
const stderr = getStderrStream() catch return;
|
||||
const debug_info = getSelfDebugInfo() catch |err| {
|
||||
stderr.print("Unable to dump stack trace: Unable to open debug info: {}\n", @errorName(err)) catch return;
|
||||
@ -104,7 +104,7 @@ pub fn panic(comptime format: []const u8, args: ...) noreturn {
|
||||
|
||||
var panicking: u8 = 0; // TODO make this a bool
|
||||
|
||||
pub fn panicExtra(trace: ?&const builtin.StackTrace, first_trace_addr: ?usize, comptime format: []const u8, args: ...) noreturn {
|
||||
pub fn panicExtra(trace: ?*const builtin.StackTrace, first_trace_addr: ?usize, comptime format: []const u8, args: ...) noreturn {
|
||||
@setCold(true);
|
||||
|
||||
if (@atomicRmw(u8, &panicking, builtin.AtomicRmwOp.Xchg, 1, builtin.AtomicOrder.SeqCst) == 1) {
|
||||
@ -130,7 +130,7 @@ const WHITE = "\x1b[37;1m";
|
||||
const DIM = "\x1b[2m";
|
||||
const RESET = "\x1b[0m";
|
||||
|
||||
pub fn writeStackTrace(stack_trace: &const builtin.StackTrace, out_stream: var, allocator: &mem.Allocator, debug_info: &ElfStackTrace, tty_color: bool) !void {
|
||||
pub fn writeStackTrace(stack_trace: *const builtin.StackTrace, out_stream: var, allocator: *mem.Allocator, debug_info: *ElfStackTrace, tty_color: bool) !void {
|
||||
var frame_index: usize = undefined;
|
||||
var frames_left: usize = undefined;
|
||||
if (stack_trace.index < stack_trace.instruction_addresses.len) {
|
||||
@ -150,7 +150,7 @@ pub fn writeStackTrace(stack_trace: &const builtin.StackTrace, out_stream: var,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn writeCurrentStackTrace(out_stream: var, allocator: &mem.Allocator, debug_info: &ElfStackTrace, tty_color: bool, start_addr: ?usize) !void {
|
||||
pub fn writeCurrentStackTrace(out_stream: var, allocator: *mem.Allocator, debug_info: *ElfStackTrace, tty_color: bool, start_addr: ?usize) !void {
|
||||
const AddressState = union(enum) {
|
||||
NotLookingForStartAddress,
|
||||
LookingForStartAddress: usize,
|
||||
@ -166,8 +166,8 @@ pub fn writeCurrentStackTrace(out_stream: var, allocator: &mem.Allocator, debug_
|
||||
}
|
||||
|
||||
var fp = @ptrToInt(@frameAddress());
|
||||
while (fp != 0) : (fp = @intToPtr(&const usize, fp).*) {
|
||||
const return_address = @intToPtr(&const usize, fp + @sizeOf(usize)).*;
|
||||
while (fp != 0) : (fp = @intToPtr(*const usize, fp).*) {
|
||||
const return_address = @intToPtr(*const usize, fp + @sizeOf(usize)).*;
|
||||
|
||||
switch (addr_state) {
|
||||
AddressState.NotLookingForStartAddress => {},
|
||||
@ -183,7 +183,7 @@ pub fn writeCurrentStackTrace(out_stream: var, allocator: &mem.Allocator, debug_
|
||||
}
|
||||
}
|
||||
|
||||
fn printSourceAtAddress(debug_info: &ElfStackTrace, out_stream: var, address: usize) !void {
|
||||
fn printSourceAtAddress(debug_info: *ElfStackTrace, out_stream: var, address: usize) !void {
|
||||
const ptr_hex = "0x{x}";
|
||||
|
||||
switch (builtin.os) {
|
||||
@ -236,7 +236,7 @@ fn printSourceAtAddress(debug_info: &ElfStackTrace, out_stream: var, address: us
|
||||
}
|
||||
}
|
||||
|
||||
pub fn openSelfDebugInfo(allocator: &mem.Allocator) !&ElfStackTrace {
|
||||
pub fn openSelfDebugInfo(allocator: *mem.Allocator) !*ElfStackTrace {
|
||||
switch (builtin.object_format) {
|
||||
builtin.ObjectFormat.elf => {
|
||||
const st = try allocator.create(ElfStackTrace);
|
||||
@ -289,7 +289,7 @@ pub fn openSelfDebugInfo(allocator: &mem.Allocator) !&ElfStackTrace {
|
||||
}
|
||||
}
|
||||
|
||||
fn printLineFromFile(allocator: &mem.Allocator, out_stream: var, line_info: &const LineInfo) !void {
|
||||
fn printLineFromFile(allocator: *mem.Allocator, out_stream: var, line_info: *const LineInfo) !void {
|
||||
var f = try os.File.openRead(allocator, line_info.file_name);
|
||||
defer f.close();
|
||||
// TODO fstat and make sure that the file has the correct size
|
||||
@ -325,32 +325,32 @@ pub const ElfStackTrace = switch (builtin.os) {
|
||||
builtin.Os.macosx => struct {
|
||||
symbol_table: macho.SymbolTable,
|
||||
|
||||
pub fn close(self: &ElfStackTrace) void {
|
||||
pub fn close(self: *ElfStackTrace) void {
|
||||
self.symbol_table.deinit();
|
||||
}
|
||||
},
|
||||
else => struct {
|
||||
self_exe_file: os.File,
|
||||
elf: elf.Elf,
|
||||
debug_info: &elf.SectionHeader,
|
||||
debug_abbrev: &elf.SectionHeader,
|
||||
debug_str: &elf.SectionHeader,
|
||||
debug_line: &elf.SectionHeader,
|
||||
debug_ranges: ?&elf.SectionHeader,
|
||||
debug_info: *elf.SectionHeader,
|
||||
debug_abbrev: *elf.SectionHeader,
|
||||
debug_str: *elf.SectionHeader,
|
||||
debug_line: *elf.SectionHeader,
|
||||
debug_ranges: ?*elf.SectionHeader,
|
||||
abbrev_table_list: ArrayList(AbbrevTableHeader),
|
||||
compile_unit_list: ArrayList(CompileUnit),
|
||||
|
||||
pub fn allocator(self: &const ElfStackTrace) &mem.Allocator {
|
||||
pub fn allocator(self: *const ElfStackTrace) *mem.Allocator {
|
||||
return self.abbrev_table_list.allocator;
|
||||
}
|
||||
|
||||
pub fn readString(self: &ElfStackTrace) ![]u8 {
|
||||
pub fn readString(self: *ElfStackTrace) ![]u8 {
|
||||
var in_file_stream = io.FileInStream.init(&self.self_exe_file);
|
||||
const in_stream = &in_file_stream.stream;
|
||||
return readStringRaw(self.allocator(), in_stream);
|
||||
}
|
||||
|
||||
pub fn close(self: &ElfStackTrace) void {
|
||||
pub fn close(self: *ElfStackTrace) void {
|
||||
self.self_exe_file.close();
|
||||
self.elf.close();
|
||||
}
|
||||
@ -365,7 +365,7 @@ const PcRange = struct {
|
||||
const CompileUnit = struct {
|
||||
version: u16,
|
||||
is_64: bool,
|
||||
die: &Die,
|
||||
die: *Die,
|
||||
index: usize,
|
||||
pc_range: ?PcRange,
|
||||
};
|
||||
@ -408,7 +408,7 @@ const Constant = struct {
|
||||
payload: []u8,
|
||||
signed: bool,
|
||||
|
||||
fn asUnsignedLe(self: &const Constant) !u64 {
|
||||
fn asUnsignedLe(self: *const Constant) !u64 {
|
||||
if (self.payload.len > @sizeOf(u64)) return error.InvalidDebugInfo;
|
||||
if (self.signed) return error.InvalidDebugInfo;
|
||||
return mem.readInt(self.payload, u64, builtin.Endian.Little);
|
||||
@ -425,14 +425,14 @@ const Die = struct {
|
||||
value: FormValue,
|
||||
};
|
||||
|
||||
fn getAttr(self: &const Die, id: u64) ?&const FormValue {
|
||||
fn getAttr(self: *const Die, id: u64) ?*const FormValue {
|
||||
for (self.attrs.toSliceConst()) |*attr| {
|
||||
if (attr.id == id) return &attr.value;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn getAttrAddr(self: &const Die, id: u64) !u64 {
|
||||
fn getAttrAddr(self: *const Die, id: u64) !u64 {
|
||||
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.Address => |value| value,
|
||||
@ -440,7 +440,7 @@ const Die = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn getAttrSecOffset(self: &const Die, id: u64) !u64 {
|
||||
fn getAttrSecOffset(self: *const Die, id: u64) !u64 {
|
||||
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.Const => |value| value.asUnsignedLe(),
|
||||
@ -449,7 +449,7 @@ const Die = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn getAttrUnsignedLe(self: &const Die, id: u64) !u64 {
|
||||
fn getAttrUnsignedLe(self: *const Die, id: u64) !u64 {
|
||||
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.Const => |value| value.asUnsignedLe(),
|
||||
@ -457,7 +457,7 @@ const Die = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn getAttrString(self: &const Die, st: &ElfStackTrace, id: u64) ![]u8 {
|
||||
fn getAttrString(self: *const Die, st: *ElfStackTrace, id: u64) ![]u8 {
|
||||
const form_value = self.getAttr(id) ?? return error.MissingDebugInfo;
|
||||
return switch (form_value.*) {
|
||||
FormValue.String => |value| value,
|
||||
@ -478,9 +478,9 @@ const LineInfo = struct {
|
||||
line: usize,
|
||||
column: usize,
|
||||
file_name: []u8,
|
||||
allocator: &mem.Allocator,
|
||||
allocator: *mem.Allocator,
|
||||
|
||||
fn deinit(self: &const LineInfo) void {
|
||||
fn deinit(self: *const LineInfo) void {
|
||||
self.allocator.free(self.file_name);
|
||||
}
|
||||
};
|
||||
@ -496,7 +496,7 @@ const LineNumberProgram = struct {
|
||||
|
||||
target_address: usize,
|
||||
include_dirs: []const []const u8,
|
||||
file_entries: &ArrayList(FileEntry),
|
||||
file_entries: *ArrayList(FileEntry),
|
||||
|
||||
prev_address: usize,
|
||||
prev_file: usize,
|
||||
@ -506,7 +506,7 @@ const LineNumberProgram = struct {
|
||||
prev_basic_block: bool,
|
||||
prev_end_sequence: bool,
|
||||
|
||||
pub fn init(is_stmt: bool, include_dirs: []const []const u8, file_entries: &ArrayList(FileEntry), target_address: usize) LineNumberProgram {
|
||||
pub fn init(is_stmt: bool, include_dirs: []const []const u8, file_entries: *ArrayList(FileEntry), target_address: usize) LineNumberProgram {
|
||||
return LineNumberProgram{
|
||||
.address = 0,
|
||||
.file = 1,
|
||||
@ -528,7 +528,7 @@ const LineNumberProgram = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn checkLineMatch(self: &LineNumberProgram) !?LineInfo {
|
||||
pub fn checkLineMatch(self: *LineNumberProgram) !?LineInfo {
|
||||
if (self.target_address >= self.prev_address and self.target_address < self.address) {
|
||||
const file_entry = if (self.prev_file == 0) {
|
||||
return error.MissingDebugInfo;
|
||||
@ -562,7 +562,7 @@ const LineNumberProgram = struct {
|
||||
}
|
||||
};
|
||||
|
||||
fn readStringRaw(allocator: &mem.Allocator, in_stream: var) ![]u8 {
|
||||
fn readStringRaw(allocator: *mem.Allocator, in_stream: var) ![]u8 {
|
||||
var buf = ArrayList(u8).init(allocator);
|
||||
while (true) {
|
||||
const byte = try in_stream.readByte();
|
||||
@ -572,30 +572,30 @@ fn readStringRaw(allocator: &mem.Allocator, in_stream: var) ![]u8 {
|
||||
return buf.toSlice();
|
||||
}
|
||||
|
||||
fn getString(st: &ElfStackTrace, offset: u64) ![]u8 {
|
||||
fn getString(st: *ElfStackTrace, offset: u64) ![]u8 {
|
||||
const pos = st.debug_str.offset + offset;
|
||||
try st.self_exe_file.seekTo(pos);
|
||||
return st.readString();
|
||||
}
|
||||
|
||||
fn readAllocBytes(allocator: &mem.Allocator, in_stream: var, size: usize) ![]u8 {
|
||||
fn readAllocBytes(allocator: *mem.Allocator, in_stream: var, size: usize) ![]u8 {
|
||||
const buf = try allocator.alloc(u8, size);
|
||||
errdefer allocator.free(buf);
|
||||
if ((try in_stream.read(buf)) < size) return error.EndOfFile;
|
||||
return buf;
|
||||
}
|
||||
|
||||
fn parseFormValueBlockLen(allocator: &mem.Allocator, in_stream: var, size: usize) !FormValue {
|
||||
fn parseFormValueBlockLen(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
|
||||
const buf = try readAllocBytes(allocator, in_stream, size);
|
||||
return FormValue{ .Block = buf };
|
||||
}
|
||||
|
||||
fn parseFormValueBlock(allocator: &mem.Allocator, in_stream: var, size: usize) !FormValue {
|
||||
fn parseFormValueBlock(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
|
||||
const block_len = try in_stream.readVarInt(builtin.Endian.Little, usize, size);
|
||||
return parseFormValueBlockLen(allocator, in_stream, block_len);
|
||||
}
|
||||
|
||||
fn parseFormValueConstant(allocator: &mem.Allocator, in_stream: var, signed: bool, size: usize) !FormValue {
|
||||
fn parseFormValueConstant(allocator: *mem.Allocator, in_stream: var, signed: bool, size: usize) !FormValue {
|
||||
return FormValue{
|
||||
.Const = Constant{
|
||||
.signed = signed,
|
||||
@ -612,12 +612,12 @@ fn parseFormValueTargetAddrSize(in_stream: var) !u64 {
|
||||
return if (@sizeOf(usize) == 4) u64(try in_stream.readIntLe(u32)) else if (@sizeOf(usize) == 8) try in_stream.readIntLe(u64) else unreachable;
|
||||
}
|
||||
|
||||
fn parseFormValueRefLen(allocator: &mem.Allocator, in_stream: var, size: usize) !FormValue {
|
||||
fn parseFormValueRefLen(allocator: *mem.Allocator, in_stream: var, size: usize) !FormValue {
|
||||
const buf = try readAllocBytes(allocator, in_stream, size);
|
||||
return FormValue{ .Ref = buf };
|
||||
}
|
||||
|
||||
fn parseFormValueRef(allocator: &mem.Allocator, in_stream: var, comptime T: type) !FormValue {
|
||||
fn parseFormValueRef(allocator: *mem.Allocator, in_stream: var, comptime T: type) !FormValue {
|
||||
const block_len = try in_stream.readIntLe(T);
|
||||
return parseFormValueRefLen(allocator, in_stream, block_len);
|
||||
}
|
||||
@ -632,7 +632,7 @@ const ParseFormValueError = error{
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
fn parseFormValue(allocator: &mem.Allocator, in_stream: var, form_id: u64, is_64: bool) ParseFormValueError!FormValue {
|
||||
fn parseFormValue(allocator: *mem.Allocator, in_stream: var, form_id: u64, is_64: bool) ParseFormValueError!FormValue {
|
||||
return switch (form_id) {
|
||||
DW.FORM_addr => FormValue{ .Address = try parseFormValueTargetAddrSize(in_stream) },
|
||||
DW.FORM_block1 => parseFormValueBlock(allocator, in_stream, 1),
|
||||
@ -682,7 +682,7 @@ fn parseFormValue(allocator: &mem.Allocator, in_stream: var, form_id: u64, is_64
|
||||
};
|
||||
}
|
||||
|
||||
fn parseAbbrevTable(st: &ElfStackTrace) !AbbrevTable {
|
||||
fn parseAbbrevTable(st: *ElfStackTrace) !AbbrevTable {
|
||||
const in_file = &st.self_exe_file;
|
||||
var in_file_stream = io.FileInStream.init(in_file);
|
||||
const in_stream = &in_file_stream.stream;
|
||||
@ -712,7 +712,7 @@ fn parseAbbrevTable(st: &ElfStackTrace) !AbbrevTable {
|
||||
|
||||
/// Gets an already existing AbbrevTable given the abbrev_offset, or if not found,
|
||||
/// seeks in the stream and parses it.
|
||||
fn getAbbrevTable(st: &ElfStackTrace, abbrev_offset: u64) !&const AbbrevTable {
|
||||
fn getAbbrevTable(st: *ElfStackTrace, abbrev_offset: u64) !*const AbbrevTable {
|
||||
for (st.abbrev_table_list.toSlice()) |*header| {
|
||||
if (header.offset == abbrev_offset) {
|
||||
return &header.table;
|
||||
@ -726,14 +726,14 @@ fn getAbbrevTable(st: &ElfStackTrace, abbrev_offset: u64) !&const AbbrevTable {
|
||||
return &st.abbrev_table_list.items[st.abbrev_table_list.len - 1].table;
|
||||
}
|
||||
|
||||
fn getAbbrevTableEntry(abbrev_table: &const AbbrevTable, abbrev_code: u64) ?&const AbbrevTableEntry {
|
||||
fn getAbbrevTableEntry(abbrev_table: *const AbbrevTable, abbrev_code: u64) ?*const AbbrevTableEntry {
|
||||
for (abbrev_table.toSliceConst()) |*table_entry| {
|
||||
if (table_entry.abbrev_code == abbrev_code) return table_entry;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
fn parseDie(st: &ElfStackTrace, abbrev_table: &const AbbrevTable, is_64: bool) !Die {
|
||||
fn parseDie(st: *ElfStackTrace, abbrev_table: *const AbbrevTable, is_64: bool) !Die {
|
||||
const in_file = &st.self_exe_file;
|
||||
var in_file_stream = io.FileInStream.init(in_file);
|
||||
const in_stream = &in_file_stream.stream;
|
||||
@ -755,7 +755,7 @@ fn parseDie(st: &ElfStackTrace, abbrev_table: &const AbbrevTable, is_64: bool) !
|
||||
return result;
|
||||
}
|
||||
|
||||
fn getLineNumberInfo(st: &ElfStackTrace, compile_unit: &const CompileUnit, target_address: usize) !LineInfo {
|
||||
fn getLineNumberInfo(st: *ElfStackTrace, compile_unit: *const CompileUnit, target_address: usize) !LineInfo {
|
||||
const compile_unit_cwd = try compile_unit.die.getAttrString(st, DW.AT_comp_dir);
|
||||
|
||||
const in_file = &st.self_exe_file;
|
||||
@ -934,7 +934,7 @@ fn getLineNumberInfo(st: &ElfStackTrace, compile_unit: &const CompileUnit, targe
|
||||
return error.MissingDebugInfo;
|
||||
}
|
||||
|
||||
fn scanAllCompileUnits(st: &ElfStackTrace) !void {
|
||||
fn scanAllCompileUnits(st: *ElfStackTrace) !void {
|
||||
const debug_info_end = st.debug_info.offset + st.debug_info.size;
|
||||
var this_unit_offset = st.debug_info.offset;
|
||||
var cu_index: usize = 0;
|
||||
@ -1005,7 +1005,7 @@ fn scanAllCompileUnits(st: &ElfStackTrace) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn findCompileUnit(st: &ElfStackTrace, target_address: u64) !&const CompileUnit {
|
||||
fn findCompileUnit(st: *ElfStackTrace, target_address: u64) !*const CompileUnit {
|
||||
var in_file_stream = io.FileInStream.init(&st.self_exe_file);
|
||||
const in_stream = &in_file_stream.stream;
|
||||
for (st.compile_unit_list.toSlice()) |*compile_unit| {
|
||||
@ -1039,7 +1039,7 @@ fn findCompileUnit(st: &ElfStackTrace, target_address: u64) !&const CompileUnit
|
||||
return error.MissingDebugInfo;
|
||||
}
|
||||
|
||||
fn readInitialLength(comptime E: type, in_stream: &io.InStream(E), is_64: &bool) !u64 {
|
||||
fn readInitialLength(comptime E: type, in_stream: *io.InStream(E), is_64: *bool) !u64 {
|
||||
const first_32_bits = try in_stream.readIntLe(u32);
|
||||
is_64.* = (first_32_bits == 0xffffffff);
|
||||
if (is_64.*) {
|
||||
@ -1096,10 +1096,10 @@ var global_fixed_allocator = std.heap.FixedBufferAllocator.init(global_allocator
|
||||
var global_allocator_mem: [100 * 1024]u8 = undefined;
|
||||
|
||||
// TODO make thread safe
|
||||
var debug_info_allocator: ?&mem.Allocator = null;
|
||||
var debug_info_allocator: ?*mem.Allocator = null;
|
||||
var debug_info_direct_allocator: std.heap.DirectAllocator = undefined;
|
||||
var debug_info_arena_allocator: std.heap.ArenaAllocator = undefined;
|
||||
fn getDebugInfoAllocator() &mem.Allocator {
|
||||
fn getDebugInfoAllocator() *mem.Allocator {
|
||||
if (debug_info_allocator) |a| return a;
|
||||
|
||||
debug_info_direct_allocator = std.heap.DirectAllocator.init();
|
||||
|
||||
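// A minimal sketch, not part of the commit (`bump` is invented for the
// example), of the pointer-syntax reform these hunks apply: pointer types
// move from `&T` to `*T` (single item) or `[*]T` (unknown length), while
// `&x` remains the address-of operator.
const std = @import("std");

fn bump(x: *i32) void {
    x.* += 1;
}

test "single-item pointer parameter after the reform" {
    var v: i32 = 1;
    bump(&v); // taking the address is still spelled `&v`
    std.debug.assert(v == 2);
}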
18
std/elf.zig
18
std/elf.zig
@ -338,7 +338,7 @@ pub const SectionHeader = struct {
|
||||
};
|
||||
|
||||
pub const Elf = struct {
|
||||
in_file: &os.File,
|
||||
in_file: *os.File,
|
||||
auto_close_stream: bool,
|
||||
is_64: bool,
|
||||
endian: builtin.Endian,
|
||||
@ -348,20 +348,20 @@ pub const Elf = struct {
|
||||
program_header_offset: u64,
|
||||
section_header_offset: u64,
|
||||
string_section_index: u64,
|
||||
string_section: &SectionHeader,
|
||||
string_section: *SectionHeader,
|
||||
section_headers: []SectionHeader,
|
||||
allocator: &mem.Allocator,
|
||||
allocator: *mem.Allocator,
|
||||
prealloc_file: os.File,
|
||||
|
||||
/// Call close when done.
|
||||
pub fn openPath(elf: &Elf, allocator: &mem.Allocator, path: []const u8) !void {
|
||||
pub fn openPath(elf: *Elf, allocator: *mem.Allocator, path: []const u8) !void {
|
||||
try elf.prealloc_file.open(path);
|
||||
try elf.openFile(allocator, &elf.prealloc_file);
|
||||
try elf.openFile(allocator, &elf.prealloc_file);
|
||||
elf.auto_close_stream = true;
|
||||
}
|
||||
|
||||
/// Call close when done.
|
||||
pub fn openFile(elf: &Elf, allocator: &mem.Allocator, file: &os.File) !void {
|
||||
pub fn openFile(elf: *Elf, allocator: *mem.Allocator, file: *os.File) !void {
|
||||
elf.allocator = allocator;
|
||||
elf.in_file = file;
|
||||
elf.auto_close_stream = false;
|
||||
@ -503,13 +503,13 @@ pub const Elf = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn close(elf: &Elf) void {
|
||||
pub fn close(elf: *Elf) void {
|
||||
elf.allocator.free(elf.section_headers);
|
||||
|
||||
if (elf.auto_close_stream) elf.in_file.close();
|
||||
}
|
||||
|
||||
pub fn findSection(elf: &Elf, name: []const u8) !?&SectionHeader {
|
||||
pub fn findSection(elf: *Elf, name: []const u8) !?*SectionHeader {
|
||||
var file_stream = io.FileInStream.init(elf.in_file);
|
||||
const in = &file_stream.stream;
|
||||
|
||||
@ -533,7 +533,7 @@ pub const Elf = struct {
|
||||
return null;
|
||||
}
|
||||
|
||||
pub fn seekToSection(elf: &Elf, elf_section: &SectionHeader) !void {
|
||||
pub fn seekToSection(elf: *Elf, elf_section: *SectionHeader) !void {
|
||||
try elf.in_file.seekTo(elf_section.offset);
|
||||
}
|
||||
};
|
||||
|
||||
@ -6,9 +6,9 @@ const mem = std.mem;
|
||||
const posix = std.os.posix;
|
||||
|
||||
pub const TcpServer = struct {
|
||||
handleRequestFn: async<&mem.Allocator> fn(&TcpServer, &const std.net.Address, &const std.os.File) void,
|
||||
handleRequestFn: async<*mem.Allocator> fn (*TcpServer, *const std.net.Address, *const std.os.File) void,
|
||||
|
||||
loop: &Loop,
|
||||
loop: *Loop,
|
||||
sockfd: i32,
|
||||
accept_coro: ?promise,
|
||||
listen_address: std.net.Address,
|
||||
@ -17,7 +17,7 @@ pub const TcpServer = struct {
|
||||
|
||||
const PromiseNode = std.LinkedList(promise).Node;
|
||||
|
||||
pub fn init(loop: &Loop) !TcpServer {
|
||||
pub fn init(loop: *Loop) !TcpServer {
|
||||
const sockfd = try std.os.posixSocket(posix.AF_INET, posix.SOCK_STREAM | posix.SOCK_CLOEXEC | posix.SOCK_NONBLOCK, posix.PROTO_tcp);
|
||||
errdefer std.os.close(sockfd);
|
||||
|
||||
@ -32,7 +32,7 @@ pub const TcpServer = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn listen(self: &TcpServer, address: &const std.net.Address, handleRequestFn: async<&mem.Allocator> fn(&TcpServer, &const std.net.Address, &const std.os.File) void) !void {
|
||||
pub fn listen(self: *TcpServer, address: *const std.net.Address, handleRequestFn: async<*mem.Allocator> fn (*TcpServer, *const std.net.Address, *const std.os.File) void) !void {
|
||||
self.handleRequestFn = handleRequestFn;
|
||||
|
||||
try std.os.posixBind(self.sockfd, &address.os_addr);
|
||||
@ -46,13 +46,13 @@ pub const TcpServer = struct {
|
||||
errdefer self.loop.removeFd(self.sockfd);
|
||||
}
|
||||
|
||||
pub fn deinit(self: &TcpServer) void {
|
||||
pub fn deinit(self: *TcpServer) void {
|
||||
self.loop.removeFd(self.sockfd);
|
||||
if (self.accept_coro) |accept_coro| cancel accept_coro;
|
||||
std.os.close(self.sockfd);
|
||||
}
|
||||
|
||||
pub async fn handler(self: &TcpServer) void {
|
||||
pub async fn handler(self: *TcpServer) void {
|
||||
while (true) {
|
||||
var accepted_addr: std.net.Address = undefined;
|
||||
if (std.os.posixAccept(self.sockfd, &accepted_addr.os_addr, posix.SOCK_NONBLOCK | posix.SOCK_CLOEXEC)) |accepted_fd| {
|
||||
@ -92,11 +92,11 @@ pub const TcpServer = struct {
|
||||
};
|
||||
|
||||
pub const Loop = struct {
|
||||
allocator: &mem.Allocator,
|
||||
allocator: *mem.Allocator,
|
||||
epollfd: i32,
|
||||
keep_running: bool,
|
||||
|
||||
fn init(allocator: &mem.Allocator) !Loop {
|
||||
fn init(allocator: *mem.Allocator) !Loop {
|
||||
const epollfd = try std.os.linuxEpollCreate(std.os.linux.EPOLL_CLOEXEC);
|
||||
return Loop{
|
||||
.keep_running = true,
|
||||
@ -105,7 +105,7 @@ pub const Loop = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn addFd(self: &Loop, fd: i32, prom: promise) !void {
|
||||
pub fn addFd(self: *Loop, fd: i32, prom: promise) !void {
|
||||
var ev = std.os.linux.epoll_event{
|
||||
.events = std.os.linux.EPOLLIN | std.os.linux.EPOLLOUT | std.os.linux.EPOLLET,
|
||||
.data = std.os.linux.epoll_data{ .ptr = @ptrToInt(prom) },
|
||||
@ -113,23 +113,23 @@ pub const Loop = struct {
|
||||
try std.os.linuxEpollCtl(self.epollfd, std.os.linux.EPOLL_CTL_ADD, fd, &ev);
|
||||
}
|
||||
|
||||
pub fn removeFd(self: &Loop, fd: i32) void {
|
||||
pub fn removeFd(self: *Loop, fd: i32) void {
|
||||
std.os.linuxEpollCtl(self.epollfd, std.os.linux.EPOLL_CTL_DEL, fd, undefined) catch {};
|
||||
}
|
||||
async fn waitFd(self: &Loop, fd: i32) !void {
|
||||
async fn waitFd(self: *Loop, fd: i32) !void {
|
||||
defer self.removeFd(fd);
|
||||
suspend |p| {
|
||||
try self.addFd(fd, p);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stop(self: &Loop) void {
|
||||
pub fn stop(self: *Loop) void {
|
||||
// TODO make atomic
|
||||
self.keep_running = false;
|
||||
// TODO activate an fd in the epoll set
|
||||
}
|
||||
|
||||
pub fn run(self: &Loop) void {
|
||||
pub fn run(self: *Loop) void {
|
||||
while (self.keep_running) {
|
||||
var events: [16]std.os.linux.epoll_event = undefined;
|
||||
const count = std.os.linuxEpollWait(self.epollfd, events[0..], -1);
|
||||
@ -141,7 +141,7 @@ pub const Loop = struct {
|
||||
}
|
||||
};
|
||||
|
||||
pub async fn connect(loop: &Loop, _address: &const std.net.Address) !std.os.File {
|
||||
pub async fn connect(loop: *Loop, _address: *const std.net.Address) !std.os.File {
|
||||
var address = _address.*; // TODO https://github.com/ziglang/zig/issues/733
|
||||
|
||||
const sockfd = try std.os.posixSocket(posix.AF_INET, posix.SOCK_STREAM | posix.SOCK_CLOEXEC | posix.SOCK_NONBLOCK, posix.PROTO_tcp);
|
||||
@ -163,7 +163,7 @@ test "listen on a port, send bytes, receive bytes" {
|
||||
tcp_server: TcpServer,
|
||||
|
||||
const Self = this;
|
||||
async<&mem.Allocator> fn handler(tcp_server: &TcpServer, _addr: &const std.net.Address, _socket: &const std.os.File) void {
|
||||
async<*mem.Allocator> fn handler(tcp_server: *TcpServer, _addr: *const std.net.Address, _socket: *const std.os.File) void {
|
||||
const self = @fieldParentPtr(Self, "tcp_server", tcp_server);
|
||||
var socket = _socket.*; // TODO https://github.com/ziglang/zig/issues/733
|
||||
defer socket.close();
|
||||
@ -177,7 +177,7 @@ test "listen on a port, send bytes, receive bytes" {
|
||||
cancel p;
|
||||
}
|
||||
}
|
||||
async fn errorableHandler(self: &Self, _addr: &const std.net.Address, _socket: &const std.os.File) !void {
|
||||
async fn errorableHandler(self: *Self, _addr: *const std.net.Address, _socket: *const std.os.File) !void {
|
||||
const addr = _addr.*; // TODO https://github.com/ziglang/zig/issues/733
|
||||
var socket = _socket.*; // TODO https://github.com/ziglang/zig/issues/733
|
||||
|
||||
@ -199,7 +199,7 @@ test "listen on a port, send bytes, receive bytes" {
|
||||
defer cancel p;
|
||||
loop.run();
|
||||
}
|
||||
async fn doAsyncTest(loop: &Loop, address: &const std.net.Address) void {
|
||||
async fn doAsyncTest(loop: *Loop, address: *const std.net.Address) void {
|
||||
errdefer @panic("test failure");
|
||||
|
||||
var socket_file = try await try async event.connect(loop, address);
|
||||
|
||||
@ -21,7 +21,7 @@ pub const RoundMode = enum {
|
||||
|
||||
/// Round a FloatDecimal as returned by errol3 to the specified fractional precision.
|
||||
/// All digits after the specified precision should be considered invalid.
|
||||
pub fn roundToPrecision(float_decimal: &FloatDecimal, precision: usize, mode: RoundMode) void {
|
||||
pub fn roundToPrecision(float_decimal: *FloatDecimal, precision: usize, mode: RoundMode) void {
|
||||
// The round digit refers to the index which we should look at to determine
|
||||
// whether we need to round to match the specified precision.
|
||||
var round_digit: usize = 0;
|
||||
@ -59,8 +59,8 @@ pub fn roundToPrecision(float_decimal: &FloatDecimal, precision: usize, mode: Ro
|
||||
float_decimal.exp += 1;
|
||||
|
||||
// Re-size the buffer to use the reserved leading byte.
|
||||
const one_before = @intToPtr(&u8, @ptrToInt(&float_decimal.digits[0]) - 1);
|
||||
float_decimal.digits = one_before[0..float_decimal.digits.len + 1];
|
||||
const one_before = @intToPtr(*u8, @ptrToInt(&float_decimal.digits[0]) - 1);
|
||||
float_decimal.digits = one_before[0 .. float_decimal.digits.len + 1];
|
||||
float_decimal.digits[0] = '1';
|
||||
return;
|
||||
}
|
||||
@ -84,7 +84,7 @@ pub fn errol3(value: f64, buffer: []u8) FloatDecimal {
|
||||
const i = tableLowerBound(bits);
|
||||
if (i < enum3.len and enum3[i] == bits) {
|
||||
const data = enum3_data[i];
|
||||
const digits = buffer[1..data.str.len + 1];
|
||||
const digits = buffer[1 .. data.str.len + 1];
|
||||
mem.copy(u8, digits, data.str);
|
||||
return FloatDecimal{
|
||||
.digits = digits,
|
||||
@ -98,7 +98,6 @@ pub fn errol3(value: f64, buffer: []u8) FloatDecimal {
|
||||
/// Uncorrected Errol3 double to ASCII conversion.
|
||||
fn errol3u(val: f64, buffer: []u8) FloatDecimal {
|
||||
// check if in integer or fixed range
|
||||
|
||||
if (val > 9.007199254740992e15 and val < 3.40282366920938e+38) {
|
||||
return errolInt(val, buffer);
|
||||
} else if (val >= 16.0 and val < 9.007199254740992e15) {
|
||||
@ -218,7 +217,7 @@ fn tableLowerBound(k: u64) usize {
|
||||
/// @in: The HP number.
|
||||
/// @val: The double.
|
||||
/// &returns: The HP number.
|
||||
fn hpProd(in: &const HP, val: f64) HP {
|
||||
fn hpProd(in: *const HP, val: f64) HP {
|
||||
var hi: f64 = undefined;
|
||||
var lo: f64 = undefined;
|
||||
split(in.val, &hi, &lo);
|
||||
@ -240,7 +239,7 @@ fn hpProd(in: &const HP, val: f64) HP {
|
||||
/// @val: The double.
|
||||
/// @hi: The high bits.
|
||||
/// @lo: The low bits.
|
||||
fn split(val: f64, hi: &f64, lo: &f64) void {
|
||||
fn split(val: f64, hi: *f64, lo: *f64) void {
|
||||
hi.* = gethi(val);
|
||||
lo.* = val - hi.*;
|
||||
}
|
||||
@ -253,7 +252,7 @@ fn gethi(in: f64) f64 {
|
||||
|
||||
/// Normalize the number by factoring in the error.
|
||||
/// @hp: The float pair.
|
||||
fn hpNormalize(hp: &HP) void {
|
||||
fn hpNormalize(hp: *HP) void {
|
||||
// Required to avoid segfaults causing buffer overrun during errol3 digit output termination.
|
||||
@setFloatMode(this, @import("builtin").FloatMode.Strict);
|
||||
|
||||
@ -265,7 +264,7 @@ fn hpNormalize(hp: &HP) void {
|
||||
|
||||
/// Divide the high-precision number by ten.
|
||||
/// @hp: The high-precision number
|
||||
fn hpDiv10(hp: &HP) void {
|
||||
fn hpDiv10(hp: *HP) void {
|
||||
var val = hp.val;
|
||||
|
||||
hp.val /= 10.0;
|
||||
@ -281,7 +280,7 @@ fn hpDiv10(hp: &HP) void {
|
||||
|
||||
/// Multiply the high-precision number by ten.
|
||||
/// @hp: The high-precision number
|
||||
fn hpMul10(hp: &HP) void {
|
||||
fn hpMul10(hp: *HP) void {
|
||||
const val = hp.val;
|
||||
|
||||
hp.val *= 10.0;
|
||||
@ -420,7 +419,7 @@ fn fpprev(val: f64) f64 {
|
||||
return @bitCast(f64, @bitCast(u64, val) -% 1);
|
||||
}
|
||||
|
||||
pub const c_digits_lut = []u8 {
|
||||
pub const c_digits_lut = []u8{
|
||||
'0', '0', '0', '1', '0', '2', '0', '3', '0', '4', '0', '5', '0', '6',
|
||||
'0', '7', '0', '8', '0', '9', '1', '0', '1', '1', '1', '2', '1', '3',
|
||||
'1', '4', '1', '5', '1', '6', '1', '7', '1', '8', '1', '9', '2', '0',
|
||||
|
||||
File diff suppressed because it is too large
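// A hedged sketch, not part of the commit (`hexDigitValue` is invented for
// the example), of the zig fmt rules visible in the std/fmt hunks below:
// switch ranges lose their inner spaces (`'0' ... '9'` becomes `'0'...'9'`),
// function types gain a space after `fn`, and slice bounds built from
// expressions gain spaces around `..`.
fn hexDigitValue(c: u8) ?u8 {
    return switch (c) {
        '0'...'9' => c - '0',
        'a'...'f' => c - 'a' + 10,
        else => null,
    };
}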
@ -11,7 +11,7 @@ const max_int_digits = 65;
|
||||
/// Renders fmt string with args, calling output with slices of bytes.
|
||||
/// If `output` returns an error, the error is returned from `format` and
|
||||
/// `output` is not called again.
|
||||
pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void, comptime fmt: []const u8, args: ...) Errors!void {
|
||||
pub fn format(context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void, comptime fmt: []const u8, args: ...) Errors!void {
|
||||
const State = enum {
|
||||
Start,
|
||||
OpenBrace,
|
||||
@ -107,7 +107,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
'}' => {
|
||||
return output(context, args[next_arg]);
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
width_start = i;
|
||||
state = State.BufWidth;
|
||||
},
|
||||
@ -127,7 +127,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
width_start = i;
|
||||
state = State.IntegerWidth;
|
||||
},
|
||||
@ -141,7 +141,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {},
|
||||
'0'...'9' => {},
|
||||
else => @compileError("Unexpected character in format string: " ++ []u8{c}),
|
||||
},
|
||||
State.FloatScientific => switch (c) {
|
||||
@ -151,7 +151,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
width_start = i;
|
||||
state = State.FloatScientificWidth;
|
||||
},
|
||||
@ -165,7 +165,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {},
|
||||
'0'...'9' => {},
|
||||
else => @compileError("Unexpected character in format string: " ++ []u8{c}),
|
||||
},
|
||||
State.Float => switch (c) {
|
||||
@ -175,7 +175,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
width_start = i;
|
||||
state = State.FloatWidth;
|
||||
},
|
||||
@ -189,7 +189,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {},
|
||||
'0'...'9' => {},
|
||||
else => @compileError("Unexpected character in format string: " ++ []u8{c}),
|
||||
},
|
||||
State.BufWidth => switch (c) {
|
||||
@ -200,7 +200,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {},
|
||||
'0'...'9' => {},
|
||||
else => @compileError("Unexpected character in format string: " ++ []u8{c}),
|
||||
},
|
||||
State.Character => switch (c) {
|
||||
@ -223,7 +223,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
radix = 1024;
|
||||
state = State.BytesBase;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
width_start = i;
|
||||
state = State.BytesWidth;
|
||||
},
|
||||
@ -236,7 +236,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
width_start = i;
|
||||
state = State.BytesWidth;
|
||||
},
|
||||
@ -250,7 +250,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
state = State.Start;
|
||||
start_index = i + 1;
|
||||
},
|
||||
'0' ... '9' => {},
|
||||
'0'...'9' => {},
|
||||
else => @compileError("Unexpected character in format string: " ++ []u8{c}),
|
||||
},
|
||||
}
|
||||
@ -268,7 +268,7 @@ pub fn format(context: var, comptime Errors: type, output: fn(@typeOf(context),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn formatValue(value: var, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
pub fn formatValue(value: var, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
const T = @typeOf(value);
|
||||
switch (@typeId(T)) {
|
||||
builtin.TypeId.Int => {
|
||||
@ -317,11 +317,11 @@ pub fn formatValue(value: var, context: var, comptime Errors: type, output: fn(@
|
||||
}
|
||||
}
|
||||
|
||||
pub fn formatAsciiChar(c: u8, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
pub fn formatAsciiChar(c: u8, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
return output(context, (&c)[0..1]);
|
||||
}
|
||||
|
||||
pub fn formatBuf(buf: []const u8, width: usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
pub fn formatBuf(buf: []const u8, width: usize, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
try output(context, buf);
|
||||
|
||||
var leftover_padding = if (width > buf.len) (width - buf.len) else return;
|
||||
@ -334,7 +334,7 @@ pub fn formatBuf(buf: []const u8, width: usize, context: var, comptime Errors: t
|
||||
// Print a float in scientific notation to the specified precision. Null uses full precision.
|
||||
// It should be the case that every full precision, printed value can be re-parsed back to the
|
||||
// same type unambiguously.
|
||||
pub fn formatFloatScientific(value: var, maybe_precision: ?usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
pub fn formatFloatScientific(value: var, maybe_precision: ?usize, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
var x = f64(value);
|
||||
|
||||
// Errol doesn't handle these special cases.
|
||||
@ -423,7 +423,7 @@ pub fn formatFloatScientific(value: var, maybe_precision: ?usize, context: var,
|
||||
|
||||
// Print a float of the format x.yyyyy where the number of y is specified by the precision argument.
|
||||
// By default floats are printed at full precision (no rounding).
|
||||
pub fn formatFloatDecimal(value: var, maybe_precision: ?usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
pub fn formatFloatDecimal(value: var, maybe_precision: ?usize, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
var x = f64(value);
|
||||
|
||||
// Errol doesn't handle these special cases.
|
||||
@ -512,7 +512,7 @@ pub fn formatFloatDecimal(value: var, maybe_precision: ?usize, context: var, com
|
||||
// Remaining fractional portion, zero-padding if insufficient.
|
||||
debug.assert(precision >= printed);
|
||||
if (num_digits_whole_no_pad + precision - printed < float_decimal.digits.len) {
|
||||
try output(context, float_decimal.digits[num_digits_whole_no_pad..num_digits_whole_no_pad + precision - printed]);
|
||||
try output(context, float_decimal.digits[num_digits_whole_no_pad .. num_digits_whole_no_pad + precision - printed]);
|
||||
return;
|
||||
} else {
|
||||
try output(context, float_decimal.digits[num_digits_whole_no_pad..]);
|
||||
@ -562,9 +562,14 @@ pub fn formatFloatDecimal(value: var, maybe_precision: ?usize, context: var, com
|
||||
}
|
||||
}
|
||||
|
||||
pub fn formatBytes(value: var, width: ?usize, comptime radix: usize,
|
||||
context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8)Errors!void) Errors!void
|
||||
{
|
||||
pub fn formatBytes(
|
||||
value: var,
|
||||
width: ?usize,
|
||||
comptime radix: usize,
|
||||
context: var,
|
||||
comptime Errors: type,
|
||||
output: fn (@typeOf(context), []const u8) Errors!void,
|
||||
) Errors!void {
|
||||
if (value == 0) {
|
||||
return output(context, "0B");
|
||||
}
|
||||
@ -585,16 +590,22 @@ pub fn formatBytes(value: var, width: ?usize, comptime radix: usize,
|
||||
}
|
||||
|
||||
const buf = switch (radix) {
|
||||
1000 => []u8 { suffix, 'B' },
|
||||
1024 => []u8 { suffix, 'i', 'B' },
|
||||
1000 => []u8{ suffix, 'B' },
|
||||
1024 => []u8{ suffix, 'i', 'B' },
|
||||
else => unreachable,
|
||||
};
|
||||
return output(context, buf);
|
||||
}
|
||||
|
||||
pub fn formatInt(value: var, base: u8, uppercase: bool, width: usize,
|
||||
context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8)Errors!void) Errors!void
|
||||
{
|
||||
pub fn formatInt(
|
||||
value: var,
|
||||
base: u8,
|
||||
uppercase: bool,
|
||||
width: usize,
|
||||
context: var,
|
||||
comptime Errors: type,
|
||||
output: fn (@typeOf(context), []const u8) Errors!void,
|
||||
) Errors!void {
|
||||
if (@typeOf(value).is_signed) {
|
||||
return formatIntSigned(value, base, uppercase, width, context, Errors, output);
|
||||
} else {
|
||||
@ -602,7 +613,7 @@ pub fn formatInt(value: var, base: u8, uppercase: bool, width: usize,
|
||||
}
|
||||
}
|
||||
|
||||
fn formatIntSigned(value: var, base: u8, uppercase: bool, width: usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
fn formatIntSigned(value: var, base: u8, uppercase: bool, width: usize, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
const uint = @IntType(false, @typeOf(value).bit_count);
|
||||
if (value < 0) {
|
||||
const minus_sign: u8 = '-';
|
||||
@ -621,7 +632,7 @@ fn formatIntSigned(value: var, base: u8, uppercase: bool, width: usize, context:
|
||||
}
|
||||
}
|
||||
|
||||
fn formatIntUnsigned(value: var, base: u8, uppercase: bool, width: usize, context: var, comptime Errors: type, output: fn(@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
fn formatIntUnsigned(value: var, base: u8, uppercase: bool, width: usize, context: var, comptime Errors: type, output: fn (@typeOf(context), []const u8) Errors!void) Errors!void {
|
||||
// max_int_digits accounts for the minus sign. when printing an unsigned
|
||||
// number we don't need to do that.
|
||||
var buf: [max_int_digits - 1]u8 = undefined;
|
||||
@ -650,7 +661,7 @@ fn formatIntUnsigned(value: var, base: u8, uppercase: bool, width: usize, contex
|
||||
mem.set(u8, buf[0..index], '0');
|
||||
return output(context, buf);
|
||||
} else {
|
||||
const padded_buf = buf[index - padding..];
|
||||
const padded_buf = buf[index - padding ..];
|
||||
mem.set(u8, padded_buf[0..padding], '0');
|
||||
return output(context, padded_buf);
|
||||
}
|
||||
@ -668,7 +679,7 @@ const FormatIntBuf = struct {
|
||||
out_buf: []u8,
|
||||
index: usize,
|
||||
};
|
||||
fn formatIntCallback(context: &FormatIntBuf, bytes: []const u8) (error{}!void) {
|
||||
fn formatIntCallback(context: *FormatIntBuf, bytes: []const u8) (error{}!void) {
|
||||
mem.copy(u8, context.out_buf[context.index..], bytes);
|
||||
context.index += bytes.len;
|
||||
}
|
||||
@ -717,9 +728,9 @@ pub fn parseUnsigned(comptime T: type, buf: []const u8, radix: u8) ParseUnsigned
|
||||
|
||||
pub fn charToDigit(c: u8, radix: u8) (error{InvalidCharacter}!u8) {
|
||||
const value = switch (c) {
|
||||
'0' ... '9' => c - '0',
|
||||
'A' ... 'Z' => c - 'A' + 10,
|
||||
'a' ... 'z' => c - 'a' + 10,
|
||||
'0'...'9' => c - '0',
|
||||
'A'...'Z' => c - 'A' + 10,
|
||||
'a'...'z' => c - 'a' + 10,
|
||||
else => return error.InvalidCharacter,
|
||||
};
|
||||
|
||||
@ -730,8 +741,8 @@ pub fn charToDigit(c: u8, radix: u8) (error{InvalidCharacter}!u8) {
|
||||
|
||||
fn digitToChar(digit: u8, uppercase: bool) u8 {
|
||||
return switch (digit) {
|
||||
0 ... 9 => digit + '0',
|
||||
10 ... 35 => digit + ((if (uppercase) u8('A') else u8('a')) - 10),
|
||||
0...9 => digit + '0',
|
||||
10...35 => digit + ((if (uppercase) u8('A') else u8('a')) - 10),
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
@ -740,7 +751,7 @@ const BufPrintContext = struct {
|
||||
remaining: []u8,
|
||||
};
|
||||
|
||||
fn bufPrintWrite(context: &BufPrintContext, bytes: []const u8) !void {
|
||||
fn bufPrintWrite(context: *BufPrintContext, bytes: []const u8) !void {
|
||||
if (context.remaining.len < bytes.len) return error.BufferTooSmall;
|
||||
mem.copy(u8, context.remaining, bytes);
|
||||
context.remaining = context.remaining[bytes.len..];
|
||||
@ -749,18 +760,17 @@ fn bufPrintWrite(context: &BufPrintContext, bytes: []const u8) !void {
|
||||
pub fn bufPrint(buf: []u8, comptime fmt: []const u8, args: ...) ![]u8 {
|
||||
var context = BufPrintContext{ .remaining = buf };
|
||||
try format(&context, error{BufferTooSmall}, bufPrintWrite, fmt, args);
|
||||
return buf[0..buf.len - context.remaining.len];
|
||||
return buf[0 .. buf.len - context.remaining.len];
|
||||
}
|
||||
|
||||
pub fn allocPrint(allocator: &mem.Allocator, comptime fmt: []const u8, args: ...) ![]u8 {
|
||||
pub fn allocPrint(allocator: *mem.Allocator, comptime fmt: []const u8, args: ...) ![]u8 {
|
||||
var size: usize = 0;
|
||||
format(&size, error{}, countSize, fmt, args) catch |err| switch (err) {
|
||||
};
|
||||
format(&size, error{}, countSize, fmt, args) catch |err| switch (err) {};
|
||||
const buf = try allocator.alloc(u8, size);
|
||||
return bufPrint(buf, fmt, args);
|
||||
}
|
||||
|
||||
fn countSize(size: &usize, bytes: []const u8) (error{}!void) {
|
||||
fn countSize(size: *usize, bytes: []const u8) (error{}!void) {
|
||||
size.* += bytes.len;
|
||||
}
|
||||
|
||||
@ -1043,8 +1053,7 @@ test "fmt.format" {
|
||||
fn testFmt(expected: []const u8, comptime template: []const u8, args: ...) !void {
|
||||
var buf: [100]u8 = undefined;
|
||||
const result = try bufPrint(buf[0..], template, args);
|
||||
if (mem.eql(u8, result, expected))
|
||||
return;
|
||||
if (mem.eql(u8, result, expected)) return;
|
||||
|
||||
std.debug.warn("\n====== expected this output: =========\n");
|
||||
std.debug.warn("{}", expected);
|
||||
@ -1082,10 +1091,7 @@ test "fmt.trim" {
|
||||
|
||||
pub fn isWhiteSpace(byte: u8) bool {
|
||||
return switch (byte) {
|
||||
' ',
|
||||
'\t',
|
||||
'\n',
|
||||
'\r' => true,
|
||||
' ', '\t', '\n', '\r' => true,
|
||||
else => false,
|
||||
};
|
||||
}
|
||||
|
||||
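// A rough illustration, not part of the commit (`Range` and `clamp` are
// invented for the example), of the layout changes in the adler32 hunks
// below: zig fmt joins short struct initializers onto one line
// (`Adler32{ .adler = 1 }`) and keeps `} else {` on the closing brace's
// line instead of starting a new one.
const Range = struct {
    lo: u32,
    hi: u32,
};

fn clamp(r: Range, x: u32) u32 {
    if (x < r.lo) {
        return r.lo;
    } else if (x > r.hi) {
        return r.hi;
    } else {
        return x;
    }
}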
@ -13,14 +13,12 @@ pub const Adler32 = struct {
|
||||
adler: u32,
|
||||
|
||||
pub fn init() Adler32 {
|
||||
return Adler32 {
|
||||
.adler = 1,
|
||||
};
|
||||
return Adler32{ .adler = 1 };
|
||||
}
|
||||
|
||||
// This fast variant is taken from zlib. It reduces the required modulos and unrolls longer
|
||||
// buffer inputs and should be much quicker.
|
||||
pub fn update(self: &Adler32, input: []const u8) void {
|
||||
pub fn update(self: *Adler32, input: []const u8) void {
|
||||
var s1 = self.adler & 0xffff;
|
||||
var s2 = (self.adler >> 16) & 0xffff;
|
||||
|
||||
@ -33,8 +31,7 @@ pub const Adler32 = struct {
|
||||
if (s2 >= base) {
|
||||
s2 -= base;
|
||||
}
|
||||
}
|
||||
else if (input.len < 16) {
|
||||
} else if (input.len < 16) {
|
||||
for (input) |b| {
|
||||
s1 +%= b;
|
||||
s2 +%= s1;
|
||||
@ -44,8 +41,7 @@ pub const Adler32 = struct {
|
||||
}
|
||||
|
||||
s2 %= base;
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
var i: usize = 0;
|
||||
while (i + nmax <= input.len) : (i += nmax) {
|
||||
const n = nmax / 16; // note: 16 | nmax
|
||||
@ -81,7 +77,7 @@ pub const Adler32 = struct {
|
||||
self.adler = s1 | (s2 << 16);
|
||||
}
|
||||
|
||||
pub fn final(self: &Adler32) u32 {
|
||||
pub fn final(self: *Adler32) u32 {
|
||||
return self.adler;
|
||||
}
|
||||
|
||||
@ -98,15 +94,14 @@ test "adler32 sanity" {
|
||||
}
|
||||
|
||||
test "adler32 long" {
|
||||
const long1 = []u8 {1} ** 1024;
|
||||
const long1 = []u8{1} ** 1024;
|
||||
debug.assert(Adler32.hash(long1[0..]) == 0x06780401);
|
||||
|
||||
const long2 = []u8 {1} ** 1025;
|
||||
const long2 = []u8{1} ** 1025;
|
||||
debug.assert(Adler32.hash(long2[0..]) == 0x0a7a0402);
|
||||
}
|
||||
|
||||
test "adler32 very long" {
|
||||
const long = []u8 {1} ** 5553;
|
||||
const long = []u8{1} ** 5553;
|
||||
debug.assert(Adler32.hash(long[0..]) == 0x707f15b2);
|
||||
}
|
||||
|
||||
|
||||
@ -58,10 +58,10 @@ pub fn Crc32WithPoly(comptime poly: u32) type {
|
||||
return Self{ .crc = 0xffffffff };
|
||||
}
|
||||
|
||||
pub fn update(self: &Self, input: []const u8) void {
|
||||
pub fn update(self: *Self, input: []const u8) void {
|
||||
var i: usize = 0;
|
||||
while (i + 8 <= input.len) : (i += 8) {
|
||||
const p = input[i..i + 8];
|
||||
const p = input[i .. i + 8];
|
||||
|
||||
// Unrolling this way gives ~50Mb/s increase
|
||||
self.crc ^= (u32(p[0]) << 0);
|
||||
@ -69,7 +69,6 @@ pub fn Crc32WithPoly(comptime poly: u32) type {
|
||||
self.crc ^= (u32(p[2]) << 16);
|
||||
self.crc ^= (u32(p[3]) << 24);
|
||||
|
||||
|
||||
self.crc =
|
||||
lookup_tables[0][p[7]] ^
|
||||
lookup_tables[1][p[6]] ^
|
||||
@ -77,8 +76,8 @@ pub fn Crc32WithPoly(comptime poly: u32) type {
|
||||
lookup_tables[3][p[4]] ^
|
||||
lookup_tables[4][@truncate(u8, self.crc >> 24)] ^
|
||||
lookup_tables[5][@truncate(u8, self.crc >> 16)] ^
|
||||
lookup_tables[6][@truncate(u8, self.crc >> 8)] ^
|
||||
lookup_tables[7][@truncate(u8, self.crc >> 0)];
|
||||
lookup_tables[6][@truncate(u8, self.crc >> 8)] ^
|
||||
lookup_tables[7][@truncate(u8, self.crc >> 0)];
|
||||
}
|
||||
|
||||
while (i < input.len) : (i += 1) {
|
||||
@ -87,7 +86,7 @@ pub fn Crc32WithPoly(comptime poly: u32) type {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn final(self: &Self) u32 {
|
||||
pub fn final(self: *Self) u32 {
|
||||
return ~self.crc;
|
||||
}
|
||||
|
||||
@ -144,14 +143,14 @@ pub fn Crc32SmallWithPoly(comptime poly: u32) type {
|
||||
return Self{ .crc = 0xffffffff };
|
||||
}
|
||||
|
||||
pub fn update(self: &Self, input: []const u8) void {
|
||||
pub fn update(self: *Self, input: []const u8) void {
|
||||
for (input) |b| {
|
||||
self.crc = lookup_table[@truncate(u4, self.crc ^ (b >> 0))] ^ (self.crc >> 4);
|
||||
self.crc = lookup_table[@truncate(u4, self.crc ^ (b >> 4))] ^ (self.crc >> 4);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn final(self: &Self) u32 {
|
||||
pub fn final(self: *Self) u32 {
|
||||
return ~self.crc;
|
||||
}
|
||||
|
||||
|
||||
@ -7,7 +7,7 @@
|
||||
const std = @import("../index.zig");
|
||||
const debug = std.debug;
|
||||
|
||||
pub const Fnv1a_32 = Fnv1a(u32, 0x01000193 , 0x811c9dc5);
|
||||
pub const Fnv1a_32 = Fnv1a(u32, 0x01000193, 0x811c9dc5);
|
||||
pub const Fnv1a_64 = Fnv1a(u64, 0x100000001b3, 0xcbf29ce484222325);
|
||||
pub const Fnv1a_128 = Fnv1a(u128, 0x1000000000000000000013b, 0x6c62272e07bb014262b821756295c58d);
|
||||
|
||||
@ -18,19 +18,17 @@ fn Fnv1a(comptime T: type, comptime prime: T, comptime offset: T) type {
|
||||
value: T,
|
||||
|
||||
pub fn init() Self {
|
||||
return Self {
|
||||
.value = offset,
|
||||
};
|
||||
return Self{ .value = offset };
|
||||
}
|
||||
|
||||
pub fn update(self: &Self, input: []const u8) void {
|
||||
pub fn update(self: *Self, input: []const u8) void {
|
||||
for (input) |b| {
|
||||
self.value ^= b;
|
||||
self.value *%= prime;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn final(self: &Self) T {
|
||||
pub fn final(self: *Self) T {
|
||||
return self.value;
|
||||
}
|
||||
|
||||
|
||||
@ -45,7 +45,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
const k0 = mem.readInt(key[0..8], u64, Endian.Little);
|
||||
const k1 = mem.readInt(key[8..16], u64, Endian.Little);
|
||||
|
||||
var d = Self {
|
||||
var d = Self{
|
||||
.v0 = k0 ^ 0x736f6d6570736575,
|
||||
.v1 = k1 ^ 0x646f72616e646f6d,
|
||||
.v2 = k0 ^ 0x6c7967656e657261,
|
||||
@ -63,7 +63,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
return d;
|
||||
}
|
||||
|
||||
pub fn update(d: &Self, b: []const u8) void {
|
||||
pub fn update(d: *Self, b: []const u8) void {
|
||||
var off: usize = 0;
|
||||
|
||||
// Partial from previous.
|
||||
@ -76,7 +76,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
|
||||
// Full middle blocks.
|
||||
while (off + 8 <= b.len) : (off += 8) {
|
||||
d.round(b[off..off + 8]);
|
||||
d.round(b[off .. off + 8]);
|
||||
}
|
||||
|
||||
// Remainder for next pass.
|
||||
@ -85,7 +85,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
d.msg_len +%= @truncate(u8, b.len);
|
||||
}
|
||||
|
||||
pub fn final(d: &Self) T {
|
||||
pub fn final(d: *Self) T {
|
||||
// Padding
|
||||
mem.set(u8, d.buf[d.buf_len..], 0);
|
||||
d.buf[7] = d.msg_len;
|
||||
@ -118,7 +118,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
return (u128(b2) << 64) | b1;
|
||||
}
|
||||
|
||||
fn round(d: &Self, b: []const u8) void {
|
||||
fn round(d: *Self, b: []const u8) void {
|
||||
debug.assert(b.len == 8);
|
||||
|
||||
const m = mem.readInt(b[0..], u64, Endian.Little);
|
||||
@ -132,7 +132,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
d.v0 ^= m;
|
||||
}
|
||||
|
||||
fn sipRound(d: &Self) void {
|
||||
fn sipRound(d: *Self) void {
|
||||
d.v0 +%= d.v1;
|
||||
d.v1 = math.rotl(u64, d.v1, u64(13));
|
||||
d.v1 ^= d.v0;
|
||||
@ -162,7 +162,7 @@ fn SipHash(comptime T: type, comptime c_rounds: usize, comptime d_rounds: usize)
|
||||
const test_key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f";
|
||||
|
||||
test "siphash64-2-4 sanity" {
|
||||
const vectors = [][]const u8 {
|
||||
const vectors = [][]const u8{
|
||||
"\x31\x0e\x0e\xdd\x47\xdb\x6f\x72", // ""
|
||||
"\xfd\x67\xdc\x93\xc5\x39\xf8\x74", // "\x00"
|
||||
"\x5a\x4f\xa9\xd9\x09\x80\x6c\x0d", // "\x00\x01" ... etc
|
||||
@ -241,7 +241,7 @@ test "siphash64-2-4 sanity" {
|
||||
}
|
||||
|
||||
test "siphash128-2-4 sanity" {
|
||||
const vectors = [][]const u8 {
|
||||
const vectors = [][]const u8{
|
||||
"\xa3\x81\x7f\x04\xba\x25\xa8\xe6\x6d\xf6\x72\x14\xc7\x55\x02\x93",
|
||||
"\xda\x87\xc1\xd8\x6b\x99\xaf\x44\x34\x76\x59\x11\x9b\x22\xfc\x45",
|
||||
"\x81\x77\x22\x8d\xa4\xa4\x5d\xc7\xfc\xa3\x8b\xde\xf6\x0a\xff\xe4",
|
||||
|
||||
@ -9,12 +9,12 @@ const builtin = @import("builtin");
|
||||
const want_modification_safety = builtin.mode != builtin.Mode.ReleaseFast;
|
||||
const debug_u32 = if (want_modification_safety) u32 else void;
|
||||
|
||||
pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32, comptime eql: fn(a: K, b: K) bool) type {
|
||||
pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn (key: K) u32, comptime eql: fn (a: K, b: K) bool) type {
|
||||
return struct {
|
||||
entries: []Entry,
|
||||
size: usize,
|
||||
max_distance_from_start_index: usize,
|
||||
allocator: &Allocator,
|
||||
allocator: *Allocator,
|
||||
// this is used to detect bugs where a hashtable is edited while an iterator is running.
|
||||
modification_count: debug_u32,
|
||||
|
||||
@ -28,7 +28,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
};
|
||||
|
||||
pub const Iterator = struct {
|
||||
hm: &const Self,
|
||||
hm: *const Self,
|
||||
// how many items have we returned
|
||||
count: usize,
|
||||
// iterator through the entry array
|
||||
@ -36,7 +36,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
// used to detect concurrent modification
|
||||
initial_modification_count: debug_u32,
|
||||
|
||||
pub fn next(it: &Iterator) ?&Entry {
|
||||
pub fn next(it: *Iterator) ?*Entry {
|
||||
if (want_modification_safety) {
|
||||
assert(it.initial_modification_count == it.hm.modification_count); // concurrent modification
|
||||
}
|
||||
@ -53,7 +53,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
}
|
||||
|
||||
// Reset the iterator to the initial index
|
||||
pub fn reset(it: &Iterator) void {
|
||||
pub fn reset(it: *Iterator) void {
|
||||
it.count = 0;
|
||||
it.index = 0;
|
||||
// Resetting the modification count too
|
||||
@ -61,7 +61,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
}
|
||||
};
|
||||
|
||||
pub fn init(allocator: &Allocator) Self {
|
||||
pub fn init(allocator: *Allocator) Self {
|
||||
return Self{
|
||||
.entries = []Entry{},
|
||||
.allocator = allocator,
|
||||
@ -71,11 +71,11 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(hm: &const Self) void {
|
||||
pub fn deinit(hm: *const Self) void {
|
||||
hm.allocator.free(hm.entries);
|
||||
}
|
||||
|
||||
pub fn clear(hm: &Self) void {
|
||||
pub fn clear(hm: *Self) void {
|
||||
for (hm.entries) |*entry| {
|
||||
entry.used = false;
|
||||
}
|
||||
@ -84,12 +84,12 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
hm.incrementModificationCount();
|
||||
}
|
||||
|
||||
pub fn count(hm: &const Self) usize {
|
||||
pub fn count(hm: *const Self) usize {
|
||||
return hm.size;
|
||||
}
|
||||
|
||||
/// Returns the value that was already there.
|
||||
pub fn put(hm: &Self, key: K, value: &const V) !?V {
|
||||
pub fn put(hm: *Self, key: K, value: *const V) !?V {
|
||||
if (hm.entries.len == 0) {
|
||||
try hm.initCapacity(16);
|
||||
}
|
||||
@ -111,18 +111,18 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
|
||||
return hm.internalPut(key, value);
|
||||
}
|
||||
|
||||
pub fn get(hm: &const Self, key: K) ?&Entry {
|
||||
pub fn get(hm: *const Self, key: K) ?*Entry {
|
||||
if (hm.entries.len == 0) {
|
||||
return null;
|
||||
}
|
||||
return hm.internalGet(key);
|
||||
}
|
||||
|
||||
pub fn contains(hm: &const Self, key: K) bool {
pub fn contains(hm: *const Self, key: K) bool {
return hm.get(key) != null;
}

pub fn remove(hm: &Self, key: K) ?&Entry {
pub fn remove(hm: *Self, key: K) ?*Entry {
if (hm.entries.len == 0) return null;
hm.incrementModificationCount();
const start_index = hm.keyToIndex(key);
@ -154,7 +154,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
return null;
}

pub fn iterator(hm: &const Self) Iterator {
pub fn iterator(hm: *const Self) Iterator {
return Iterator{
.hm = hm,
.count = 0,
@ -163,7 +163,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
};
}

fn initCapacity(hm: &Self, capacity: usize) !void {
fn initCapacity(hm: *Self, capacity: usize) !void {
hm.entries = try hm.allocator.alloc(Entry, capacity);
hm.size = 0;
hm.max_distance_from_start_index = 0;
@ -172,14 +172,14 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
}
}

fn incrementModificationCount(hm: &Self) void {
fn incrementModificationCount(hm: *Self) void {
if (want_modification_safety) {
hm.modification_count +%= 1;
}
}

/// Returns the value that was already there.
fn internalPut(hm: &Self, orig_key: K, orig_value: &const V) ?V {
fn internalPut(hm: *Self, orig_key: K, orig_value: *const V) ?V {
var key = orig_key;
var value = orig_value.*;
const start_index = hm.keyToIndex(key);
@ -231,7 +231,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
unreachable; // put into a full map
}

fn internalGet(hm: &const Self, key: K) ?&Entry {
fn internalGet(hm: *const Self, key: K) ?*Entry {
const start_index = hm.keyToIndex(key);
{
var roll_over: usize = 0;
@ -246,7 +246,7 @@ pub fn HashMap(comptime K: type, comptime V: type, comptime hash: fn(key: K) u32
return null;
}

fn keyToIndex(hm: &const Self, key: K) usize {
fn keyToIndex(hm: *const Self, key: K) usize {
return usize(hash(key)) % hm.entries.len;
}
};

98
std/heap.zig
98
std/heap.zig
@ -16,15 +16,15 @@ var c_allocator_state = Allocator{
.freeFn = cFree,
};

fn cAlloc(self: &Allocator, n: usize, alignment: u29) ![]u8 {
fn cAlloc(self: *Allocator, n: usize, alignment: u29) ![]u8 {
assert(alignment <= @alignOf(c_longdouble));
return if (c.malloc(n)) |buf| @ptrCast(&u8, buf)[0..n] else error.OutOfMemory;
return if (c.malloc(n)) |buf| @ptrCast([*]u8, buf)[0..n] else error.OutOfMemory;
}

fn cRealloc(self: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
const old_ptr = @ptrCast(&c_void, old_mem.ptr);
fn cRealloc(self: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
const old_ptr = @ptrCast([*]c_void, old_mem.ptr);
if (c.realloc(old_ptr, new_size)) |buf| {
return @ptrCast(&u8, buf)[0..new_size];
return @ptrCast(*u8, buf)[0..new_size];
} else if (new_size <= old_mem.len) {
return old_mem[0..new_size];
} else {
@ -32,8 +32,8 @@ fn cRealloc(self: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![
}
}

fn cFree(self: &Allocator, old_mem: []u8) void {
const old_ptr = @ptrCast(&c_void, old_mem.ptr);
fn cFree(self: *Allocator, old_mem: []u8) void {
const old_ptr = @ptrCast([*]c_void, old_mem.ptr);
c.free(old_ptr);
}

@ -55,7 +55,7 @@ pub const DirectAllocator = struct {
};
}

pub fn deinit(self: &DirectAllocator) void {
pub fn deinit(self: *DirectAllocator) void {
switch (builtin.os) {
Os.windows => if (self.heap_handle) |heap_handle| {
_ = os.windows.HeapDestroy(heap_handle);
@ -64,19 +64,17 @@ pub const DirectAllocator = struct {
}
}

fn alloc(allocator: &Allocator, n: usize, alignment: u29) ![]u8 {
fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
const self = @fieldParentPtr(DirectAllocator, "allocator", allocator);

switch (builtin.os) {
Os.linux,
Os.macosx,
Os.ios => {
Os.linux, Os.macosx, Os.ios => {
const p = os.posix;
const alloc_size = if (alignment <= os.page_size) n else n + alignment;
const addr = p.mmap(null, alloc_size, p.PROT_READ | p.PROT_WRITE, p.MAP_PRIVATE | p.MAP_ANONYMOUS, -1, 0);
if (addr == p.MAP_FAILED) return error.OutOfMemory;

if (alloc_size == n) return @intToPtr(&u8, addr)[0..n];
if (alloc_size == n) return @intToPtr([*]u8, addr)[0..n];

var aligned_addr = addr & ~usize(alignment - 1);
aligned_addr += alignment;
@ -95,7 +93,7 @@ pub const DirectAllocator = struct {
//It is impossible that there is an unoccupied page at the top of our
// mmap.

return @intToPtr(&u8, aligned_addr)[0..n];
return @intToPtr([*]u8, aligned_addr)[0..n];
},
Os.windows => {
const amt = n + alignment + @sizeOf(usize);
@ -110,20 +108,18 @@ pub const DirectAllocator = struct {
const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
const adjusted_addr = root_addr + march_forward_bytes;
const record_addr = adjusted_addr + n;
@intToPtr(&align(1) usize, record_addr).* = root_addr;
return @intToPtr(&u8, adjusted_addr)[0..n];
@intToPtr(*align(1) usize, record_addr).* = root_addr;
return @intToPtr([*]u8, adjusted_addr)[0..n];
},
else => @compileError("Unsupported OS"),
}
}

fn realloc(allocator: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
const self = @fieldParentPtr(DirectAllocator, "allocator", allocator);

switch (builtin.os) {
Os.linux,
Os.macosx,
Os.ios => {
Os.linux, Os.macosx, Os.ios => {
if (new_size <= old_mem.len) {
const base_addr = @ptrToInt(old_mem.ptr);
const old_addr_end = base_addr + old_mem.len;
@ -143,13 +139,13 @@ pub const DirectAllocator = struct {
Os.windows => {
const old_adjusted_addr = @ptrToInt(old_mem.ptr);
const old_record_addr = old_adjusted_addr + old_mem.len;
const root_addr = @intToPtr(&align(1) usize, old_record_addr).*;
const old_ptr = @intToPtr(os.windows.LPVOID, root_addr);
const root_addr = @intToPtr(*align(1) usize, old_record_addr).*;
const old_ptr = @intToPtr([*]c_void, root_addr);
const amt = new_size + alignment + @sizeOf(usize);
const new_ptr = os.windows.HeapReAlloc(??self.heap_handle, 0, old_ptr, amt) ?? blk: {
if (new_size > old_mem.len) return error.OutOfMemory;
const new_record_addr = old_record_addr - new_size + old_mem.len;
@intToPtr(&align(1) usize, new_record_addr).* = root_addr;
@intToPtr(*align(1) usize, new_record_addr).* = root_addr;
return old_mem[0..new_size];
};
const offset = old_adjusted_addr - root_addr;
@ -157,26 +153,24 @@ pub const DirectAllocator = struct {
const new_adjusted_addr = new_root_addr + offset;
assert(new_adjusted_addr % alignment == 0);
const new_record_addr = new_adjusted_addr + new_size;
@intToPtr(&align(1) usize, new_record_addr).* = new_root_addr;
return @intToPtr(&u8, new_adjusted_addr)[0..new_size];
@intToPtr(*align(1) usize, new_record_addr).* = new_root_addr;
return @intToPtr([*]u8, new_adjusted_addr)[0..new_size];
},
else => @compileError("Unsupported OS"),
}
}

fn free(allocator: &Allocator, bytes: []u8) void {
fn free(allocator: *Allocator, bytes: []u8) void {
const self = @fieldParentPtr(DirectAllocator, "allocator", allocator);

switch (builtin.os) {
Os.linux,
Os.macosx,
Os.ios => {
Os.linux, Os.macosx, Os.ios => {
_ = os.posix.munmap(@ptrToInt(bytes.ptr), bytes.len);
},
Os.windows => {
const record_addr = @ptrToInt(bytes.ptr) + bytes.len;
const root_addr = @intToPtr(&align(1) usize, record_addr).*;
const ptr = @intToPtr(os.windows.LPVOID, root_addr);
const root_addr = @intToPtr(*align(1) usize, record_addr).*;
const ptr = @intToPtr([*]c_void, root_addr);
_ = os.windows.HeapFree(??self.heap_handle, 0, ptr);
},
else => @compileError("Unsupported OS"),
@ -189,13 +183,13 @@ pub const DirectAllocator = struct {
pub const ArenaAllocator = struct {
pub allocator: Allocator,

child_allocator: &Allocator,
child_allocator: *Allocator,
buffer_list: std.LinkedList([]u8),
end_index: usize,

const BufNode = std.LinkedList([]u8).Node;

pub fn init(child_allocator: &Allocator) ArenaAllocator {
pub fn init(child_allocator: *Allocator) ArenaAllocator {
return ArenaAllocator{
.allocator = Allocator{
.allocFn = alloc,
@ -208,7 +202,7 @@ pub const ArenaAllocator = struct {
};
}

pub fn deinit(self: &ArenaAllocator) void {
pub fn deinit(self: *ArenaAllocator) void {
var it = self.buffer_list.first;
while (it) |node| {
// this has to occur before the free because the free frees node
@ -218,7 +212,7 @@ pub const ArenaAllocator = struct {
}
}

fn createNode(self: &ArenaAllocator, prev_len: usize, minimum_size: usize) !&BufNode {
fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) !*BufNode {
const actual_min_size = minimum_size + @sizeOf(BufNode);
var len = prev_len;
while (true) {
@ -239,7 +233,7 @@ pub const ArenaAllocator = struct {
return buf_node;
}

fn alloc(allocator: &Allocator, n: usize, alignment: u29) ![]u8 {
fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
const self = @fieldParentPtr(ArenaAllocator, "allocator", allocator);

var cur_node = if (self.buffer_list.last) |last_node| last_node else try self.createNode(0, n + alignment);
@ -260,7 +254,7 @@ pub const ArenaAllocator = struct {
}
}

fn realloc(allocator: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
if (new_size <= old_mem.len) {
return old_mem[0..new_size];
} else {
@ -270,7 +264,7 @@ pub const ArenaAllocator = struct {
}
}

fn free(allocator: &Allocator, bytes: []u8) void {}
fn free(allocator: *Allocator, bytes: []u8) void {}
};

pub const FixedBufferAllocator = struct {
@ -290,7 +284,7 @@ pub const FixedBufferAllocator = struct {
};
}

fn alloc(allocator: &Allocator, n: usize, alignment: u29) ![]u8 {
fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
const self = @fieldParentPtr(FixedBufferAllocator, "allocator", allocator);
const addr = @ptrToInt(self.buffer.ptr) + self.end_index;
const rem = @rem(addr, alignment);
@ -306,7 +300,7 @@ pub const FixedBufferAllocator = struct {
return result;
}

fn realloc(allocator: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
if (new_size <= old_mem.len) {
return old_mem[0..new_size];
} else {
@ -316,7 +310,7 @@ pub const FixedBufferAllocator = struct {
}
}

fn free(allocator: &Allocator, bytes: []u8) void {}
fn free(allocator: *Allocator, bytes: []u8) void {}
};

/// lock free
@ -337,7 +331,7 @@ pub const ThreadSafeFixedBufferAllocator = struct {
};
}

fn alloc(allocator: &Allocator, n: usize, alignment: u29) ![]u8 {
fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
const self = @fieldParentPtr(ThreadSafeFixedBufferAllocator, "allocator", allocator);
var end_index = @atomicLoad(usize, &self.end_index, builtin.AtomicOrder.SeqCst);
while (true) {
@ -353,7 +347,7 @@ pub const ThreadSafeFixedBufferAllocator = struct {
}
}

fn realloc(allocator: &Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
if (new_size <= old_mem.len) {
return old_mem[0..new_size];
} else {
@ -363,7 +357,7 @@ pub const ThreadSafeFixedBufferAllocator = struct {
}
}

fn free(allocator: &Allocator, bytes: []u8) void {}
fn free(allocator: *Allocator, bytes: []u8) void {}
};

test "c_allocator" {
|
||||
@ -409,8 +403,8 @@ test "ThreadSafeFixedBufferAllocator" {
|
||||
try testAllocatorLargeAlignment(&fixed_buffer_allocator.allocator);
|
||||
}
|
||||
|
||||
fn testAllocator(allocator: &mem.Allocator) !void {
|
||||
var slice = try allocator.alloc(&i32, 100);
|
||||
fn testAllocator(allocator: *mem.Allocator) !void {
|
||||
var slice = try allocator.alloc(*i32, 100);
|
||||
|
||||
for (slice) |*item, i| {
|
||||
item.* = try allocator.create(i32);
|
||||
@ -421,16 +415,16 @@ fn testAllocator(allocator: &mem.Allocator) !void {
|
||||
allocator.destroy(item);
|
||||
}
|
||||
|
||||
slice = try allocator.realloc(&i32, slice, 20000);
|
||||
slice = try allocator.realloc(&i32, slice, 50);
|
||||
slice = try allocator.realloc(&i32, slice, 25);
|
||||
slice = try allocator.realloc(&i32, slice, 10);
|
||||
slice = try allocator.realloc(*i32, slice, 20000);
|
||||
slice = try allocator.realloc(*i32, slice, 50);
|
||||
slice = try allocator.realloc(*i32, slice, 25);
|
||||
slice = try allocator.realloc(*i32, slice, 10);
|
||||
|
||||
allocator.free(slice);
|
||||
}
|
||||
|
||||
fn testAllocatorLargeAlignment(allocator: &mem.Allocator) mem.Allocator.Error!void {
|
||||
//Maybe a platform's page_size is actually the same as or
|
||||
fn testAllocatorLargeAlignment(allocator: *mem.Allocator) mem.Allocator.Error!void {
|
||||
//Maybe a platform's page_size is actually the same as or
|
||||
// very near usize?
|
||||
if (os.page_size << 2 > @maxValue(usize)) return;
|
||||
|
||||
|
||||
82
std/io.zig
82
std/io.zig
@ -34,20 +34,20 @@ pub fn getStdIn() GetStdIoErrs!File {
|
||||
|
||||
/// Implementation of InStream trait for File
|
||||
pub const FileInStream = struct {
|
||||
file: &File,
|
||||
file: *File,
|
||||
stream: Stream,
|
||||
|
||||
pub const Error = @typeOf(File.read).ReturnType.ErrorSet;
|
||||
pub const Stream = InStream(Error);
|
||||
|
||||
pub fn init(file: &File) FileInStream {
|
||||
pub fn init(file: *File) FileInStream {
|
||||
return FileInStream{
|
||||
.file = file,
|
||||
.stream = Stream{ .readFn = readFn },
|
||||
};
|
||||
}
|
||||
|
||||
fn readFn(in_stream: &Stream, buffer: []u8) Error!usize {
|
||||
fn readFn(in_stream: *Stream, buffer: []u8) Error!usize {
|
||||
const self = @fieldParentPtr(FileInStream, "stream", in_stream);
|
||||
return self.file.read(buffer);
|
||||
}
|
||||
@ -55,20 +55,20 @@ pub const FileInStream = struct {
|
||||
|
||||
/// Implementation of OutStream trait for File
|
||||
pub const FileOutStream = struct {
|
||||
file: &File,
|
||||
file: *File,
|
||||
stream: Stream,
|
||||
|
||||
pub const Error = File.WriteError;
|
||||
pub const Stream = OutStream(Error);
|
||||
|
||||
pub fn init(file: &File) FileOutStream {
|
||||
pub fn init(file: *File) FileOutStream {
|
||||
return FileOutStream{
|
||||
.file = file,
|
||||
.stream = Stream{ .writeFn = writeFn },
|
||||
};
|
||||
}
|
||||
|
||||
fn writeFn(out_stream: &Stream, bytes: []const u8) !void {
|
||||
fn writeFn(out_stream: *Stream, bytes: []const u8) !void {
|
||||
const self = @fieldParentPtr(FileOutStream, "stream", out_stream);
|
||||
return self.file.write(bytes);
|
||||
}
|
||||
@ -82,12 +82,12 @@ pub fn InStream(comptime ReadError: type) type {
|
||||
/// Return the number of bytes read. If the number read is smaller than buf.len, it
|
||||
/// means the stream reached the end. Reaching the end of a stream is not an error
|
||||
/// condition.
|
||||
readFn: fn(self: &Self, buffer: []u8) Error!usize,
|
||||
readFn: fn (self: *Self, buffer: []u8) Error!usize,
|
||||
|
||||
/// Replaces `buffer` contents by reading from the stream until it is finished.
|
||||
/// If `buffer.len()` would exceed `max_size`, `error.StreamTooLong` is returned and
|
||||
/// the contents read from the stream are lost.
|
||||
pub fn readAllBuffer(self: &Self, buffer: &Buffer, max_size: usize) !void {
|
||||
pub fn readAllBuffer(self: *Self, buffer: *Buffer, max_size: usize) !void {
|
||||
try buffer.resize(0);
|
||||
|
||||
var actual_buf_len: usize = 0;
|
||||
@ -111,7 +111,7 @@ pub fn InStream(comptime ReadError: type) type {
|
||||
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
|
||||
/// Caller owns returned memory.
|
||||
/// If this function returns an error, the contents from the stream read so far are lost.
|
||||
pub fn readAllAlloc(self: &Self, allocator: &mem.Allocator, max_size: usize) ![]u8 {
|
||||
pub fn readAllAlloc(self: *Self, allocator: *mem.Allocator, max_size: usize) ![]u8 {
|
||||
var buf = Buffer.initNull(allocator);
|
||||
defer buf.deinit();
|
||||
|
||||
@ -123,7 +123,7 @@ pub fn InStream(comptime ReadError: type) type {
|
||||
/// Does not include the delimiter in the result.
|
||||
/// If `buffer.len()` would exceed `max_size`, `error.StreamTooLong` is returned and the contents
|
||||
/// read from the stream so far are lost.
|
||||
pub fn readUntilDelimiterBuffer(self: &Self, buffer: &Buffer, delimiter: u8, max_size: usize) !void {
|
||||
pub fn readUntilDelimiterBuffer(self: *Self, buffer: *Buffer, delimiter: u8, max_size: usize) !void {
|
||||
try buffer.resize(0);
|
||||
|
||||
while (true) {
|
||||
@ -145,7 +145,7 @@ pub fn InStream(comptime ReadError: type) type {
|
||||
/// memory would be greater than `max_size`, returns `error.StreamTooLong`.
|
||||
/// Caller owns returned memory.
|
||||
/// If this function returns an error, the contents from the stream read so far are lost.
|
||||
pub fn readUntilDelimiterAlloc(self: &Self, allocator: &mem.Allocator, delimiter: u8, max_size: usize) ![]u8 {
|
||||
pub fn readUntilDelimiterAlloc(self: *Self, allocator: *mem.Allocator, delimiter: u8, max_size: usize) ![]u8 {
|
||||
var buf = Buffer.initNull(allocator);
|
||||
defer buf.deinit();
|
||||
|
||||
@ -156,43 +156,43 @@ pub fn InStream(comptime ReadError: type) type {
|
||||
/// Returns the number of bytes read. If the number read is smaller than buf.len, it
|
||||
/// means the stream reached the end. Reaching the end of a stream is not an error
|
||||
/// condition.
|
||||
pub fn read(self: &Self, buffer: []u8) !usize {
|
||||
pub fn read(self: *Self, buffer: []u8) !usize {
|
||||
return self.readFn(self, buffer);
|
||||
}
|
||||
|
||||
/// Same as `read` but end of stream returns `error.EndOfStream`.
|
||||
pub fn readNoEof(self: &Self, buf: []u8) !void {
|
||||
pub fn readNoEof(self: *Self, buf: []u8) !void {
|
||||
const amt_read = try self.read(buf);
|
||||
if (amt_read < buf.len) return error.EndOfStream;
|
||||
}
|
||||
|
||||
/// Reads 1 byte from the stream or returns `error.EndOfStream`.
|
||||
pub fn readByte(self: &Self) !u8 {
|
||||
pub fn readByte(self: *Self) !u8 {
|
||||
var result: [1]u8 = undefined;
|
||||
try self.readNoEof(result[0..]);
|
||||
return result[0];
|
||||
}
|
||||
|
||||
/// Same as `readByte` except the returned byte is signed.
|
||||
pub fn readByteSigned(self: &Self) !i8 {
|
||||
pub fn readByteSigned(self: *Self) !i8 {
|
||||
return @bitCast(i8, try self.readByte());
|
||||
}
|
||||
|
||||
pub fn readIntLe(self: &Self, comptime T: type) !T {
|
||||
pub fn readIntLe(self: *Self, comptime T: type) !T {
|
||||
return self.readInt(builtin.Endian.Little, T);
|
||||
}
|
||||
|
||||
pub fn readIntBe(self: &Self, comptime T: type) !T {
|
||||
pub fn readIntBe(self: *Self, comptime T: type) !T {
|
||||
return self.readInt(builtin.Endian.Big, T);
|
||||
}
|
||||
|
||||
pub fn readInt(self: &Self, endian: builtin.Endian, comptime T: type) !T {
|
||||
pub fn readInt(self: *Self, endian: builtin.Endian, comptime T: type) !T {
|
||||
var bytes: [@sizeOf(T)]u8 = undefined;
|
||||
try self.readNoEof(bytes[0..]);
|
||||
return mem.readInt(bytes, T, endian);
|
||||
}
|
||||
|
||||
pub fn readVarInt(self: &Self, endian: builtin.Endian, comptime T: type, size: usize) !T {
|
||||
pub fn readVarInt(self: *Self, endian: builtin.Endian, comptime T: type, size: usize) !T {
|
||||
assert(size <= @sizeOf(T));
|
||||
assert(size <= 8);
|
||||
var input_buf: [8]u8 = undefined;
|
||||
@ -208,22 +208,22 @@ pub fn OutStream(comptime WriteError: type) type {
|
||||
const Self = this;
|
||||
pub const Error = WriteError;
|
||||
|
||||
writeFn: fn(self: &Self, bytes: []const u8) Error!void,
|
||||
writeFn: fn (self: *Self, bytes: []const u8) Error!void,
|
||||
|
||||
pub fn print(self: &Self, comptime format: []const u8, args: ...) !void {
|
||||
pub fn print(self: *Self, comptime format: []const u8, args: ...) !void {
|
||||
return std.fmt.format(self, Error, self.writeFn, format, args);
|
||||
}
|
||||
|
||||
pub fn write(self: &Self, bytes: []const u8) !void {
|
||||
pub fn write(self: *Self, bytes: []const u8) !void {
|
||||
return self.writeFn(self, bytes);
|
||||
}
|
||||
|
||||
pub fn writeByte(self: &Self, byte: u8) !void {
|
||||
pub fn writeByte(self: *Self, byte: u8) !void {
|
||||
const slice = (&byte)[0..1];
|
||||
return self.writeFn(self, slice);
|
||||
}
|
||||
|
||||
pub fn writeByteNTimes(self: &Self, byte: u8, n: usize) !void {
|
||||
pub fn writeByteNTimes(self: *Self, byte: u8, n: usize) !void {
|
||||
const slice = (&byte)[0..1];
|
||||
var i: usize = 0;
|
||||
while (i < n) : (i += 1) {
|
||||
@ -234,14 +234,14 @@ pub fn OutStream(comptime WriteError: type) type {
|
||||
}
|
||||
|
||||
/// `path` needs to be copied in memory to add a null terminating byte, hence the allocator.
|
||||
pub fn writeFile(allocator: &mem.Allocator, path: []const u8, data: []const u8) !void {
|
||||
pub fn writeFile(allocator: *mem.Allocator, path: []const u8, data: []const u8) !void {
|
||||
var file = try File.openWrite(allocator, path);
|
||||
defer file.close();
|
||||
try file.write(data);
|
||||
}
|
||||
|
||||
/// On success, caller owns returned buffer.
|
||||
pub fn readFileAlloc(allocator: &mem.Allocator, path: []const u8) ![]u8 {
|
||||
pub fn readFileAlloc(allocator: *mem.Allocator, path: []const u8) ![]u8 {
|
||||
var file = try File.openRead(allocator, path);
|
||||
defer file.close();
|
||||
|
||||
@ -265,13 +265,13 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
|
||||
|
||||
pub stream: Stream,
|
||||
|
||||
unbuffered_in_stream: &Stream,
|
||||
unbuffered_in_stream: *Stream,
|
||||
|
||||
buffer: [buffer_size]u8,
|
||||
start_index: usize,
|
||||
end_index: usize,
|
||||
|
||||
pub fn init(unbuffered_in_stream: &Stream) Self {
|
||||
pub fn init(unbuffered_in_stream: *Stream) Self {
|
||||
return Self{
|
||||
.unbuffered_in_stream = unbuffered_in_stream,
|
||||
.buffer = undefined,
|
||||
@ -287,7 +287,7 @@ pub fn BufferedInStreamCustom(comptime buffer_size: usize, comptime Error: type)
|
||||
};
|
||||
}
|
||||
|
||||
fn readFn(in_stream: &Stream, dest: []u8) !usize {
|
||||
fn readFn(in_stream: *Stream, dest: []u8) !usize {
|
||||
const self = @fieldParentPtr(Self, "stream", in_stream);
|
||||
|
||||
var dest_index: usize = 0;
|
||||
@ -338,12 +338,12 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
|
||||
|
||||
pub stream: Stream,
|
||||
|
||||
unbuffered_out_stream: &Stream,
|
||||
unbuffered_out_stream: *Stream,
|
||||
|
||||
buffer: [buffer_size]u8,
|
||||
index: usize,
|
||||
|
||||
pub fn init(unbuffered_out_stream: &Stream) Self {
|
||||
pub fn init(unbuffered_out_stream: *Stream) Self {
|
||||
return Self{
|
||||
.unbuffered_out_stream = unbuffered_out_stream,
|
||||
.buffer = undefined,
|
||||
@ -352,12 +352,12 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
|
||||
};
|
||||
}
|
||||
|
||||
pub fn flush(self: &Self) !void {
|
||||
pub fn flush(self: *Self) !void {
|
||||
try self.unbuffered_out_stream.write(self.buffer[0..self.index]);
|
||||
self.index = 0;
|
||||
}
|
||||
|
||||
fn writeFn(out_stream: &Stream, bytes: []const u8) !void {
|
||||
fn writeFn(out_stream: *Stream, bytes: []const u8) !void {
|
||||
const self = @fieldParentPtr(Self, "stream", out_stream);
|
||||
|
||||
if (bytes.len >= self.buffer.len) {
|
||||
@ -369,7 +369,7 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
|
||||
while (src_index < bytes.len) {
|
||||
const dest_space_left = self.buffer.len - self.index;
|
||||
const copy_amt = math.min(dest_space_left, bytes.len - src_index);
|
||||
mem.copy(u8, self.buffer[self.index..], bytes[src_index..src_index + copy_amt]);
|
||||
mem.copy(u8, self.buffer[self.index..], bytes[src_index .. src_index + copy_amt]);
|
||||
self.index += copy_amt;
|
||||
assert(self.index <= self.buffer.len);
|
||||
if (self.index == self.buffer.len) {
|
||||
@ -383,20 +383,20 @@ pub fn BufferedOutStreamCustom(comptime buffer_size: usize, comptime OutStreamEr
|
||||
|
||||
/// Implementation of OutStream trait for Buffer
|
||||
pub const BufferOutStream = struct {
|
||||
buffer: &Buffer,
|
||||
buffer: *Buffer,
|
||||
stream: Stream,
|
||||
|
||||
pub const Error = error{OutOfMemory};
|
||||
pub const Stream = OutStream(Error);
|
||||
|
||||
pub fn init(buffer: &Buffer) BufferOutStream {
|
||||
pub fn init(buffer: *Buffer) BufferOutStream {
|
||||
return BufferOutStream{
|
||||
.buffer = buffer,
|
||||
.stream = Stream{ .writeFn = writeFn },
|
||||
};
|
||||
}
|
||||
|
||||
fn writeFn(out_stream: &Stream, bytes: []const u8) !void {
|
||||
fn writeFn(out_stream: *Stream, bytes: []const u8) !void {
|
||||
const self = @fieldParentPtr(BufferOutStream, "stream", out_stream);
|
||||
return self.buffer.append(bytes);
|
||||
}
|
||||
@ -407,7 +407,7 @@ pub const BufferedAtomicFile = struct {
|
||||
file_stream: FileOutStream,
|
||||
buffered_stream: BufferedOutStream(FileOutStream.Error),
|
||||
|
||||
pub fn create(allocator: &mem.Allocator, dest_path: []const u8) !&BufferedAtomicFile {
|
||||
pub fn create(allocator: *mem.Allocator, dest_path: []const u8) !*BufferedAtomicFile {
|
||||
// TODO with well defined copy elision we don't need this allocation
|
||||
var self = try allocator.create(BufferedAtomicFile);
|
||||
errdefer allocator.destroy(self);
|
||||
@ -427,18 +427,18 @@ pub const BufferedAtomicFile = struct {
|
||||
}
|
||||
|
||||
/// always call destroy, even after successful finish()
|
||||
pub fn destroy(self: &BufferedAtomicFile) void {
|
||||
pub fn destroy(self: *BufferedAtomicFile) void {
|
||||
const allocator = self.atomic_file.allocator;
|
||||
self.atomic_file.deinit();
|
||||
allocator.destroy(self);
|
||||
}
|
||||
|
||||
pub fn finish(self: &BufferedAtomicFile) !void {
|
||||
pub fn finish(self: *BufferedAtomicFile) !void {
|
||||
try self.buffered_stream.flush();
|
||||
try self.atomic_file.finish();
|
||||
}
|
||||
|
||||
pub fn stream(self: &BufferedAtomicFile) &OutStream(FileOutStream.Error) {
|
||||
pub fn stream(self: *BufferedAtomicFile) *OutStream(FileOutStream.Error) {
|
||||
return &self.buffered_stream.stream;
|
||||
}
|
||||
};
|
||||
|
||||
@ -41,7 +41,7 @@ test "write a file, read it, then delete it" {
|
||||
defer allocator.free(contents);
|
||||
|
||||
assert(mem.eql(u8, contents[0.."begin".len], "begin"));
|
||||
assert(mem.eql(u8, contents["begin".len..contents.len - "end".len], data));
|
||||
assert(mem.eql(u8, contents["begin".len .. contents.len - "end".len], data));
|
||||
assert(mem.eql(u8, contents[contents.len - "end".len ..], "end"));
|
||||
}
|
||||
try os.deleteFile(allocator, tmp_file_name);
|
||||
|
||||
146
std/json.zig
146
std/json.zig
@ -10,7 +10,7 @@ const u256 = @IntType(false, 256);
|
||||
|
||||
// A single token slice into the parent string.
|
||||
//
|
||||
// Use `token.slice()` on the inptu at the current position to get the current slice.
|
||||
// Use `token.slice()` on the input at the current position to get the current slice.
|
||||
pub const Token = struct {
|
||||
id: Id,
|
||||
// How many bytes do we skip before counting
|
||||
@ -76,8 +76,8 @@ pub const Token = struct {
|
||||
}
|
||||
|
||||
// Slice into the underlying input string.
|
||||
pub fn slice(self: &const Token, input: []const u8, i: usize) []const u8 {
|
||||
return input[i + self.offset - self.count..i + self.offset];
|
||||
pub fn slice(self: *const Token, input: []const u8, i: usize) []const u8 {
|
||||
return input[i + self.offset - self.count .. i + self.offset];
|
||||
}
|
||||
};
|
||||
|
||||
@ -115,7 +115,7 @@ pub const StreamingJsonParser = struct {
|
||||
return p;
|
||||
}
|
||||
|
||||
pub fn reset(p: &StreamingJsonParser) void {
|
||||
pub fn reset(p: *StreamingJsonParser) void {
|
||||
p.state = State.TopLevelBegin;
|
||||
p.count = 0;
|
||||
// Set before ever read in main transition function
|
||||
@ -205,7 +205,7 @@ pub const StreamingJsonParser = struct {
|
||||
// tokens. token2 is always null if token1 is null.
|
||||
//
|
||||
// There is currently no error recovery on a bad stream.
|
||||
pub fn feed(p: &StreamingJsonParser, c: u8, token1: &?Token, token2: &?Token) Error!void {
|
||||
pub fn feed(p: *StreamingJsonParser, c: u8, token1: *?Token, token2: *?Token) Error!void {
|
||||
token1.* = null;
|
||||
token2.* = null;
|
||||
p.count += 1;
|
||||
@ -217,7 +217,7 @@ pub const StreamingJsonParser = struct {
|
||||
}
|
||||
|
||||
// Perform a single transition on the state machine and return any possible token.
|
||||
fn transition(p: &StreamingJsonParser, c: u8, token: &?Token) Error!bool {
|
||||
fn transition(p: *StreamingJsonParser, c: u8, token: *?Token) Error!bool {
|
||||
switch (p.state) {
|
||||
State.TopLevelBegin => switch (c) {
|
||||
'{' => {
|
||||
@ -252,7 +252,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.after_value_state = State.TopLevelEnd;
|
||||
p.count = 0;
|
||||
},
|
||||
'1' ... '9' => {
|
||||
'1'...'9' => {
|
||||
p.number_is_integer = true;
|
||||
p.state = State.NumberMaybeDigitOrDotOrExponent;
|
||||
p.after_value_state = State.TopLevelEnd;
|
||||
@ -281,10 +281,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.after_value_state = State.TopLevelEnd;
|
||||
p.count = 0;
|
||||
},
|
||||
0x09,
|
||||
0x0A,
|
||||
0x0D,
|
||||
0x20 => {
|
||||
0x09, 0x0A, 0x0D, 0x20 => {
|
||||
// whitespace
|
||||
},
|
||||
else => {
|
||||
@ -293,10 +290,7 @@ pub const StreamingJsonParser = struct {
|
||||
},
|
||||
|
||||
State.TopLevelEnd => switch (c) {
|
||||
0x09,
|
||||
0x0A,
|
||||
0x0D,
|
||||
0x20 => {
|
||||
0x09, 0x0A, 0x0D, 0x20 => {
|
||||
// whitespace
|
||||
},
|
||||
else => {
|
||||
@ -392,7 +386,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.state = State.NumberMaybeDotOrExponent;
|
||||
p.count = 0;
|
||||
},
|
||||
'1' ... '9' => {
|
||||
'1'...'9' => {
|
||||
p.state = State.NumberMaybeDigitOrDotOrExponent;
|
||||
p.count = 0;
|
||||
},
|
||||
@ -412,10 +406,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.state = State.NullLiteral1;
|
||||
p.count = 0;
|
||||
},
|
||||
0x09,
|
||||
0x0A,
|
||||
0x0D,
|
||||
0x20 => {
|
||||
0x09, 0x0A, 0x0D, 0x20 => {
|
||||
// whitespace
|
||||
},
|
||||
else => {
|
||||
@ -461,7 +452,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.state = State.NumberMaybeDotOrExponent;
|
||||
p.count = 0;
|
||||
},
|
||||
'1' ... '9' => {
|
||||
'1'...'9' => {
|
||||
p.state = State.NumberMaybeDigitOrDotOrExponent;
|
||||
p.count = 0;
|
||||
},
|
||||
@ -481,10 +472,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.state = State.NullLiteral1;
|
||||
p.count = 0;
|
||||
},
|
||||
0x09,
|
||||
0x0A,
|
||||
0x0D,
|
||||
0x20 => {
|
||||
0x09, 0x0A, 0x0D, 0x20 => {
|
||||
// whitespace
|
||||
},
|
||||
else => {
|
||||
@ -533,10 +521,7 @@ pub const StreamingJsonParser = struct {
|
||||
|
||||
token.* = Token.initMarker(Token.Id.ObjectEnd);
|
||||
},
|
||||
0x09,
|
||||
0x0A,
|
||||
0x0D,
|
||||
0x20 => {
|
||||
0x09, 0x0A, 0x0D, 0x20 => {
|
||||
// whitespace
|
||||
},
|
||||
else => {
|
||||
@ -549,10 +534,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.state = State.ValueBegin;
|
||||
p.after_string_state = State.ValueEnd;
|
||||
},
|
||||
0x09,
|
||||
0x0A,
|
||||
0x0D,
|
||||
0x20 => {
|
||||
0x09, 0x0A, 0x0D, 0x20 => {
|
||||
// whitespace
|
||||
},
|
||||
else => {
|
||||
@ -561,7 +543,7 @@ pub const StreamingJsonParser = struct {
|
||||
},
|
||||
|
||||
State.String => switch (c) {
|
||||
0x00 ... 0x1F => {
|
||||
0x00...0x1F => {
|
||||
return error.InvalidControlCharacter;
|
||||
},
|
||||
'"' => {
|
||||
@ -576,19 +558,16 @@ pub const StreamingJsonParser = struct {
|
||||
'\\' => {
|
||||
p.state = State.StringEscapeCharacter;
|
||||
},
|
||||
0x20,
|
||||
0x21,
|
||||
0x23 ... 0x5B,
|
||||
0x5D ... 0x7F => {
|
||||
0x20, 0x21, 0x23...0x5B, 0x5D...0x7F => {
|
||||
// non-control ascii
|
||||
},
|
||||
0xC0 ... 0xDF => {
|
||||
0xC0...0xDF => {
|
||||
p.state = State.StringUtf8Byte1;
|
||||
},
|
||||
0xE0 ... 0xEF => {
|
||||
0xE0...0xEF => {
|
||||
p.state = State.StringUtf8Byte2;
|
||||
},
|
||||
0xF0 ... 0xFF => {
|
||||
0xF0...0xFF => {
|
||||
p.state = State.StringUtf8Byte3;
|
||||
},
|
||||
else => {
|
||||
@ -620,14 +599,7 @@ pub const StreamingJsonParser = struct {
|
||||
// The current JSONTestSuite tests rely on both of this behaviour being present
|
||||
// however, so we default to the status quo where both are accepted until this
|
||||
// is further clarified.
|
||||
'"',
|
||||
'\\',
|
||||
'/',
|
||||
'b',
|
||||
'f',
|
||||
'n',
|
||||
'r',
|
||||
't' => {
|
||||
'"', '\\', '/', 'b', 'f', 'n', 'r', 't' => {
|
||||
p.string_has_escape = true;
|
||||
p.state = State.String;
|
||||
},
|
||||
@ -641,36 +613,28 @@ pub const StreamingJsonParser = struct {
|
||||
},
|
||||
|
||||
State.StringEscapeHexUnicode4 => switch (c) {
|
||||
'0' ... '9',
|
||||
'A' ... 'F',
|
||||
'a' ... 'f' => {
|
||||
'0'...'9', 'A'...'F', 'a'...'f' => {
|
||||
p.state = State.StringEscapeHexUnicode3;
|
||||
},
|
||||
else => return error.InvalidUnicodeHexSymbol,
|
||||
},
|
||||
|
||||
State.StringEscapeHexUnicode3 => switch (c) {
|
||||
'0' ... '9',
|
||||
'A' ... 'F',
|
||||
'a' ... 'f' => {
|
||||
'0'...'9', 'A'...'F', 'a'...'f' => {
|
||||
p.state = State.StringEscapeHexUnicode2;
|
||||
},
|
||||
else => return error.InvalidUnicodeHexSymbol,
|
||||
},
|
||||
|
||||
State.StringEscapeHexUnicode2 => switch (c) {
|
||||
'0' ... '9',
|
||||
'A' ... 'F',
|
||||
'a' ... 'f' => {
|
||||
'0'...'9', 'A'...'F', 'a'...'f' => {
|
||||
p.state = State.StringEscapeHexUnicode1;
|
||||
},
|
||||
else => return error.InvalidUnicodeHexSymbol,
|
||||
},
|
||||
|
||||
State.StringEscapeHexUnicode1 => switch (c) {
|
||||
'0' ... '9',
|
||||
'A' ... 'F',
|
||||
'a' ... 'f' => {
|
||||
'0'...'9', 'A'...'F', 'a'...'f' => {
|
||||
p.state = State.String;
|
||||
},
|
||||
else => return error.InvalidUnicodeHexSymbol,
|
||||
@ -682,7 +646,7 @@ pub const StreamingJsonParser = struct {
|
||||
'0' => {
|
||||
p.state = State.NumberMaybeDotOrExponent;
|
||||
},
|
||||
'1' ... '9' => {
|
||||
'1'...'9' => {
|
||||
p.state = State.NumberMaybeDigitOrDotOrExponent;
|
||||
},
|
||||
else => {
|
||||
@ -698,8 +662,7 @@ pub const StreamingJsonParser = struct {
|
||||
p.number_is_integer = false;
|
||||
p.state = State.NumberFractionalRequired;
|
||||
},
|
||||
'e',
|
||||
'E' => {
|
||||
'e', 'E' => {
|
||||
p.number_is_integer = false;
|
||||
p.state = State.NumberExponent;
|
||||
},
|
||||
@ -718,12 +681,11 @@ pub const StreamingJsonParser = struct {
|
||||
p.number_is_integer = false;
|
||||
p.state = State.NumberFractionalRequired;
|
||||
},
|
||||
'e',
|
||||
'E' => {
|
||||
'e', 'E' => {
|
||||
p.number_is_integer = false;
|
||||
p.state = State.NumberExponent;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
// another digit
|
||||
},
|
||||
else => {
|
||||
@ -737,7 +699,7 @@ pub const StreamingJsonParser = struct {
|
||||
State.NumberFractionalRequired => {
|
||||
p.complete = p.after_value_state == State.TopLevelEnd;
|
||||
switch (c) {
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
p.state = State.NumberFractional;
|
||||
},
|
||||
else => {
|
||||
@ -749,11 +711,10 @@ pub const StreamingJsonParser = struct {
|
||||
State.NumberFractional => {
|
||||
p.complete = p.after_value_state == State.TopLevelEnd;
|
||||
switch (c) {
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
// another digit
|
||||
},
|
||||
'e',
|
||||
'E' => {
|
||||
'e', 'E' => {
|
||||
p.number_is_integer = false;
|
||||
p.state = State.NumberExponent;
|
||||
},
|
||||
@ -768,8 +729,7 @@ pub const StreamingJsonParser = struct {
|
||||
State.NumberMaybeExponent => {
|
||||
p.complete = p.after_value_state == State.TopLevelEnd;
|
||||
switch (c) {
|
||||
'e',
|
||||
'E' => {
|
||||
'e', 'E' => {
|
||||
p.number_is_integer = false;
|
||||
p.state = State.NumberExponent;
|
||||
},
|
||||
@ -782,12 +742,11 @@ pub const StreamingJsonParser = struct {
|
||||
},
|
||||
|
||||
State.NumberExponent => switch (c) {
|
||||
'-',
|
||||
'+' => {
|
||||
'-', '+' => {
|
||||
p.complete = false;
|
||||
p.state = State.NumberExponentDigitsRequired;
|
||||
},
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
p.complete = p.after_value_state == State.TopLevelEnd;
|
||||
p.state = State.NumberExponentDigits;
|
||||
},
|
||||
@ -797,7 +756,7 @@ pub const StreamingJsonParser = struct {
|
||||
},
|
||||
|
||||
State.NumberExponentDigitsRequired => switch (c) {
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
p.complete = p.after_value_state == State.TopLevelEnd;
|
||||
p.state = State.NumberExponentDigits;
|
||||
},
|
||||
@ -809,7 +768,7 @@ pub const StreamingJsonParser = struct {
|
||||
State.NumberExponentDigits => {
|
||||
p.complete = p.after_value_state == State.TopLevelEnd;
|
||||
switch (c) {
|
||||
'0' ... '9' => {
|
||||
'0'...'9' => {
|
||||
// another digit
|
||||
},
|
||||
else => {
|
||||
@ -902,7 +861,7 @@ pub fn validate(s: []const u8) bool {
|
||||
var token1: ?Token = undefined;
|
||||
var token2: ?Token = undefined;
|
||||
|
||||
p.feed(c, &token1, &token2) catch |err| {
|
||||
p.feed(c, &token1, &token2) catch |err| {
|
||||
return false;
|
||||
};
|
||||
}
|
||||
@ -919,7 +878,7 @@ pub const ValueTree = struct {
|
||||
arena: ArenaAllocator,
|
||||
root: Value,
|
||||
|
||||
pub fn deinit(self: &ValueTree) void {
|
||||
pub fn deinit(self: *ValueTree) void {
|
||||
self.arena.deinit();
|
||||
}
|
||||
};
|
||||
@ -935,7 +894,7 @@ pub const Value = union(enum) {
|
||||
Array: ArrayList(Value),
|
||||
Object: ObjectMap,
|
||||
|
||||
pub fn dump(self: &const Value) void {
|
||||
pub fn dump(self: *const Value) void {
|
||||
switch (self.*) {
|
||||
Value.Null => {
|
||||
std.debug.warn("null");
|
||||
@ -982,7 +941,7 @@ pub const Value = union(enum) {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn dumpIndent(self: &const Value, indent: usize) void {
|
||||
pub fn dumpIndent(self: *const Value, indent: usize) void {
|
||||
if (indent == 0) {
|
||||
self.dump();
|
||||
} else {
|
||||
@ -990,7 +949,7 @@ pub const Value = union(enum) {
|
||||
}
|
||||
}
|
||||
|
||||
fn dumpIndentLevel(self: &const Value, indent: usize, level: usize) void {
|
||||
fn dumpIndentLevel(self: *const Value, indent: usize, level: usize) void {
|
||||
switch (self.*) {
|
||||
Value.Null => {
|
||||
std.debug.warn("null");
|
||||
@ -1054,7 +1013,7 @@ pub const Value = union(enum) {
|
||||
|
||||
// A non-stream JSON parser which constructs a tree of Value's.
|
||||
pub const JsonParser = struct {
|
||||
allocator: &Allocator,
|
||||
allocator: *Allocator,
|
||||
state: State,
|
||||
copy_strings: bool,
|
||||
// Stores parent nodes and un-combined Values.
|
||||
@ -1067,7 +1026,7 @@ pub const JsonParser = struct {
|
||||
Simple,
|
||||
};
|
||||
|
||||
pub fn init(allocator: &Allocator, copy_strings: bool) JsonParser {
|
||||
pub fn init(allocator: *Allocator, copy_strings: bool) JsonParser {
|
||||
return JsonParser{
|
||||
.allocator = allocator,
|
||||
.state = State.Simple,
|
||||
@ -1076,16 +1035,16 @@ pub const JsonParser = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(p: &JsonParser) void {
|
||||
pub fn deinit(p: *JsonParser) void {
|
||||
p.stack.deinit();
|
||||
}
|
||||
|
||||
pub fn reset(p: &JsonParser) void {
|
||||
pub fn reset(p: *JsonParser) void {
|
||||
p.state = State.Simple;
|
||||
p.stack.shrink(0);
|
||||
}
|
||||
|
||||
pub fn parse(p: &JsonParser, input: []const u8) !ValueTree {
|
||||
pub fn parse(p: *JsonParser, input: []const u8) !ValueTree {
|
||||
var mp = StreamingJsonParser.init();
|
||||
|
||||
var arena = ArenaAllocator.init(p.allocator);
|
||||
@ -1131,7 +1090,7 @@ pub const JsonParser = struct {
|
||||
|
||||
// Even though p.allocator exists, we take an explicit allocator so that allocation state
|
||||
// can be cleaned up on error correctly during a `parse` on call.
|
||||
fn transition(p: &JsonParser, allocator: &Allocator, input: []const u8, i: usize, token: &const Token) !void {
|
||||
fn transition(p: *JsonParser, allocator: *Allocator, input: []const u8, i: usize, token: *const Token) !void {
|
||||
switch (p.state) {
|
||||
State.ObjectKey => switch (token.id) {
|
||||
Token.Id.ObjectEnd => {
|
||||
@ -1257,15 +1216,14 @@ pub const JsonParser = struct {
|
||||
Token.Id.Null => {
|
||||
try p.stack.append(Value.Null);
|
||||
},
|
||||
Token.Id.ObjectEnd,
|
||||
Token.Id.ArrayEnd => {
|
||||
Token.Id.ObjectEnd, Token.Id.ArrayEnd => {
|
||||
unreachable;
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn pushToParent(p: &JsonParser, value: &const Value) !void {
|
||||
fn pushToParent(p: *JsonParser, value: *const Value) !void {
|
||||
switch (p.stack.at(p.stack.len - 1)) {
|
||||
// Object Parent -> [ ..., object, <key>, value ]
|
||||
Value.String => |key| {
|
||||
@ -1286,14 +1244,14 @@ pub const JsonParser = struct {
|
||||
}
|
||||
}
|
||||
|
||||
fn parseString(p: &JsonParser, allocator: &Allocator, token: &const Token, input: []const u8, i: usize) !Value {
|
||||
fn parseString(p: *JsonParser, allocator: *Allocator, token: *const Token, input: []const u8, i: usize) !Value {
|
||||
// TODO: We don't strictly have to copy values which do not contain any escape
|
||||
// characters if flagged with the option.
|
||||
const slice = token.slice(input, i);
|
||||
return Value{ .String = try mem.dupe(p.allocator, u8, slice) };
|
||||
}
|
||||
|
||||
fn parseNumber(p: &JsonParser, token: &const Token, input: []const u8, i: usize) !Value {
|
||||
fn parseNumber(p: *JsonParser, token: *const Token, input: []const u8, i: usize) !Value {
|
||||
return if (token.number_is_integer)
|
||||
Value{ .Integer = try std.fmt.parseInt(i64, token.slice(input, i), 10) }
|
||||
else
|
||||
|
||||
@ -81,9 +81,7 @@ test "y_array_with_several_null" {
|
||||
}
|
||||
|
||||
test "y_array_with_trailing_space" {
|
||||
ok(
|
||||
"[2] "
|
||||
);
|
||||
ok("[2] ");
|
||||
}
|
||||
|
||||
test "y_number_0e+1" {
|
||||
@ -579,9 +577,7 @@ test "y_structure_true_in_array" {
|
||||
}
|
||||
|
||||
test "y_structure_whitespace_array" {
|
||||
ok(
|
||||
" [] "
|
||||
);
|
||||
ok(" [] ");
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
@ -696,7 +692,6 @@ test "n_array_newlines_unclosed" {
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
test "n_array_number_and_comma" {
|
||||
err(
|
||||
\\[1,]
|
||||
@ -971,7 +966,6 @@ test "n_number_invalid-utf-8-in-int" {
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
test "n_number_++" {
|
||||
err(
|
||||
\\[++1234]
|
||||
@ -1228,7 +1222,7 @@ test "n_object_unterminated-value" {
|
||||
err(
|
||||
\\{"a":"a
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
test "n_object_with_single_string" {
|
||||
err(
|
||||
@ -1243,9 +1237,7 @@ test "n_object_with_trailing_garbage" {
|
||||
}
|
||||
|
||||
test "n_single_space" {
|
||||
err(
|
||||
" "
|
||||
);
|
||||
err(" ");
|
||||
}
|
||||
|
||||
test "n_string_1_surrogate_then_escape" {
|
||||
@ -1279,9 +1271,7 @@ test "n_string_accentuated_char_no_quotes" {
|
||||
}
|
||||
|
||||
test "n_string_backslash_00" {
|
||||
err(
|
||||
\\["\ | ||||