Mirror of https://github.com/ziglang/zig.git, synced 2026-02-12 20:37:54 +00:00
Merge pull request #19826 from jacobly0/outdirarg
Run: add output directory arguments
This commit is contained in commit 6bc0cef607.
@@ -15,44 +15,44 @@ pub fn syscall1(number: usize, arg1: usize) usize {
// the below code, this is not used. A literal `%` can be
// obtained by escaping it with a double percent: `%%`.
// Often multiline string syntax comes in handy here.
\\syscall
// Next is the output. It is possible in the future Zig will
// support multiple outputs, depending on how
// https://github.com/ziglang/zig/issues/215 is resolved.
// It is allowed for there to be no outputs, in which case
// this colon would be directly followed by the colon for the inputs.
:
// This specifies the name to be used in `%[ret]` syntax in
// the above assembly string. This example does not use it,
// but the syntax is mandatory.
[ret]
// Next is the output constraint string. This feature is still
// considered unstable in Zig, and so LLVM/GCC documentation
// must be used to understand the semantics.
// http://releases.llvm.org/10.0.0/docs/LangRef.html#inline-asm-constraint-string
// https://gcc.gnu.org/onlinedocs/gcc/Extended-Asm.html
// In this example, the constraint string means "the result value of
// this inline assembly instruction is whatever is in $rax".
"={rax}"
// Next is either a value binding, or `->` and then a type. The
// type is the result type of the inline assembly expression.
// If it is a value binding, then `%[ret]` syntax would be used
// to refer to the register bound to the value.
(-> usize),
// Next is the list of inputs.
// The constraint for these inputs means, "when the assembly code is
// executed, $rax shall have the value of `number` and $rdi shall have
// the value of `arg1`". Any number of input parameters is allowed,
// including none.
: [number] "{rax}" (number),
  [arg1] "{rdi}" (arg1),
// Next is the list of clobbers. These declare a set of registers whose
// values will not be preserved by the execution of this assembly code.
// These do not include output or input registers. The special clobber
// value of "memory" means that the assembly writes to arbitrary undeclared
// memory locations - not only the memory pointed to by a declared indirect
// output. In this example we list $rcx and $r11 because it is known the
// kernel syscall does not preserve these registers.
: "rcx", "r11"
);
}
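Taken together, the hunk above corresponds to an inline-assembly wrapper along these lines; this is a minimal sketch with the explanatory comments stripped, so the exact whitespace of the langref example may differ:

    pub fn syscall1(number: usize, arg1: usize) usize {
        // One named output bound to $rax, two register inputs, and the
        // registers the Linux x86_64 syscall convention clobbers.
        return asm volatile ("syscall"
            : [ret] "={rax}" (-> usize),
            : [number] "{rax}" (number),
              [arg1] "{rdi}" (arg1),
            : "rcx", "r11"
        );
    }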
@@ -4,7 +4,7 @@ pub fn build(b: *std.Build) void {
const optimize = b.standardOptimizeOption(.{});
const exe = b.addExecutable(.{
.name = "example",
- .root_source_file = .{ .path = "example.zig" },
+ .root_source_file = b.path("example.zig"),
.optimize = optimize,
});
b.default_step.dependOn(&exe.step);
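The `.{ .path = ... }` to `b.path(...)` change above is the same LazyPath migration that repeats in the build-script hunks below. As a hedged sketch (not part of the diff, with placeholder names), a minimal post-change build.zig looks roughly like this:

    const std = @import("std");

    pub fn build(b: *std.Build) void {
        const exe = b.addExecutable(.{
            .name = "example",
            // b.path() produces a LazyPath relative to the build root,
            // replacing the old `.{ .path = "..." }` literal.
            .root_source_file = b.path("example.zig"),
            .target = b.standardTargetOptions(.{}),
            .optimize = b.standardOptimizeOption(.{}),
        });
        b.installArtifact(exe);
    }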
@@ -3,13 +3,13 @@ const std = @import("std");
pub fn build(b: *std.Build) void {
const lib = b.addSharedLibrary(.{
.name = "mathtest",
- .root_source_file = .{ .path = "mathtest.zig" },
+ .root_source_file = b.path("mathtest.zig"),
.version = .{ .major = 1, .minor = 0, .patch = 0 },
});
const exe = b.addExecutable(.{
.name = "test",
});
- exe.addCSourceFile(.{ .file = .{ .path = "test.c" }, .flags = &.{"-std=c99"} });
+ exe.addCSourceFile(.{ .file = b.path("test.c"), .flags = &.{"-std=c99"} });
exe.linkLibrary(lib);
exe.linkSystemLibrary("c");
@@ -3,13 +3,13 @@ const std = @import("std");
pub fn build(b: *std.Build) void {
const obj = b.addObject(.{
.name = "base64",
- .root_source_file = .{ .path = "base64.zig" },
+ .root_source_file = b.path("base64.zig"),
});

const exe = b.addExecutable(.{
.name = "test",
});
- exe.addCSourceFile(.{ .file = .{ .path = "test.c" }, .flags = &.{"-std=c99",} });
+ exe.addCSourceFile(.{ .file = b.path("test.c"), .flags = &.{"-std=c99"} });
exe.addObject(obj);
exe.linkSystemLibrary("c");
b.installArtifact(exe);
@@ -1,11 +1,13 @@
- const Foo = struct{};
- fn doSomethingWithFoo(foo: *Foo) void { _ = foo; }
+ const Foo = struct {};
+ fn doSomethingWithFoo(foo: *Foo) void {
+     _ = foo;
+ }

fn doAThing(optional_foo: ?*Foo) void {
// do some stuff

if (optional_foo) |foo| {
doSomethingWithFoo(foo);
}

// do some stuff
@@ -2,7 +2,7 @@
/// multiline doc comment).
const Timestamp = struct {
/// The number of seconds since the epoch (this is also a doc comment).
seconds: i64, // signed so we can represent pre-1970 (not a doc comment)
/// The number of nanoseconds past the second (doc comment again).
nanos: u32,
@@ -1,4 +1,6 @@
const Foo = enum(c_int) { a, b, c };
- export fn entry(foo: Foo) void { _ = foo; }
+ export fn entry(foo: Foo) void {
+     _ = foo;
+ }

// obj
@@ -1,4 +1,6 @@
const Foo = enum { a, b, c };
- export fn entry(foo: Foo) void { _ = foo; }
+ export fn entry(foo: Foo) void {
+     _ = foo;
+ }

// obj=parameter of type 'enum_export_error.Foo' not allowed in function with calling convention 'C'
@@ -26,9 +26,9 @@ pub fn parseU64(buf: []const u8, radix: u8) !u64 {

fn charToDigit(c: u8) u8 {
return switch (c) {
- '0' ... '9' => c - '0',
- 'A' ... 'Z' => c - 'A' + 10,
- 'a' ... 'z' => c - 'a' + 10,
+ '0'...'9' => c - '0',
+ 'A'...'Z' => c - 'A' + 10,
+ 'a'...'z' => c - 'a' + 10,
else => maxInt(u8),
};
}
@@ -6,8 +6,8 @@ pub extern "c" fn @"error"() void;
pub extern "c" fn @"fstat$INODE64"(fd: c.fd_t, buf: *c.Stat) c_int;

const Color = enum {
red,
@"really red",
};
const color: Color = .@"really red";
@@ -4,7 +4,7 @@ const a_number: i32 = 1234;
const a_string = "foobar";

pub fn main() void {
- print("here is a string: '{s}' here is a number: {}\n", .{a_string, a_number});
+ print("here is a string: '{s}' here is a number: {}\n", .{ a_string, a_number });
}

// exe=succeed
@@ -5,7 +5,7 @@ const a_string = "foobar";
const fmt = "here is a string: '{s}' here is a number: {}\n";

pub fn main() void {
- print(fmt, .{a_string, a_number});
+ print(fmt, .{ a_string, a_number });
}

// exe=succeed
@@ -1,3 +1,3 @@
- const err = (error {FileNotFound}).FileNotFound;
+ const err = (error{FileNotFound}).FileNotFound;

// syntax
@@ -3,19 +3,19 @@ const mem = @import("std").mem; // will be used to compare bytes

pub fn main() void {
const bytes = "hello";
print("{}\n", .{@TypeOf(bytes)}); // *const [5:0]u8
print("{d}\n", .{bytes.len}); // 5
print("{c}\n", .{bytes[1]}); // 'e'
print("{d}\n", .{bytes[5]}); // 0
print("{}\n", .{'e' == '\x65'}); // true
print("{d}\n", .{'\u{1f4a9}'}); // 128169
print("{d}\n", .{'💯'}); // 128175
print("{u}\n", .{'⚡'});
print("{}\n", .{mem.eql(u8, "hello", "h\x65llo")}); // true
print("{}\n", .{mem.eql(u8, "💯", "\xf0\x9f\x92\xaf")}); // also true
const invalid_utf8 = "\xff\xfe"; // non-UTF-8 strings are possible with \xNN notation.
print("0x{x}\n", .{invalid_utf8[1]}); // indexing them returns individual bytes...
print("0x{x}\n", .{"💯"[1]}); // ...as does indexing part-way through non-ASCII characters
}

// exe=succeed
@@ -1,7 +1,7 @@
const expect = @import("std").testing.expect;

test "noinline function call" {
- try expect(@call(.auto, add, .{3, 9}) == 12);
+ try expect(@call(.auto, add, .{ 3, 9 }) == 12);
}

fn add(a: i32, b: i32) i32 {
@@ -1,12 +1,12 @@
const std = @import("std");

- const FileOpenError = error {
+ const FileOpenError = error{
AccessDenied,
OutOfMemory,
FileNotFound,
};

- const AllocationError = error {
+ const AllocationError = error{
OutOfMemory,
};
@@ -1,10 +1,10 @@
- const FileOpenError = error {
+ const FileOpenError = error{
AccessDenied,
OutOfMemory,
FileNotFound,
};

- const AllocationError = error {
+ const AllocationError = error{
OutOfMemory,
};
@@ -1,11 +1,11 @@
const std = @import("std");
const expect = std.testing.expect;

- const Tuple = struct{ u8, u8 };
+ const Tuple = struct { u8, u8 };
test "coercion from homogenous tuple to array" {
- const tuple: Tuple = .{5, 6};
+ const tuple: Tuple = .{ 5, 6 };
const array: [2]u8 = tuple;
_ = array;
}

// test
@@ -2,17 +2,23 @@ const expect = @import("std").testing.expect;

const CmdFn = struct {
name: []const u8,
- func: fn(i32) i32,
+ func: fn (i32) i32,
};

const cmd_fns = [_]CmdFn{
- CmdFn {.name = "one", .func = one},
- CmdFn {.name = "two", .func = two},
- CmdFn {.name = "three", .func = three},
+ CmdFn{ .name = "one", .func = one },
+ CmdFn{ .name = "two", .func = two },
+ CmdFn{ .name = "three", .func = three },
};
- fn one(value: i32) i32 { return value + 1; }
- fn two(value: i32) i32 { return value + 2; }
- fn three(value: i32) i32 { return value + 3; }
+ fn one(value: i32) i32 {
+     return value + 1;
+ }
+ fn two(value: i32) i32 {
+     return value + 2;
+ }
+ fn three(value: i32) i32 {
+     return value + 3;
+ }

fn performFn(comptime prefix_char: u8, start_value: i32) i32 {
var result: i32 = start_value;
@@ -1,9 +1,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;

- const Foo = struct {
-     data: *u32
- };
+ const Foo = struct { data: *u32 };

fn getData() !u32 {
return 666;
@@ -1,9 +1,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;

- const Foo = struct {
-     data: *u32
- };
+ const Foo = struct { data: *u32 };

fn getData() !u32 {
return 666;

@@ -19,7 +17,7 @@ fn genFoos(allocator: Allocator, num: usize) ![]Foo {
errdefer allocator.destroy(foo.data);

// The data for the first 3 foos will be leaked
- if(i >= 3) return error.TooManyFoos;
+ if (i >= 3) return error.TooManyFoos;

foo.data.* = try getData();
}
@@ -1,7 +1,7 @@
const expect = @import("std").testing.expect;

test "for basics" {
- const items = [_]i32 { 4, 5, 3, 4, 0 };
+ const items = [_]i32{ 4, 5, 3, 4, 0 };
var sum: i32 = 0;

// For loops iterate over slices and arrays.

@@ -31,7 +31,7 @@ test "for basics" {

// To iterate over consecutive integers, use the range syntax.
// Unbounded range is always a compile error.
- var sum3 : usize = 0;
+ var sum3: usize = 0;
for (0..5) |i| {
sum3 += i;
}
@@ -14,7 +14,9 @@ fn add(a: i8, b: i8) i8 {

// The export specifier makes a function externally visible in the generated
// object file, and makes it use the C ABI.
- export fn sub(a: i8, b: i8) i8 { return a - b; }
+ export fn sub(a: i8, b: i8) i8 {
+     return a - b;
+ }

// The extern specifier is used to declare a function that will be resolved
// at link time, when linking statically, or at runtime, when linking

@@ -39,13 +41,15 @@ fn _start() callconv(.Naked) noreturn {

// The inline calling convention forces a function to be inlined at all call sites.
// If the function cannot be inlined, it is a compile-time error.
- fn shiftLeftOne(a: u32) callconv(.Inline) u32 {
+ inline fn shiftLeftOne(a: u32) u32 {
return a << 1;
}

// The pub specifier allows the function to be visible when importing.
// Another file can use @import and call sub2
- pub fn sub2(a: i8, b: i8) i8 { return a - b; }
+ pub fn sub2(a: i8, b: i8) i8 {
+     return a - b;
+ }

// Function pointers are prefixed with `*const `.
const Call2Op = *const fn (a: i8, b: i8) i8;
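As a side note (not part of the diff), a Call2Op value can be called like an ordinary function once a matching function is assigned to it; a small hedged sketch using sub2 from the example above:

    test "call through a Call2Op function pointer" {
        // Function bodies coerce to *const fn pointers on assignment.
        const op: Call2Op = sub2;
        try @import("std").testing.expect(op(5, 3) == 2);
    }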
@@ -12,7 +12,7 @@ pub fn add_explicit(comptime T: type, a: T, b: T) Error!T {
return ov[0];
}

- const Error = error {
+ const Error = error{
Overflow,
};
@@ -1,7 +1,7 @@
const expect = @import("std").testing.expect;

test "inline for loop" {
- const nums = [_]i32{2, 4, 6};
+ const nums = [_]i32{ 2, 4, 6 };
var sum: usize = 0;
inline for (nums) |i| {
const T = switch (i) {
@@ -15,7 +15,7 @@ fn getNum(u: U) u32 {
return @intFromFloat(num);
}
return num;
}
},
}
}
@@ -2,7 +2,7 @@ const std = @import("std");
const expect = std.testing.expect;

test "0-terminated sentinel array" {
- const array = [_:0]u8 {1, 2, 3, 4};
+ const array = [_:0]u8{ 1, 2, 3, 4 };

try expect(@TypeOf(array) == [4:0]u8);
try expect(array.len == 4);

@@ -11,7 +11,7 @@ test "0-terminated sentinel array" {

test "extra 0s in 0-terminated sentinel array" {
// The sentinel value may appear earlier, but does not influence the compile-time 'len'.
- const array = [_:0]u8 {1, 0, 0, 4};
+ const array = [_:0]u8{ 1, 0, 0, 4 };

try expect(@TypeOf(array) == [4:0]u8);
try expect(array.len == 4);
@@ -1,7 +1,7 @@
const std = @import("std");
const expect = std.testing.expect;

- const Point = struct {x: i32, y: i32};
+ const Point = struct { x: i32, y: i32 };

test "anonymous struct literal" {
const pt: Point = .{
@@ -13,15 +13,14 @@ const Point2 = packed struct {
y: f32,
};

// Declare an instance of a struct.
- const p = Point {
+ const p = Point{
.x = 0.12,
.y = 0.34,
};

// Maybe we're not ready to fill out some of the fields.
- var p2 = Point {
+ var p2 = Point{
.x = 0.12,
.y = undefined,
};

@@ -35,7 +34,7 @@ const Vec3 = struct {
z: f32,

pub fn init(x: f32, y: f32, z: f32) Vec3 {
- return Vec3 {
+ return Vec3{
.x = x,
.y = y,
.z = z,

@@ -69,7 +68,7 @@ test "struct namespaced variable" {
try expect(@sizeOf(Empty) == 0);

// you can still instantiate an empty struct
- const does_nothing = Empty {};
+ const does_nothing = Empty{};

_ = does_nothing;
}

@@ -81,7 +80,7 @@ fn setYBasedOnX(x: *f32, y: f32) void {
point.y = y;
}
test "field parent pointer" {
- var point = Point {
+ var point = Point{
.x = 0.1234,
.y = 0.5678,
};

@@ -100,8 +99,8 @@ fn LinkedList(comptime T: type) type {
};

first: ?*Node,
last: ?*Node,
len: usize,
};
}
@@ -12,8 +12,7 @@ test "switch on non-exhaustive enum" {
const number = Number.one;
const result = switch (number) {
.one => true,
- .two,
- .three => false,
+ .two, .three => false,
_ => false,
};
try expect(result);
@@ -5,10 +5,7 @@ test "try to pass a runtime type" {
foo(false);
}
fn foo(condition: bool) void {
- const result = max(
-     if (condition) f32 else u64,
-     1234,
-     5678);
+ const result = max(if (condition) f32 else u64, 1234, 5678);
_ = result;
}
@@ -9,7 +9,10 @@ test "while loop continue expression" {
test "while loop continue expression, more complicated" {
var i: usize = 1;
var j: usize = 1;
- while (i * j < 2000) : ({ i *= 2; j *= 3; }) {
+ while (i * j < 2000) : ({
+     i *= 2;
+     j *= 3;
+ }) {
const my_ij = i * j;
try expect(my_ij < 2000);
}
@@ -39,7 +39,9 @@ pub fn main() void {
var number_or_error: anyerror!i32 = error.ArgNotFound;

print("\nerror union 1\ntype: {}\nvalue: {!}\n", .{
- @TypeOf(number_or_error), number_or_error, });
+ @TypeOf(number_or_error),
+ number_or_error,
+ });

number_or_error = 1234;
File diff suppressed because it is too large
@@ -89,10 +89,10 @@ pub const CSourceFile = struct {
file: LazyPath,
flags: []const []const u8 = &.{},

- pub fn dupe(self: CSourceFile, b: *std.Build) CSourceFile {
+ pub fn dupe(file: CSourceFile, b: *std.Build) CSourceFile {
return .{
- .file = self.file.dupe(b),
- .flags = b.dupeStrings(self.flags),
+ .file = file.file.dupe(b),
+ .flags = b.dupeStrings(file.flags),
};
}
};

@@ -115,12 +115,12 @@ pub const RcSourceFile = struct {
/// as `/I <resolved path>`.
include_paths: []const LazyPath = &.{},

- pub fn dupe(self: RcSourceFile, b: *std.Build) RcSourceFile {
-     const include_paths = b.allocator.alloc(LazyPath, self.include_paths.len) catch @panic("OOM");
-     for (include_paths, self.include_paths) |*dest, lazy_path| dest.* = lazy_path.dupe(b);
+ pub fn dupe(file: RcSourceFile, b: *std.Build) RcSourceFile {
+     const include_paths = b.allocator.alloc(LazyPath, file.include_paths.len) catch @panic("OOM");
+     for (include_paths, file.include_paths) |*dest, lazy_path| dest.* = lazy_path.dupe(b);
return .{
- .file = self.file.dupe(b),
- .flags = b.dupeStrings(self.flags),
+ .file = file.file.dupe(b),
+ .flags = b.dupeStrings(file.flags),
.include_paths = include_paths,
};
}
@@ -665,24 +665,19 @@ pub fn appendZigProcessFlags(
for (m.include_dirs.items) |include_dir| {
switch (include_dir) {
.path => |include_path| {
- try zig_args.append("-I");
- try zig_args.append(include_path.getPath(b));
+ try zig_args.appendSlice(&.{ "-I", include_path.getPath2(b, asking_step) });
},
.path_system => |include_path| {
- try zig_args.append("-isystem");
- try zig_args.append(include_path.getPath(b));
+ try zig_args.appendSlice(&.{ "-isystem", include_path.getPath2(b, asking_step) });
},
.path_after => |include_path| {
- try zig_args.append("-idirafter");
- try zig_args.append(include_path.getPath(b));
+ try zig_args.appendSlice(&.{ "-idirafter", include_path.getPath2(b, asking_step) });
},
.framework_path => |include_path| {
- try zig_args.append("-F");
- try zig_args.append(include_path.getPath2(b, asking_step));
+ try zig_args.appendSlice(&.{ "-F", include_path.getPath2(b, asking_step) });
},
.framework_path_system => |include_path| {
- try zig_args.append("-iframework");
- try zig_args.append(include_path.getPath2(b, asking_step));
+ try zig_args.appendSlice(&.{ "-iframework", include_path.getPath2(b, asking_step) });
},
.other_step => |other| {
if (other.generated_h) |header| {
@@ -58,7 +58,7 @@ pub const TestResults = struct {
}
};

- pub const MakeFn = *const fn (self: *Step, prog_node: *std.Progress.Node) anyerror!void;
+ pub const MakeFn = *const fn (step: *Step, prog_node: *std.Progress.Node) anyerror!void;

pub const State = enum {
precheck_unstarted,

@@ -201,8 +201,8 @@ pub fn make(s: *Step, prog_node: *std.Progress.Node) error{ MakeFailed, MakeSkipped }!void {
}
}

- pub fn dependOn(self: *Step, other: *Step) void {
-     self.dependencies.append(other) catch @panic("OOM");
+ pub fn dependOn(step: *Step, other: *Step) void {
+     step.dependencies.append(other) catch @panic("OOM");
}

pub fn getStackTrace(s: *Step) ?std.builtin.StackTrace {
@@ -14,7 +14,7 @@ expected_exact: ?[]const u8,
source: std.Build.LazyPath,
max_bytes: usize = 20 * 1024 * 1024,

- pub const base_id = .check_file;
+ pub const base_id: Step.Id = .check_file;

pub const Options = struct {
expected_matches: []const []const u8 = &.{},

@@ -26,10 +26,10 @@ pub fn create(
source: std.Build.LazyPath,
options: Options,
) *CheckFile {
- const self = owner.allocator.create(CheckFile) catch @panic("OOM");
- self.* = .{
+ const check_file = owner.allocator.create(CheckFile) catch @panic("OOM");
+ check_file.* = .{
.step = Step.init(.{
- .id = .check_file,
+ .id = base_id,
.name = "CheckFile",
.owner = owner,
.makeFn = make,

@@ -38,27 +38,27 @@ pub fn create(
.expected_matches = owner.dupeStrings(options.expected_matches),
.expected_exact = options.expected_exact,
};
- self.source.addStepDependencies(&self.step);
- return self;
+ check_file.source.addStepDependencies(&check_file.step);
+ return check_file;
}

- pub fn setName(self: *CheckFile, name: []const u8) void {
-     self.step.name = name;
+ pub fn setName(check_file: *CheckFile, name: []const u8) void {
+     check_file.step.name = name;
}

fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
- const self: *CheckFile = @fieldParentPtr("step", step);
+ const check_file: *CheckFile = @fieldParentPtr("step", step);

- const src_path = self.source.getPath(b);
- const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| {
+ const src_path = check_file.source.getPath2(b, step);
+ const contents = fs.cwd().readFileAlloc(b.allocator, src_path, check_file.max_bytes) catch |err| {
return step.fail("unable to read '{s}': {s}", .{
src_path, @errorName(err),
});
};

- for (self.expected_matches) |expected_match| {
+ for (check_file.expected_matches) |expected_match| {
if (mem.indexOf(u8, contents, expected_match) == null) {
return step.fail(
\\

@@ -71,7 +71,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
}
}

- if (self.expected_exact) |expected_exact| {
+ if (check_file.expected_exact) |expected_exact| {
if (!mem.eql(u8, expected_exact, contents)) {
return step.fail(
\\
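For orientation (not part of the diff), a CheckFile step is normally created through the std.Build helper; a hedged sketch of typical build.zig usage, where `exe` is assumed to be a *Step.Compile created earlier:

    const check = b.addCheckFile(exe.getEmittedAsm(), .{
        // Fail the step if this substring is not found in the emitted file.
        .expected_matches = &.{"main"},
    });
    b.default_step.dependOn(&check.step);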
@@ -12,7 +12,7 @@ const CheckObject = @This();
const Allocator = mem.Allocator;
const Step = std.Build.Step;

- pub const base_id = .check_object;
+ pub const base_id: Step.Id = .check_object;

step: Step,
source: std.Build.LazyPath,

@@ -26,10 +26,10 @@ pub fn create(
obj_format: std.Target.ObjectFormat,
) *CheckObject {
const gpa = owner.allocator;
- const self = gpa.create(CheckObject) catch @panic("OOM");
- self.* = .{
+ const check_object = gpa.create(CheckObject) catch @panic("OOM");
+ check_object.* = .{
.step = Step.init(.{
- .id = .check_file,
+ .id = base_id,
.name = "CheckObject",
.owner = owner,
.makeFn = make,

@@ -38,8 +38,8 @@ pub fn create(
.checks = std.ArrayList(Check).init(gpa),
.obj_format = obj_format,
};
- self.source.addStepDependencies(&self.step);
- return self;
+ check_object.source.addStepDependencies(&check_object.step);
+ return check_object;
}

const SearchPhrase = struct {
@@ -268,36 +268,36 @@ const Check = struct {
return check;
}

- fn extract(self: *Check, phrase: SearchPhrase) void {
-     self.actions.append(.{
+ fn extract(check: *Check, phrase: SearchPhrase) void {
+     check.actions.append(.{
.tag = .extract,
.phrase = phrase,
}) catch @panic("OOM");
}

- fn exact(self: *Check, phrase: SearchPhrase) void {
-     self.actions.append(.{
+ fn exact(check: *Check, phrase: SearchPhrase) void {
+     check.actions.append(.{
.tag = .exact,
.phrase = phrase,
}) catch @panic("OOM");
}

- fn contains(self: *Check, phrase: SearchPhrase) void {
-     self.actions.append(.{
+ fn contains(check: *Check, phrase: SearchPhrase) void {
+     check.actions.append(.{
.tag = .contains,
.phrase = phrase,
}) catch @panic("OOM");
}

- fn notPresent(self: *Check, phrase: SearchPhrase) void {
-     self.actions.append(.{
+ fn notPresent(check: *Check, phrase: SearchPhrase) void {
+     check.actions.append(.{
.tag = .not_present,
.phrase = phrase,
}) catch @panic("OOM");
}

- fn computeCmp(self: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void {
-     self.actions.append(.{
+ fn computeCmp(check: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void {
+     check.actions.append(.{
.tag = .compute_cmp,
.phrase = phrase,
.expected = expected,
@@ -328,246 +328,246 @@ const Check = struct {
};

/// Creates a new empty sequence of actions.
- fn checkStart(self: *CheckObject, kind: Check.Kind) void {
-     const new_check = Check.create(self.step.owner.allocator, kind);
-     self.checks.append(new_check) catch @panic("OOM");
+ fn checkStart(check_object: *CheckObject, kind: Check.Kind) void {
+     const check = Check.create(check_object.step.owner.allocator, kind);
+     check_object.checks.append(check) catch @panic("OOM");
}

/// Adds an exact match phrase to the latest created Check.
- pub fn checkExact(self: *CheckObject, phrase: []const u8) void {
-     self.checkExactInner(phrase, null);
+ pub fn checkExact(check_object: *CheckObject, phrase: []const u8) void {
+     check_object.checkExactInner(phrase, null);
}

/// Like `checkExact()` but takes an additional argument `LazyPath` which will be
/// resolved to a full search query in `make()`.
- pub fn checkExactPath(self: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
-     self.checkExactInner(phrase, lazy_path);
+ pub fn checkExactPath(check_object: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
+     check_object.checkExactInner(phrase, lazy_path);
}

- fn checkExactInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
-     assert(self.checks.items.len > 0);
-     const last = &self.checks.items[self.checks.items.len - 1];
-     last.exact(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
+ fn checkExactInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
+     assert(check_object.checks.items.len > 0);
+     const last = &check_object.checks.items[check_object.checks.items.len - 1];
+     last.exact(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
}

/// Adds a fuzzy match phrase to the latest created Check.
- pub fn checkContains(self: *CheckObject, phrase: []const u8) void {
-     self.checkContainsInner(phrase, null);
+ pub fn checkContains(check_object: *CheckObject, phrase: []const u8) void {
+     check_object.checkContainsInner(phrase, null);
}

/// Like `checkContains()` but takes an additional argument `lazy_path` which will be
/// resolved to a full search query in `make()`.
pub fn checkContainsPath(
- self: *CheckObject,
+ check_object: *CheckObject,
phrase: []const u8,
lazy_path: std.Build.LazyPath,
) void {
- self.checkContainsInner(phrase, lazy_path);
+ check_object.checkContainsInner(phrase, lazy_path);
}

- fn checkContainsInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
-     assert(self.checks.items.len > 0);
-     const last = &self.checks.items[self.checks.items.len - 1];
-     last.contains(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
+ fn checkContainsInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
+     assert(check_object.checks.items.len > 0);
+     const last = &check_object.checks.items[check_object.checks.items.len - 1];
+     last.contains(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
}

/// Adds an exact match phrase with variable extractor to the latest created Check.
- pub fn checkExtract(self: *CheckObject, phrase: []const u8) void {
-     self.checkExtractInner(phrase, null);
+ pub fn checkExtract(check_object: *CheckObject, phrase: []const u8) void {
+     check_object.checkExtractInner(phrase, null);
}

/// Like `checkExtract()` but takes an additional argument `LazyPath` which will be
/// resolved to a full search query in `make()`.
- pub fn checkExtractLazyPath(self: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
-     self.checkExtractInner(phrase, lazy_path);
+ pub fn checkExtractLazyPath(check_object: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
+     check_object.checkExtractInner(phrase, lazy_path);
}

- fn checkExtractInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
-     assert(self.checks.items.len > 0);
-     const last = &self.checks.items[self.checks.items.len - 1];
-     last.extract(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
+ fn checkExtractInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
+     assert(check_object.checks.items.len > 0);
+     const last = &check_object.checks.items[check_object.checks.items.len - 1];
+     last.extract(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
}

/// Adds another searched phrase to the latest created Check
/// however ensures there is no matching phrase in the output.
- pub fn checkNotPresent(self: *CheckObject, phrase: []const u8) void {
-     self.checkNotPresentInner(phrase, null);
+ pub fn checkNotPresent(check_object: *CheckObject, phrase: []const u8) void {
+     check_object.checkNotPresentInner(phrase, null);
}

/// Like `checkExtract()` but takes an additional argument `LazyPath` which will be
/// resolved to a full search query in `make()`.
- pub fn checkNotPresentLazyPath(self: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
-     self.checkNotPresentInner(phrase, lazy_path);
+ pub fn checkNotPresentLazyPath(check_object: *CheckObject, phrase: []const u8, lazy_path: std.Build.LazyPath) void {
+     check_object.checkNotPresentInner(phrase, lazy_path);
}

- fn checkNotPresentInner(self: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
-     assert(self.checks.items.len > 0);
-     const last = &self.checks.items[self.checks.items.len - 1];
-     last.notPresent(.{ .string = self.step.owner.dupe(phrase), .lazy_path = lazy_path });
+ fn checkNotPresentInner(check_object: *CheckObject, phrase: []const u8, lazy_path: ?std.Build.LazyPath) void {
+     assert(check_object.checks.items.len > 0);
+     const last = &check_object.checks.items[check_object.checks.items.len - 1];
+     last.notPresent(.{ .string = check_object.step.owner.dupe(phrase), .lazy_path = lazy_path });
}

/// Creates a new check checking in the file headers (section, program headers, etc.).
- pub fn checkInHeaders(self: *CheckObject) void {
-     self.checkStart(.headers);
+ pub fn checkInHeaders(check_object: *CheckObject) void {
+     check_object.checkStart(.headers);
}

/// Creates a new check checking specifically symbol table parsed and dumped from the object
/// file.
- pub fn checkInSymtab(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInSymtab(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.symtab_label,
.elf => ElfDumper.symtab_label,
.wasm => WasmDumper.symtab_label,
.coff => @panic("TODO symtab for coff"),
else => @panic("TODO other file formats"),
};
- self.checkStart(.symtab);
- self.checkExact(label);
+ check_object.checkStart(.symtab);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically dyld rebase opcodes contents parsed and dumped
/// from the object file.
/// This check is target-dependent and applicable to MachO only.
- pub fn checkInDyldRebase(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInDyldRebase(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.dyld_rebase_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.dyld_rebase);
- self.checkExact(label);
+ check_object.checkStart(.dyld_rebase);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically dyld bind opcodes contents parsed and dumped
/// from the object file.
/// This check is target-dependent and applicable to MachO only.
- pub fn checkInDyldBind(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInDyldBind(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.dyld_bind_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.dyld_bind);
- self.checkExact(label);
+ check_object.checkStart(.dyld_bind);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically dyld weak bind opcodes contents parsed and dumped
/// from the object file.
/// This check is target-dependent and applicable to MachO only.
- pub fn checkInDyldWeakBind(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInDyldWeakBind(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.dyld_weak_bind_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.dyld_weak_bind);
- self.checkExact(label);
+ check_object.checkStart(.dyld_weak_bind);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically dyld lazy bind opcodes contents parsed and dumped
/// from the object file.
/// This check is target-dependent and applicable to MachO only.
- pub fn checkInDyldLazyBind(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInDyldLazyBind(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.dyld_lazy_bind_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.dyld_lazy_bind);
- self.checkExact(label);
+ check_object.checkStart(.dyld_lazy_bind);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically exports info contents parsed and dumped
/// from the object file.
/// This check is target-dependent and applicable to MachO only.
- pub fn checkInExports(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInExports(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.exports_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.exports);
- self.checkExact(label);
+ check_object.checkStart(.exports);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically indirect symbol table parsed and dumped
/// from the object file.
/// This check is target-dependent and applicable to MachO only.
- pub fn checkInIndirectSymtab(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInIndirectSymtab(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.macho => MachODumper.indirect_symtab_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.indirect_symtab);
- self.checkExact(label);
+ check_object.checkStart(.indirect_symtab);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically dynamic symbol table parsed and dumped from the object
/// file.
/// This check is target-dependent and applicable to ELF only.
- pub fn checkInDynamicSymtab(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInDynamicSymtab(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.elf => ElfDumper.dynamic_symtab_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.dynamic_symtab);
- self.checkExact(label);
+ check_object.checkStart(.dynamic_symtab);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically dynamic section parsed and dumped from the object
/// file.
/// This check is target-dependent and applicable to ELF only.
- pub fn checkInDynamicSection(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInDynamicSection(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.elf => ElfDumper.dynamic_section_label,
else => @panic("Unsupported target platform"),
};
- self.checkStart(.dynamic_section);
- self.checkExact(label);
+ check_object.checkStart(.dynamic_section);
+ check_object.checkExact(label);
}

/// Creates a new check checking specifically symbol table parsed and dumped from the archive
/// file.
- pub fn checkInArchiveSymtab(self: *CheckObject) void {
-     const label = switch (self.obj_format) {
+ pub fn checkInArchiveSymtab(check_object: *CheckObject) void {
+     const label = switch (check_object.obj_format) {
.elf => ElfDumper.archive_symtab_label,
else => @panic("TODO other file formats"),
};
- self.checkStart(.archive_symtab);
- self.checkExact(label);
+ check_object.checkStart(.archive_symtab);
+ check_object.checkExact(label);
}

- pub fn dumpSection(self: *CheckObject, name: [:0]const u8) void {
-     const new_check = Check.dumpSection(self.step.owner.allocator, name);
-     self.checks.append(new_check) catch @panic("OOM");
+ pub fn dumpSection(check_object: *CheckObject, name: [:0]const u8) void {
+     const check = Check.dumpSection(check_object.step.owner.allocator, name);
+     check_object.checks.append(check) catch @panic("OOM");
}

/// Creates a new standalone, singular check which allows running simple binary operations
/// on the extracted variables. It will then compare the reduced program with the value of
/// the expected variable.
pub fn checkComputeCompare(
- self: *CheckObject,
+ check_object: *CheckObject,
program: []const u8,
expected: ComputeCompareExpected,
) void {
- var new_check = Check.create(self.step.owner.allocator, .compute_compare);
- new_check.computeCmp(.{ .string = self.step.owner.dupe(program) }, expected);
- self.checks.append(new_check) catch @panic("OOM");
+ var check = Check.create(check_object.step.owner.allocator, .compute_compare);
+ check.computeCmp(.{ .string = check_object.step.owner.dupe(program) }, expected);
+ check_object.checks.append(check) catch @panic("OOM");
}

fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
const gpa = b.allocator;
- const self: *CheckObject = @fieldParentPtr("step", step);
+ const check_object: *CheckObject = @fieldParentPtr("step", step);

- const src_path = self.source.getPath(b);
+ const src_path = check_object.source.getPath2(b, step);
const contents = fs.cwd().readFileAllocOptions(
gpa,
src_path,
- self.max_bytes,
+ check_object.max_bytes,
null,
@alignOf(u64),
null,
) catch |err| return step.fail("unable to read '{s}': {s}", .{ src_path, @errorName(err) });

var vars = std.StringHashMap(u64).init(gpa);
- for (self.checks.items) |chk| {
+ for (check_object.checks.items) |chk| {
if (chk.kind == .compute_compare) {
assert(chk.actions.items.len == 1);
const act = chk.actions.items[0];
@@ -587,7 +587,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
continue;
}

- const output = switch (self.obj_format) {
+ const output = switch (check_object.obj_format) {
.macho => try MachODumper.parseAndDump(step, chk, contents),
.elf => try ElfDumper.parseAndDump(step, chk, contents),
.coff => return step.fail("TODO coff parser", .{}),
@@ -1597,8 +1597,8 @@ const MachODumper = struct {
},
},

- inline fn rankByTag(self: Export) u3 {
-     return switch (self.tag) {
+ inline fn rankByTag(@"export": Export) u3 {
+     return switch (@"export".tag) {
.@"export" => 1,
.reexport => 2,
.stub_resolver => 3,
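For orientation (not part of the diff), a CheckObject step is usually obtained from a compile step inside the test build scripts; a hedged sketch, where `exe` and `test_step` are assumed to have been created earlier:

    const check = exe.checkObject();
    check.checkInSymtab();
    check.checkContains("main");
    test_step.dependOn(&check.step);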
File diff suppressed because it is too large
@@ -52,15 +52,14 @@ pub const Options = struct {
};

pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
- const self = owner.allocator.create(ConfigHeader) catch @panic("OOM");
+ const config_header = owner.allocator.create(ConfigHeader) catch @panic("OOM");

var include_path: []const u8 = "config.h";

if (options.style.getPath()) |s| default_include_path: {
const sub_path = switch (s) {
- .path => |path| path,
- .generated, .generated_dirname => break :default_include_path,
+ .src_path => |sp| sp.sub_path,
+ .generated => break :default_include_path,
.cwd_relative => |sub_path| sub_path,
.dependency => |dependency| dependency.sub_path,
};

@@ -81,7 +80,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
else
owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path });

- self.* = .{
+ config_header.* = .{
.step = Step.init(.{
.id = base_id,
.name = name,
@@ -95,64 +94,64 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
.max_bytes = options.max_bytes,
.include_path = include_path,
.include_guard_override = options.include_guard_override,
- .output_file = .{ .step = &self.step },
+ .output_file = .{ .step = &config_header.step },
};

- return self;
+ return config_header;
}

- pub fn addValues(self: *ConfigHeader, values: anytype) void {
-     return addValuesInner(self, values) catch @panic("OOM");
+ pub fn addValues(config_header: *ConfigHeader, values: anytype) void {
+     return addValuesInner(config_header, values) catch @panic("OOM");
}

- pub fn getOutput(self: *ConfigHeader) std.Build.LazyPath {
-     return .{ .generated = &self.output_file };
+ pub fn getOutput(config_header: *ConfigHeader) std.Build.LazyPath {
+     return .{ .generated = .{ .file = &config_header.output_file } };
}

- fn addValuesInner(self: *ConfigHeader, values: anytype) !void {
+ fn addValuesInner(config_header: *ConfigHeader, values: anytype) !void {
inline for (@typeInfo(@TypeOf(values)).Struct.fields) |field| {
- try putValue(self, field.name, field.type, @field(values, field.name));
+ try putValue(config_header, field.name, field.type, @field(values, field.name));
}
}

- fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T) !void {
+ fn putValue(config_header: *ConfigHeader, field_name: []const u8, comptime T: type, v: T) !void {
switch (@typeInfo(T)) {
.Null => {
- try self.values.put(field_name, .undef);
+ try config_header.values.put(field_name, .undef);
},
.Void => {
- try self.values.put(field_name, .defined);
+ try config_header.values.put(field_name, .defined);
},
.Bool => {
- try self.values.put(field_name, .{ .boolean = v });
+ try config_header.values.put(field_name, .{ .boolean = v });
},
.Int => {
- try self.values.put(field_name, .{ .int = v });
+ try config_header.values.put(field_name, .{ .int = v });
},
.ComptimeInt => {
- try self.values.put(field_name, .{ .int = v });
+ try config_header.values.put(field_name, .{ .int = v });
},
.EnumLiteral => {
- try self.values.put(field_name, .{ .ident = @tagName(v) });
+ try config_header.values.put(field_name, .{ .ident = @tagName(v) });
},
.Optional => {
if (v) |x| {
- return putValue(self, field_name, @TypeOf(x), x);
+ return putValue(config_header, field_name, @TypeOf(x), x);
} else {
- try self.values.put(field_name, .undef);
+ try config_header.values.put(field_name, .undef);
}
},
.Pointer => |ptr| {
switch (@typeInfo(ptr.child)) {
.Array => |array| {
if (ptr.size == .One and array.child == u8) {
- try self.values.put(field_name, .{ .string = v });
+ try config_header.values.put(field_name, .{ .string = v });
return;
}
},
.Int => {
if (ptr.size == .Slice and ptr.child == u8) {
- try self.values.put(field_name, .{ .string = v });
+ try config_header.values.put(field_name, .{ .string = v });
return;
}
},
@@ -168,7 +167,7 @@ fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T) !void {
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
_ = prog_node;
const b = step.owner;
- const self: *ConfigHeader = @fieldParentPtr("step", step);
+ const config_header: *ConfigHeader = @fieldParentPtr("step", step);
const gpa = b.allocator;
const arena = b.allocator;

@@ -179,8 +178,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
// random bytes when ConfigHeader implementation is modified in a
// non-backwards-compatible way.
man.hash.add(@as(u32, 0xdef08d23));
- man.hash.addBytes(self.include_path);
- man.hash.addOptionalBytes(self.include_guard_override);
+ man.hash.addBytes(config_header.include_path);
+ man.hash.addOptionalBytes(config_header.include_guard_override);

var output = std.ArrayList(u8).init(gpa);
defer output.deinit();
@@ -189,34 +188,34 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
const c_generated_line = "/* " ++ header_text ++ " */\n";
const asm_generated_line = "; " ++ header_text ++ "\n";

- switch (self.style) {
+ switch (config_header.style) {
.autoconf => |file_source| {
try output.appendSlice(c_generated_line);
- const src_path = file_source.getPath(b);
- const contents = std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes) catch |err| {
+ const src_path = file_source.getPath2(b, step);
+ const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
return step.fail("unable to read autoconf input file '{s}': {s}", .{
src_path, @errorName(err),
});
};
- try render_autoconf(step, contents, &output, self.values, src_path);
+ try render_autoconf(step, contents, &output, config_header.values, src_path);
},
.cmake => |file_source| {
try output.appendSlice(c_generated_line);
- const src_path = file_source.getPath(b);
- const contents = std.fs.cwd().readFileAlloc(arena, src_path, self.max_bytes) catch |err| {
+ const src_path = file_source.getPath2(b, step);
+ const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
return step.fail("unable to read cmake input file '{s}': {s}", .{
src_path, @errorName(err),
});
};
- try render_cmake(step, contents, &output, self.values, src_path);
+ try render_cmake(step, contents, &output, config_header.values, src_path);
},
.blank => {
try output.appendSlice(c_generated_line);
- try render_blank(&output, self.values, self.include_path, self.include_guard_override);
+ try render_blank(&output, config_header.values, config_header.include_path, config_header.include_guard_override);
},
.nasm => {
try output.appendSlice(asm_generated_line);
- try render_nasm(&output, self.values);
+ try render_nasm(&output, config_header.values);
},
}
@@ -224,8 +223,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {

if (try step.cacheHit(&man)) {
const digest = man.final();
- self.output_file.path = try b.cache_root.join(arena, &.{
-     "o", &digest, self.include_path,
+ config_header.output_file.path = try b.cache_root.join(arena, &.{
+     "o", &digest, config_header.include_path,
});
return;
}

@@ -237,7 +236,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
// output_path is libavutil/avconfig.h
// We want to open directory zig-cache/o/HASH/libavutil/
// but keep output_dir as zig-cache/o/HASH for -I include
- const sub_path = try std.fs.path.join(arena, &.{ "o", &digest, self.include_path });
+ const sub_path = b.pathJoin(&.{ "o", &digest, config_header.include_path });
const sub_path_dirname = std.fs.path.dirname(sub_path).?;

b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
@@ -252,7 +251,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
});
};

- self.output_file.path = try b.cache_root.join(arena, &.{sub_path});
+ config_header.output_file.path = try b.cache_root.join(arena, &.{sub_path});
try man.writeManifest();
}
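For orientation (not part of the diff), ConfigHeader steps are normally created through the std.Build helper; a hedged sketch of typical build.zig usage, with placeholder value names:

    const config_h = b.addConfigHeader(.{
        .style = .blank,
        .include_path = "config.h",
    }, .{
        .PROJECT_NAME = "example", // string values become quoted #define values
        .HAVE_THREADS = true, // booleans toggle the corresponding #define
    });
    exe.addConfigHeader(config_h);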
@@ -10,7 +10,7 @@ paths: []const []const u8,
exclude_paths: []const []const u8,
check: bool,

- pub const base_id = .fmt;
+ pub const base_id: Step.Id = .fmt;

pub const Options = struct {
paths: []const []const u8 = &.{},

@@ -20,9 +20,9 @@ pub const Options = struct {
};

pub fn create(owner: *std.Build, options: Options) *Fmt {
- const self = owner.allocator.create(Fmt) catch @panic("OOM");
+ const fmt = owner.allocator.create(Fmt) catch @panic("OOM");
const name = if (options.check) "zig fmt --check" else "zig fmt";
- self.* = .{
+ fmt.* = .{
.step = Step.init(.{
.id = base_id,
.name = name,

@@ -33,7 +33,7 @@ pub fn create(owner: *std.Build, options: Options) *Fmt {
.exclude_paths = owner.dupeStrings(options.exclude_paths),
.check = options.check,
};
- return self;
+ return fmt;
}

fn make(step: *Step, prog_node: *std.Progress.Node) !void {

@@ -47,23 +47,23 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {

const b = step.owner;
const arena = b.allocator;
- const self: *Fmt = @fieldParentPtr("step", step);
+ const fmt: *Fmt = @fieldParentPtr("step", step);

var argv: std.ArrayListUnmanaged([]const u8) = .{};
- try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len);
+ try argv.ensureUnusedCapacity(arena, 2 + 1 + fmt.paths.len + 2 * fmt.exclude_paths.len);

argv.appendAssumeCapacity(b.graph.zig_exe);
argv.appendAssumeCapacity("fmt");

- if (self.check) {
+ if (fmt.check) {
argv.appendAssumeCapacity("--check");
}

- for (self.paths) |p| {
+ for (fmt.paths) |p| {
argv.appendAssumeCapacity(b.pathFromRoot(p));
}

- for (self.exclude_paths) |p| {
+ for (fmt.exclude_paths) |p| {
argv.appendAssumeCapacity("--exclude");
argv.appendAssumeCapacity(b.pathFromRoot(p));
}
@ -29,7 +29,7 @@ const DylibSymlinkInfo = struct {
|
||||
name_only_filename: []const u8,
|
||||
};
|
||||
|
||||
pub const base_id = .install_artifact;
|
||||
pub const base_id: Step.Id = .install_artifact;
|
||||
|
||||
pub const Options = struct {
|
||||
/// Which installation directory to put the main output file into.
|
||||
@ -52,7 +52,7 @@ pub const Options = struct {
|
||||
};
|
||||
|
||||
pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *InstallArtifact {
|
||||
const self = owner.allocator.create(InstallArtifact) catch @panic("OOM");
|
||||
const install_artifact = owner.allocator.create(InstallArtifact) catch @panic("OOM");
|
||||
const dest_dir: ?InstallDir = switch (options.dest_dir) {
|
||||
.disabled => null,
|
||||
.default => switch (artifact.kind) {
|
||||
@ -62,7 +62,7 @@ pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *Ins
|
||||
},
|
||||
.override => |o| o,
|
||||
};
|
||||
self.* = .{
|
||||
install_artifact.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = owner.fmt("install {s}", .{artifact.name}),
|
||||
@ -104,28 +104,28 @@ pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *Ins
|
||||
.artifact = artifact,
|
||||
};
|
||||
|
||||
self.step.dependOn(&artifact.step);
|
||||
install_artifact.step.dependOn(&artifact.step);
|
||||
|
||||
if (self.dest_dir != null) self.emitted_bin = artifact.getEmittedBin();
|
||||
if (self.pdb_dir != null) self.emitted_pdb = artifact.getEmittedPdb();
|
||||
if (install_artifact.dest_dir != null) install_artifact.emitted_bin = artifact.getEmittedBin();
|
||||
if (install_artifact.pdb_dir != null) install_artifact.emitted_pdb = artifact.getEmittedPdb();
|
||||
// https://github.com/ziglang/zig/issues/9698
|
||||
//if (self.h_dir != null) self.emitted_h = artifact.getEmittedH();
|
||||
if (self.implib_dir != null) self.emitted_implib = artifact.getEmittedImplib();
|
||||
//if (install_artifact.h_dir != null) install_artifact.emitted_h = artifact.getEmittedH();
|
||||
if (install_artifact.implib_dir != null) install_artifact.emitted_implib = artifact.getEmittedImplib();
|
||||
|
||||
return self;
|
||||
return install_artifact;
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const self: *InstallArtifact = @fieldParentPtr("step", step);
|
||||
const install_artifact: *InstallArtifact = @fieldParentPtr("step", step);
|
||||
const b = step.owner;
|
||||
const cwd = fs.cwd();
|
||||
|
||||
var all_cached = true;
|
||||
|
||||
if (self.dest_dir) |dest_dir| {
|
||||
const full_dest_path = b.getInstallPath(dest_dir, self.dest_sub_path);
|
||||
const full_src_path = self.emitted_bin.?.getPath2(b, step);
|
||||
if (install_artifact.dest_dir) |dest_dir| {
|
||||
const full_dest_path = b.getInstallPath(dest_dir, install_artifact.dest_sub_path);
|
||||
const full_src_path = install_artifact.emitted_bin.?.getPath2(b, step);
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
full_src_path, full_dest_path, @errorName(err),
|
||||
@ -133,15 +133,15 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
};
|
||||
all_cached = all_cached and p == .fresh;
|
||||
|
||||
if (self.dylib_symlinks) |dls| {
|
||||
if (install_artifact.dylib_symlinks) |dls| {
|
||||
try Step.Compile.doAtomicSymLinks(step, full_dest_path, dls.major_only_filename, dls.name_only_filename);
|
||||
}
|
||||
|
||||
self.artifact.installed_path = full_dest_path;
|
||||
install_artifact.artifact.installed_path = full_dest_path;
|
||||
}
|
||||
|
||||
if (self.implib_dir) |implib_dir| {
|
||||
const full_src_path = self.emitted_implib.?.getPath2(b, step);
|
||||
if (install_artifact.implib_dir) |implib_dir| {
|
||||
const full_src_path = install_artifact.emitted_implib.?.getPath2(b, step);
|
||||
const full_implib_path = b.getInstallPath(implib_dir, fs.path.basename(full_src_path));
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_implib_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
@ -151,8 +151,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
all_cached = all_cached and p == .fresh;
|
||||
}
|
||||
|
||||
if (self.pdb_dir) |pdb_dir| {
|
||||
const full_src_path = self.emitted_pdb.?.getPath2(b, step);
|
||||
if (install_artifact.pdb_dir) |pdb_dir| {
|
||||
const full_src_path = install_artifact.emitted_pdb.?.getPath2(b, step);
|
||||
const full_pdb_path = b.getInstallPath(pdb_dir, fs.path.basename(full_src_path));
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_pdb_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
@ -162,8 +162,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
all_cached = all_cached and p == .fresh;
|
||||
}
|
||||
|
||||
if (self.h_dir) |h_dir| {
|
||||
if (self.emitted_h) |emitted_h| {
|
||||
if (install_artifact.h_dir) |h_dir| {
|
||||
if (install_artifact.emitted_h) |emitted_h| {
|
||||
const full_src_path = emitted_h.getPath2(b, step);
|
||||
const full_h_path = b.getInstallPath(h_dir, fs.path.basename(full_src_path));
|
||||
const p = fs.Dir.updateFile(cwd, full_src_path, cwd, full_h_path, .{}) catch |err| {
|
||||
@ -174,7 +174,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
all_cached = all_cached and p == .fresh;
|
||||
}
|
||||
|
||||
for (self.artifact.installed_headers.items) |installation| switch (installation) {
|
||||
for (install_artifact.artifact.installed_headers.items) |installation| switch (installation) {
|
||||
.file => |file| {
|
||||
const full_src_path = file.source.getPath2(b, step);
|
||||
const full_h_path = b.getInstallPath(h_dir, file.dest_rel_path);
|
||||
|
||||
@ -3,17 +3,16 @@ const mem = std.mem;
|
||||
const fs = std.fs;
|
||||
const Step = std.Build.Step;
|
||||
const LazyPath = std.Build.LazyPath;
|
||||
const InstallDir = std.Build.InstallDir;
|
||||
const InstallDirStep = @This();
|
||||
const InstallDir = @This();
|
||||
|
||||
step: Step,
|
||||
options: Options,
|
||||
|
||||
pub const base_id = .install_dir;
|
||||
pub const base_id: Step.Id = .install_dir;
|
||||
|
||||
pub const Options = struct {
|
||||
source_dir: LazyPath,
|
||||
install_dir: InstallDir,
|
||||
install_dir: std.Build.InstallDir,
|
||||
install_subdir: []const u8,
|
||||
/// File paths which end in any of these suffixes will be excluded
|
||||
/// from being installed.
|
||||
@ -29,41 +28,41 @@ pub const Options = struct {
|
||||
/// `@import("test.zig")` would be a compile error.
|
||||
blank_extensions: []const []const u8 = &.{},
|
||||
|
||||
fn dupe(self: Options, b: *std.Build) Options {
|
||||
fn dupe(opts: Options, b: *std.Build) Options {
|
||||
return .{
|
||||
.source_dir = self.source_dir.dupe(b),
|
||||
.install_dir = self.install_dir.dupe(b),
|
||||
.install_subdir = b.dupe(self.install_subdir),
|
||||
.exclude_extensions = b.dupeStrings(self.exclude_extensions),
|
||||
.include_extensions = if (self.include_extensions) |incs| b.dupeStrings(incs) else null,
|
||||
.blank_extensions = b.dupeStrings(self.blank_extensions),
|
||||
.source_dir = opts.source_dir.dupe(b),
|
||||
.install_dir = opts.install_dir.dupe(b),
|
||||
.install_subdir = b.dupe(opts.install_subdir),
|
||||
.exclude_extensions = b.dupeStrings(opts.exclude_extensions),
|
||||
.include_extensions = if (opts.include_extensions) |incs| b.dupeStrings(incs) else null,
|
||||
.blank_extensions = b.dupeStrings(opts.blank_extensions),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn create(owner: *std.Build, options: Options) *InstallDirStep {
|
||||
pub fn create(owner: *std.Build, options: Options) *InstallDir {
|
||||
owner.pushInstalledFile(options.install_dir, options.install_subdir);
|
||||
const self = owner.allocator.create(InstallDirStep) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const install_dir = owner.allocator.create(InstallDir) catch @panic("OOM");
|
||||
install_dir.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = .install_dir,
|
||||
.id = base_id,
|
||||
.name = owner.fmt("install {s}/", .{options.source_dir.getDisplayName()}),
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
}),
|
||||
.options = options.dupe(owner),
|
||||
};
|
||||
options.source_dir.addStepDependencies(&self.step);
|
||||
return self;
|
||||
options.source_dir.addStepDependencies(&install_dir.step);
|
||||
return install_dir;
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self: *InstallDirStep = @fieldParentPtr("step", step);
|
||||
const install_dir: *InstallDir = @fieldParentPtr("step", step);
|
||||
const arena = b.allocator;
|
||||
const dest_prefix = b.getInstallPath(self.options.install_dir, self.options.install_subdir);
|
||||
const src_dir_path = self.options.source_dir.getPath2(b, step);
|
||||
const dest_prefix = b.getInstallPath(install_dir.options.install_dir, install_dir.options.install_subdir);
|
||||
const src_dir_path = install_dir.options.source_dir.getPath2(b, step);
|
||||
var src_dir = b.build_root.handle.openDir(src_dir_path, .{ .iterate = true }) catch |err| {
|
||||
return step.fail("unable to open source directory '{}{s}': {s}", .{
|
||||
b.build_root, src_dir_path, @errorName(err),
|
||||
@ -73,12 +72,12 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
var it = try src_dir.walk(arena);
|
||||
var all_cached = true;
|
||||
next_entry: while (try it.next()) |entry| {
|
||||
for (self.options.exclude_extensions) |ext| {
|
||||
for (install_dir.options.exclude_extensions) |ext| {
|
||||
if (mem.endsWith(u8, entry.path, ext)) {
|
||||
continue :next_entry;
|
||||
}
|
||||
}
|
||||
if (self.options.include_extensions) |incs| {
|
||||
if (install_dir.options.include_extensions) |incs| {
|
||||
var found = false;
|
||||
for (incs) |inc| {
|
||||
if (mem.endsWith(u8, entry.path, inc)) {
|
||||
@ -90,14 +89,14 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
}
|
||||
|
||||
// relative to src build root
|
||||
const src_sub_path = try fs.path.join(arena, &.{ src_dir_path, entry.path });
|
||||
const dest_path = try fs.path.join(arena, &.{ dest_prefix, entry.path });
|
||||
const src_sub_path = b.pathJoin(&.{ src_dir_path, entry.path });
|
||||
const dest_path = b.pathJoin(&.{ dest_prefix, entry.path });
|
||||
const cwd = fs.cwd();
|
||||
|
||||
switch (entry.kind) {
|
||||
.directory => try cwd.makePath(dest_path),
|
||||
.file => {
|
||||
for (self.options.blank_extensions) |ext| {
|
||||
for (install_dir.options.blank_extensions) |ext| {
|
||||
if (mem.endsWith(u8, entry.path, ext)) {
|
||||
try b.truncateFile(dest_path);
|
||||
continue :next_entry;
|
||||
|
||||
@ -5,7 +5,7 @@ const InstallDir = std.Build.InstallDir;
|
||||
const InstallFile = @This();
|
||||
const assert = std.debug.assert;
|
||||
|
||||
pub const base_id = .install_file;
|
||||
pub const base_id: Step.Id = .install_file;
|
||||
|
||||
step: Step,
|
||||
source: LazyPath,
|
||||
@ -20,8 +20,8 @@ pub fn create(
|
||||
) *InstallFile {
|
||||
assert(dest_rel_path.len != 0);
|
||||
owner.pushInstalledFile(dir, dest_rel_path);
|
||||
const self = owner.allocator.create(InstallFile) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const install_file = owner.allocator.create(InstallFile) catch @panic("OOM");
|
||||
install_file.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = owner.fmt("install {s} to {s}", .{ source.getDisplayName(), dest_rel_path }),
|
||||
@ -32,16 +32,16 @@ pub fn create(
|
||||
.dir = dir.dupe(owner),
|
||||
.dest_rel_path = owner.dupePath(dest_rel_path),
|
||||
};
|
||||
source.addStepDependencies(&self.step);
|
||||
return self;
|
||||
source.addStepDependencies(&install_file.step);
|
||||
return install_file;
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self: *InstallFile = @fieldParentPtr("step", step);
|
||||
const full_src_path = self.source.getPath2(b, step);
|
||||
const full_dest_path = b.getInstallPath(self.dir, self.dest_rel_path);
|
||||
const install_file: *InstallFile = @fieldParentPtr("step", step);
|
||||
const full_src_path = install_file.source.getPath2(b, step);
|
||||
const full_dest_path = b.getInstallPath(install_file.dir, install_file.dest_rel_path);
|
||||
const cwd = std.fs.cwd();
|
||||
const prev = std.fs.Dir.updateFile(cwd, full_src_path, cwd, full_dest_path, .{}) catch |err| {
|
||||
return step.fail("unable to update file from '{s}' to '{s}': {s}", .{
|
||||
|
||||
@ -58,8 +58,8 @@ pub fn create(
|
||||
input_file: std.Build.LazyPath,
|
||||
options: Options,
|
||||
) *ObjCopy {
|
||||
const self = owner.allocator.create(ObjCopy) catch @panic("OOM");
|
||||
self.* = ObjCopy{
|
||||
const objcopy = owner.allocator.create(ObjCopy) catch @panic("OOM");
|
||||
objcopy.* = ObjCopy{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = owner.fmt("objcopy {s}", .{input_file.getDisplayName()}),
|
||||
@ -68,31 +68,31 @@ pub fn create(
|
||||
}),
|
||||
.input_file = input_file,
|
||||
.basename = options.basename orelse input_file.getDisplayName(),
|
||||
.output_file = std.Build.GeneratedFile{ .step = &self.step },
|
||||
.output_file_debug = if (options.strip != .none and options.extract_to_separate_file) std.Build.GeneratedFile{ .step = &self.step } else null,
|
||||
.output_file = std.Build.GeneratedFile{ .step = &objcopy.step },
|
||||
.output_file_debug = if (options.strip != .none and options.extract_to_separate_file) std.Build.GeneratedFile{ .step = &objcopy.step } else null,
|
||||
.format = options.format,
|
||||
.only_sections = options.only_sections,
|
||||
.pad_to = options.pad_to,
|
||||
.strip = options.strip,
|
||||
.compress_debug = options.compress_debug,
|
||||
};
|
||||
input_file.addStepDependencies(&self.step);
|
||||
return self;
|
||||
input_file.addStepDependencies(&objcopy.step);
|
||||
return objcopy;
|
||||
}
|
||||
|
||||
/// deprecated: use getOutput
|
||||
pub const getOutputSource = getOutput;
|
||||
|
||||
pub fn getOutput(self: *const ObjCopy) std.Build.LazyPath {
|
||||
return .{ .generated = &self.output_file };
|
||||
pub fn getOutput(objcopy: *const ObjCopy) std.Build.LazyPath {
|
||||
return .{ .generated = .{ .file = &objcopy.output_file } };
|
||||
}
|
||||
pub fn getOutputSeparatedDebug(self: *const ObjCopy) ?std.Build.LazyPath {
|
||||
return if (self.output_file_debug) |*file| .{ .generated = file } else null;
|
||||
pub fn getOutputSeparatedDebug(objcopy: *const ObjCopy) ?std.Build.LazyPath {
|
||||
return if (objcopy.output_file_debug) |*file| .{ .generated = .{ .file = file } } else null;
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const self: *ObjCopy = @fieldParentPtr("step", step);
|
||||
const objcopy: *ObjCopy = @fieldParentPtr("step", step);
|
||||
|
||||
var man = b.graph.cache.obtain();
|
||||
defer man.deinit();
|
||||
@ -101,24 +101,24 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
// bytes when ObjCopy implementation is modified incompatibly.
|
||||
man.hash.add(@as(u32, 0xe18b7baf));
|
||||
|
||||
const full_src_path = self.input_file.getPath(b);
|
||||
const full_src_path = objcopy.input_file.getPath2(b, step);
|
||||
_ = try man.addFile(full_src_path, null);
|
||||
man.hash.addOptionalListOfBytes(self.only_sections);
|
||||
man.hash.addOptional(self.pad_to);
|
||||
man.hash.addOptional(self.format);
|
||||
man.hash.add(self.compress_debug);
|
||||
man.hash.add(self.strip);
|
||||
man.hash.add(self.output_file_debug != null);
|
||||
man.hash.addOptionalListOfBytes(objcopy.only_sections);
|
||||
man.hash.addOptional(objcopy.pad_to);
|
||||
man.hash.addOptional(objcopy.format);
|
||||
man.hash.add(objcopy.compress_debug);
|
||||
man.hash.add(objcopy.strip);
|
||||
man.hash.add(objcopy.output_file_debug != null);
|
||||
|
||||
if (try step.cacheHit(&man)) {
|
||||
// Cache hit, skip subprocess execution.
|
||||
const digest = man.final();
|
||||
self.output_file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, self.basename,
|
||||
objcopy.output_file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, objcopy.basename,
|
||||
});
|
||||
if (self.output_file_debug) |*file| {
|
||||
if (objcopy.output_file_debug) |*file| {
|
||||
file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, b.fmt("{s}.debug", .{self.basename}),
|
||||
"o", &digest, b.fmt("{s}.debug", .{objcopy.basename}),
|
||||
});
|
||||
}
|
||||
return;
|
||||
@ -126,8 +126,8 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
|
||||
const digest = man.final();
|
||||
const cache_path = "o" ++ fs.path.sep_str ++ digest;
|
||||
const full_dest_path = try b.cache_root.join(b.allocator, &.{ cache_path, self.basename });
|
||||
const full_dest_path_debug = try b.cache_root.join(b.allocator, &.{ cache_path, b.fmt("{s}.debug", .{self.basename}) });
|
||||
const full_dest_path = try b.cache_root.join(b.allocator, &.{ cache_path, objcopy.basename });
|
||||
const full_dest_path_debug = try b.cache_root.join(b.allocator, &.{ cache_path, b.fmt("{s}.debug", .{objcopy.basename}) });
|
||||
b.cache_root.handle.makePath(cache_path) catch |err| {
|
||||
return step.fail("unable to make path {s}: {s}", .{ cache_path, @errorName(err) });
|
||||
};
|
||||
@ -135,28 +135,28 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
var argv = std.ArrayList([]const u8).init(b.allocator);
|
||||
try argv.appendSlice(&.{ b.graph.zig_exe, "objcopy" });
|
||||
|
||||
if (self.only_sections) |only_sections| {
|
||||
if (objcopy.only_sections) |only_sections| {
|
||||
for (only_sections) |only_section| {
|
||||
try argv.appendSlice(&.{ "-j", only_section });
|
||||
}
|
||||
}
|
||||
switch (self.strip) {
|
||||
switch (objcopy.strip) {
|
||||
.none => {},
|
||||
.debug => try argv.appendSlice(&.{"--strip-debug"}),
|
||||
.debug_and_symbols => try argv.appendSlice(&.{"--strip-all"}),
|
||||
}
|
||||
if (self.pad_to) |pad_to| {
|
||||
if (objcopy.pad_to) |pad_to| {
|
||||
try argv.appendSlice(&.{ "--pad-to", b.fmt("{d}", .{pad_to}) });
|
||||
}
|
||||
if (self.format) |format| switch (format) {
|
||||
if (objcopy.format) |format| switch (format) {
|
||||
.bin => try argv.appendSlice(&.{ "-O", "binary" }),
|
||||
.hex => try argv.appendSlice(&.{ "-O", "hex" }),
|
||||
.elf => try argv.appendSlice(&.{ "-O", "elf" }),
|
||||
};
|
||||
if (self.compress_debug) {
|
||||
if (objcopy.compress_debug) {
|
||||
try argv.appendSlice(&.{"--compress-debug-sections"});
|
||||
}
|
||||
if (self.output_file_debug != null) {
|
||||
if (objcopy.output_file_debug != null) {
|
||||
try argv.appendSlice(&.{b.fmt("--extract-to={s}", .{full_dest_path_debug})});
|
||||
}
|
||||
|
||||
@ -165,7 +165,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
try argv.append("--listen=-");
|
||||
_ = try step.evalZigProcess(argv.items, prog_node);
|
||||
|
||||
self.output_file.path = full_dest_path;
|
||||
if (self.output_file_debug) |*file| file.path = full_dest_path_debug;
|
||||
objcopy.output_file.path = full_dest_path;
|
||||
if (objcopy.output_file_debug) |*file| file.path = full_dest_path_debug;
|
||||
try man.writeManifest();
|
||||
}
|
||||
|
||||
@ -7,7 +7,7 @@ const LazyPath = std.Build.LazyPath;
|
||||
|
||||
const Options = @This();
|
||||
|
||||
pub const base_id = .options;
|
||||
pub const base_id: Step.Id = .options;
|
||||
|
||||
step: Step,
|
||||
generated_file: GeneratedFile,
|
||||
@ -17,8 +17,8 @@ args: std.ArrayList(Arg),
|
||||
encountered_types: std.StringHashMap(void),
|
||||
|
||||
pub fn create(owner: *std.Build) *Options {
|
||||
const self = owner.allocator.create(Options) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const options = owner.allocator.create(Options) catch @panic("OOM");
|
||||
options.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = base_id,
|
||||
.name = "options",
|
||||
@ -30,21 +30,21 @@ pub fn create(owner: *std.Build) *Options {
|
||||
.args = std.ArrayList(Arg).init(owner.allocator),
|
||||
.encountered_types = std.StringHashMap(void).init(owner.allocator),
|
||||
};
|
||||
self.generated_file = .{ .step = &self.step };
|
||||
options.generated_file = .{ .step = &options.step };
|
||||
|
||||
return self;
|
||||
return options;
|
||||
}
|
||||
|
||||
pub fn addOption(self: *Options, comptime T: type, name: []const u8, value: T) void {
|
||||
return addOptionFallible(self, T, name, value) catch @panic("unhandled error");
|
||||
pub fn addOption(options: *Options, comptime T: type, name: []const u8, value: T) void {
|
||||
return addOptionFallible(options, T, name, value) catch @panic("unhandled error");
|
||||
}
|
||||
|
||||
fn addOptionFallible(self: *Options, comptime T: type, name: []const u8, value: T) !void {
|
||||
const out = self.contents.writer();
|
||||
try printType(self, out, T, value, 0, name);
|
||||
fn addOptionFallible(options: *Options, comptime T: type, name: []const u8, value: T) !void {
|
||||
const out = options.contents.writer();
|
||||
try printType(options, out, T, value, 0, name);
|
||||
}
|
||||
|
||||
fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void {
|
||||
fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void {
|
||||
switch (T) {
|
||||
[]const []const u8 => {
|
||||
if (name) |payload| {
|
||||
@ -159,7 +159,7 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
||||
try out.print("{s} {{\n", .{@typeName(T)});
|
||||
for (value) |item| {
|
||||
try out.writeByteNTimes(' ', indent + 4);
|
||||
try printType(self, out, @TypeOf(item), item, indent + 4, null);
|
||||
try printType(options, out, @TypeOf(item), item, indent + 4, null);
|
||||
}
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
try out.writeAll("}");
|
||||
@ -183,7 +183,7 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
||||
try out.print("&[_]{s} {{\n", .{@typeName(p.child)});
|
||||
for (value) |item| {
|
||||
try out.writeByteNTimes(' ', indent + 4);
|
||||
try printType(self, out, @TypeOf(item), item, indent + 4, null);
|
||||
try printType(options, out, @TypeOf(item), item, indent + 4, null);
|
||||
}
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
try out.writeAll("}");
|
||||
@ -201,10 +201,10 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
||||
}
|
||||
|
||||
if (value) |inner| {
|
||||
try printType(self, out, @TypeOf(inner), inner, indent + 4, null);
|
||||
try printType(options, out, @TypeOf(inner), inner, indent + 4, null);
|
||||
// Pop the '\n' and ',' chars
|
||||
_ = self.contents.pop();
|
||||
_ = self.contents.pop();
|
||||
_ = options.contents.pop();
|
||||
_ = options.contents.pop();
|
||||
} else {
|
||||
try out.writeAll("null");
|
||||
}
|
||||
@ -231,7 +231,7 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
||||
return;
|
||||
},
|
||||
.Enum => |info| {
|
||||
try printEnum(self, out, T, info, indent);
|
||||
try printEnum(options, out, T, info, indent);
|
||||
|
||||
if (name) |some| {
|
||||
try out.print("pub const {}: {} = .{p_};\n", .{
|
||||
@ -243,14 +243,14 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
||||
return;
|
||||
},
|
||||
.Struct => |info| {
|
||||
try printStruct(self, out, T, info, indent);
|
||||
try printStruct(options, out, T, info, indent);
|
||||
|
||||
if (name) |some| {
|
||||
try out.print("pub const {}: {} = ", .{
|
||||
std.zig.fmtId(some),
|
||||
std.zig.fmtId(@typeName(T)),
|
||||
});
|
||||
try printStructValue(self, out, info, value, indent);
|
||||
try printStructValue(options, out, info, value, indent);
|
||||
}
|
||||
return;
|
||||
},
|
||||
@ -258,20 +258,20 @@ fn printType(self: *Options, out: anytype, comptime T: type, value: T, indent: u
|
||||
}
|
||||
}
|
||||
|
||||
fn printUserDefinedType(self: *Options, out: anytype, comptime T: type, indent: u8) !void {
|
||||
fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, indent: u8) !void {
|
||||
switch (@typeInfo(T)) {
|
||||
.Enum => |info| {
|
||||
return try printEnum(self, out, T, info, indent);
|
||||
return try printEnum(options, out, T, info, indent);
|
||||
},
|
||||
.Struct => |info| {
|
||||
return try printStruct(self, out, T, info, indent);
|
||||
return try printStruct(options, out, T, info, indent);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
}
|
||||
|
||||
fn printEnum(self: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void {
|
||||
const gop = try self.encountered_types.getOrPut(@typeName(T));
|
||||
fn printEnum(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void {
|
||||
const gop = try options.encountered_types.getOrPut(@typeName(T));
|
||||
if (gop.found_existing) return;
|
||||
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
@ -291,8 +291,8 @@ fn printEnum(self: *Options, out: anytype, comptime T: type, comptime val: std.b
|
||||
try out.writeAll("};\n");
|
||||
}
|
||||
|
||||
fn printStruct(self: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
|
||||
const gop = try self.encountered_types.getOrPut(@typeName(T));
|
||||
fn printStruct(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
|
||||
const gop = try options.encountered_types.getOrPut(@typeName(T));
|
||||
if (gop.found_existing) return;
|
||||
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
@ -325,9 +325,9 @@ fn printStruct(self: *Options, out: anytype, comptime T: type, comptime val: std
|
||||
switch (@typeInfo(@TypeOf(default_value))) {
|
||||
.Enum => try out.print(".{s},\n", .{@tagName(default_value)}),
|
||||
.Struct => |info| {
|
||||
try printStructValue(self, out, info, default_value, indent + 4);
|
||||
try printStructValue(options, out, info, default_value, indent + 4);
|
||||
},
|
||||
else => try printType(self, out, @TypeOf(default_value), default_value, indent, null),
|
||||
else => try printType(options, out, @TypeOf(default_value), default_value, indent, null),
|
||||
}
|
||||
} else {
|
||||
try out.writeAll(",\n");
|
||||
@ -340,17 +340,17 @@ fn printStruct(self: *Options, out: anytype, comptime T: type, comptime val: std
|
||||
try out.writeAll("};\n");
|
||||
|
||||
inline for (val.fields) |field| {
|
||||
try printUserDefinedType(self, out, field.type, 0);
|
||||
try printUserDefinedType(options, out, field.type, 0);
|
||||
}
|
||||
}
|
||||
|
||||
fn printStructValue(self: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void {
|
||||
fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void {
|
||||
try out.writeAll(".{\n");
|
||||
|
||||
if (struct_val.is_tuple) {
|
||||
inline for (struct_val.fields) |field| {
|
||||
try out.writeByteNTimes(' ', indent);
|
||||
try printType(self, out, @TypeOf(@field(val, field.name)), @field(val, field.name), indent, null);
|
||||
try printType(options, out, @TypeOf(@field(val, field.name)), @field(val, field.name), indent, null);
|
||||
}
|
||||
} else {
|
||||
inline for (struct_val.fields) |field| {
|
||||
@ -361,9 +361,9 @@ fn printStructValue(self: *Options, out: anytype, comptime struct_val: std.built
|
||||
switch (@typeInfo(@TypeOf(field_name))) {
|
||||
.Enum => try out.print(".{s},\n", .{@tagName(field_name)}),
|
||||
.Struct => |struct_info| {
|
||||
try printStructValue(self, out, struct_info, field_name, indent + 4);
|
||||
try printStructValue(options, out, struct_info, field_name, indent + 4);
|
||||
},
|
||||
else => try printType(self, out, @TypeOf(field_name), field_name, indent, null),
|
||||
else => try printType(options, out, @TypeOf(field_name), field_name, indent, null),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -379,25 +379,25 @@ fn printStructValue(self: *Options, out: anytype, comptime struct_val: std.built
|
||||
/// The value is the path in the cache dir.
|
||||
/// Adds a dependency automatically.
|
||||
pub fn addOptionPath(
|
||||
self: *Options,
|
||||
options: *Options,
|
||||
name: []const u8,
|
||||
path: LazyPath,
|
||||
) void {
|
||||
self.args.append(.{
|
||||
.name = self.step.owner.dupe(name),
|
||||
.path = path.dupe(self.step.owner),
|
||||
options.args.append(.{
|
||||
.name = options.step.owner.dupe(name),
|
||||
.path = path.dupe(options.step.owner),
|
||||
}) catch @panic("OOM");
|
||||
path.addStepDependencies(&self.step);
|
||||
path.addStepDependencies(&options.step);
|
||||
}
|
||||
|
||||
/// Deprecated: use `addOptionPath(options, name, artifact.getEmittedBin())` instead.
|
||||
pub fn addOptionArtifact(self: *Options, name: []const u8, artifact: *Step.Compile) void {
|
||||
return addOptionPath(self, name, artifact.getEmittedBin());
|
||||
pub fn addOptionArtifact(options: *Options, name: []const u8, artifact: *Step.Compile) void {
|
||||
return addOptionPath(options, name, artifact.getEmittedBin());
|
||||
}
|
||||
|
||||
pub fn createModule(self: *Options) *std.Build.Module {
|
||||
return self.step.owner.createModule(.{
|
||||
.root_source_file = self.getOutput(),
|
||||
pub fn createModule(options: *Options) *std.Build.Module {
|
||||
return options.step.owner.createModule(.{
|
||||
.root_source_file = options.getOutput(),
|
||||
});
|
||||
}
|
||||
|
||||
@ -406,8 +406,8 @@ pub const getSource = getOutput;
|
||||
|
||||
/// Returns the main artifact of this Build Step which is a Zig source file
|
||||
/// generated from the key-value pairs of the Options.
|
||||
pub fn getOutput(self: *Options) LazyPath {
|
||||
return .{ .generated = &self.generated_file };
|
||||
pub fn getOutput(options: *Options) LazyPath {
|
||||
return .{ .generated = .{ .file = &options.generated_file } };
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
@ -415,13 +415,13 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
|
||||
const b = step.owner;
|
||||
const self: *Options = @fieldParentPtr("step", step);
|
||||
const options: *Options = @fieldParentPtr("step", step);
|
||||
|
||||
for (self.args.items) |item| {
|
||||
self.addOption(
|
||||
for (options.args.items) |item| {
|
||||
options.addOption(
|
||||
[]const u8,
|
||||
item.name,
|
||||
item.path.getPath(b),
|
||||
item.path.getPath2(b, step),
|
||||
);
|
||||
}
|
||||
|
||||
@ -432,10 +432,10 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
// Random bytes to make unique. Refresh this with new random bytes when
|
||||
// implementation is modified in a non-backwards-compatible way.
|
||||
hash.add(@as(u32, 0xad95e922));
|
||||
hash.addBytes(self.contents.items);
|
||||
hash.addBytes(options.contents.items);
|
||||
const sub_path = "c" ++ fs.path.sep_str ++ hash.final() ++ fs.path.sep_str ++ basename;
|
||||
|
||||
self.generated_file.path = try b.cache_root.join(b.allocator, &.{sub_path});
|
||||
options.generated_file.path = try b.cache_root.join(b.allocator, &.{sub_path});
|
||||
|
||||
// Optimize for the hot path. Stat the file, and if it already exists,
|
||||
// cache hit.
|
||||
@ -464,7 +464,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
});
|
||||
};
|
||||
|
||||
b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = self.contents.items }) catch |err| {
|
||||
b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = options.contents.items }) catch |err| {
|
||||
return step.fail("unable to write options to '{}{s}': {s}", .{
|
||||
b.cache_root, tmp_sub_path, @errorName(err),
|
||||
});
|
||||
|
||||
@ -3,23 +3,23 @@ const fs = std.fs;
|
||||
const Step = std.Build.Step;
|
||||
const RemoveDir = @This();
|
||||
|
||||
pub const base_id = .remove_dir;
|
||||
pub const base_id: Step.Id = .remove_dir;
|
||||
|
||||
step: Step,
|
||||
dir_path: []const u8,
|
||||
|
||||
pub fn create(owner: *std.Build, dir_path: []const u8) *RemoveDir {
|
||||
const self = owner.allocator.create(RemoveDir) catch @panic("OOM");
|
||||
self.* = .{
|
||||
const remove_dir = owner.allocator.create(RemoveDir) catch @panic("OOM");
|
||||
remove_dir.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = .remove_dir,
|
||||
.id = base_id,
|
||||
.name = owner.fmt("RemoveDir {s}", .{dir_path}),
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
}),
|
||||
.dir_path = owner.dupePath(dir_path),
|
||||
};
|
||||
return self;
|
||||
return remove_dir;
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
@ -28,16 +28,16 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
|
||||
const b = step.owner;
|
||||
const self: *RemoveDir = @fieldParentPtr("step", step);
|
||||
const remove_dir: *RemoveDir = @fieldParentPtr("step", step);
|
||||
|
||||
b.build_root.handle.deleteTree(self.dir_path) catch |err| {
|
||||
b.build_root.handle.deleteTree(remove_dir.dir_path) catch |err| {
|
||||
if (b.build_root.path) |base| {
|
||||
return step.fail("unable to recursively delete path '{s}/{s}': {s}", .{
|
||||
base, self.dir_path, @errorName(err),
|
||||
base, remove_dir.dir_path, @errorName(err),
|
||||
});
|
||||
} else {
|
||||
return step.fail("unable to recursively delete path '{s}': {s}", .{
|
||||
self.dir_path, @errorName(err),
|
||||
remove_dir.dir_path, @errorName(err),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
File diff suppressed because it is too large
@ -5,7 +5,7 @@ const mem = std.mem;
|
||||
|
||||
const TranslateC = @This();
|
||||
|
||||
pub const base_id = .translate_c;
|
||||
pub const base_id: Step.Id = .translate_c;
|
||||
|
||||
step: Step,
|
||||
source: std.Build.LazyPath,
|
||||
@ -27,11 +27,11 @@ pub const Options = struct {
|
||||
};
|
||||
|
||||
pub fn create(owner: *std.Build, options: Options) *TranslateC {
|
||||
const self = owner.allocator.create(TranslateC) catch @panic("OOM");
|
||||
const translate_c = owner.allocator.create(TranslateC) catch @panic("OOM");
|
||||
const source = options.root_source_file.dupe(owner);
|
||||
self.* = TranslateC{
|
||||
translate_c.* = TranslateC{
|
||||
.step = Step.init(.{
|
||||
.id = .translate_c,
|
||||
.id = base_id,
|
||||
.name = "translate-c",
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
@ -42,12 +42,12 @@ pub fn create(owner: *std.Build, options: Options) *TranslateC {
|
||||
.out_basename = undefined,
|
||||
.target = options.target,
|
||||
.optimize = options.optimize,
|
||||
.output_file = std.Build.GeneratedFile{ .step = &self.step },
|
||||
.output_file = std.Build.GeneratedFile{ .step = &translate_c.step },
|
||||
.link_libc = options.link_libc,
|
||||
.use_clang = options.use_clang,
|
||||
};
|
||||
source.addStepDependencies(&self.step);
|
||||
return self;
|
||||
source.addStepDependencies(&translate_c.step);
|
||||
return translate_c;
|
||||
}
|
||||
|
||||
pub const AddExecutableOptions = struct {
|
||||
@ -58,18 +58,18 @@ pub const AddExecutableOptions = struct {
|
||||
linkage: ?std.builtin.LinkMode = null,
|
||||
};
|
||||
|
||||
pub fn getOutput(self: *TranslateC) std.Build.LazyPath {
|
||||
return .{ .generated = &self.output_file };
|
||||
pub fn getOutput(translate_c: *TranslateC) std.Build.LazyPath {
|
||||
return .{ .generated = .{ .file = &translate_c.output_file } };
|
||||
}
|
||||
|
||||
/// Creates a step to build an executable from the translated source.
|
||||
pub fn addExecutable(self: *TranslateC, options: AddExecutableOptions) *Step.Compile {
|
||||
return self.step.owner.addExecutable(.{
|
||||
.root_source_file = self.getOutput(),
|
||||
pub fn addExecutable(translate_c: *TranslateC, options: AddExecutableOptions) *Step.Compile {
|
||||
return translate_c.step.owner.addExecutable(.{
|
||||
.root_source_file = translate_c.getOutput(),
|
||||
.name = options.name orelse "translated_c",
|
||||
.version = options.version,
|
||||
.target = options.target orelse self.target,
|
||||
.optimize = options.optimize orelse self.optimize,
|
||||
.target = options.target orelse translate_c.target,
|
||||
.optimize = options.optimize orelse translate_c.optimize,
|
||||
.linkage = options.linkage,
|
||||
});
|
||||
}
|
||||
@ -77,90 +77,87 @@ pub fn addExecutable(self: *TranslateC, options: AddExecutableOptions) *Step.Com
|
||||
/// Creates a module from the translated source and adds it to the package's
|
||||
/// module set making it available to other packages which depend on this one.
|
||||
/// `createModule` can be used instead to create a private module.
|
||||
pub fn addModule(self: *TranslateC, name: []const u8) *std.Build.Module {
|
||||
return self.step.owner.addModule(name, .{
|
||||
.root_source_file = self.getOutput(),
|
||||
pub fn addModule(translate_c: *TranslateC, name: []const u8) *std.Build.Module {
|
||||
return translate_c.step.owner.addModule(name, .{
|
||||
.root_source_file = translate_c.getOutput(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Creates a private module from the translated source to be used by the
|
||||
/// current package, but not exposed to other packages depending on this one.
|
||||
/// `addModule` can be used instead to create a public module.
|
||||
pub fn createModule(self: *TranslateC) *std.Build.Module {
|
||||
return self.step.owner.createModule(.{
|
||||
.root_source_file = self.getOutput(),
|
||||
pub fn createModule(translate_c: *TranslateC) *std.Build.Module {
|
||||
return translate_c.step.owner.createModule(.{
|
||||
.root_source_file = translate_c.getOutput(),
|
||||
});
|
||||
}
|
||||
|
||||
pub fn addIncludeDir(self: *TranslateC, include_dir: []const u8) void {
|
||||
self.include_dirs.append(self.step.owner.dupePath(include_dir)) catch @panic("OOM");
|
||||
pub fn addIncludeDir(translate_c: *TranslateC, include_dir: []const u8) void {
|
||||
translate_c.include_dirs.append(translate_c.step.owner.dupePath(include_dir)) catch @panic("OOM");
|
||||
}
|
||||
|
||||
pub fn addCheckFile(self: *TranslateC, expected_matches: []const []const u8) *Step.CheckFile {
|
||||
pub fn addCheckFile(translate_c: *TranslateC, expected_matches: []const []const u8) *Step.CheckFile {
|
||||
return Step.CheckFile.create(
|
||||
self.step.owner,
|
||||
self.getOutput(),
|
||||
translate_c.step.owner,
|
||||
translate_c.getOutput(),
|
||||
.{ .expected_matches = expected_matches },
|
||||
);
|
||||
}
|
||||
|
||||
/// If the value is omitted, it is set to 1.
|
||||
/// `name` and `value` need not live longer than the function call.
|
||||
pub fn defineCMacro(self: *TranslateC, name: []const u8, value: ?[]const u8) void {
|
||||
const macro = std.Build.constructCMacro(self.step.owner.allocator, name, value);
|
||||
self.c_macros.append(macro) catch @panic("OOM");
|
||||
pub fn defineCMacro(translate_c: *TranslateC, name: []const u8, value: ?[]const u8) void {
|
||||
const macro = std.Build.constructCMacro(translate_c.step.owner.allocator, name, value);
|
||||
translate_c.c_macros.append(macro) catch @panic("OOM");
|
||||
}
|
||||
|
||||
/// name_and_value looks like [name]=[value]. If the value is omitted, it is set to 1.
|
||||
pub fn defineCMacroRaw(self: *TranslateC, name_and_value: []const u8) void {
|
||||
self.c_macros.append(self.step.owner.dupe(name_and_value)) catch @panic("OOM");
|
||||
pub fn defineCMacroRaw(translate_c: *TranslateC, name_and_value: []const u8) void {
|
||||
translate_c.c_macros.append(translate_c.step.owner.dupe(name_and_value)) catch @panic("OOM");
|
||||
}
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const self: *TranslateC = @fieldParentPtr("step", step);
|
||||
const translate_c: *TranslateC = @fieldParentPtr("step", step);
|
||||
|
||||
var argv_list = std.ArrayList([]const u8).init(b.allocator);
|
||||
try argv_list.append(b.graph.zig_exe);
|
||||
try argv_list.append("translate-c");
|
||||
if (self.link_libc) {
|
||||
if (translate_c.link_libc) {
|
||||
try argv_list.append("-lc");
|
||||
}
|
||||
if (!self.use_clang) {
|
||||
if (!translate_c.use_clang) {
|
||||
try argv_list.append("-fno-clang");
|
||||
}
|
||||
|
||||
try argv_list.append("--listen=-");
|
||||
|
||||
if (!self.target.query.isNative()) {
|
||||
if (!translate_c.target.query.isNative()) {
|
||||
try argv_list.append("-target");
|
||||
try argv_list.append(try self.target.query.zigTriple(b.allocator));
|
||||
try argv_list.append(try translate_c.target.query.zigTriple(b.allocator));
|
||||
}
|
||||
|
||||
switch (self.optimize) {
|
||||
switch (translate_c.optimize) {
|
||||
.Debug => {}, // Skip since it's the default.
|
||||
else => try argv_list.append(b.fmt("-O{s}", .{@tagName(self.optimize)})),
|
||||
else => try argv_list.append(b.fmt("-O{s}", .{@tagName(translate_c.optimize)})),
|
||||
}
|
||||
|
||||
for (self.include_dirs.items) |include_dir| {
|
||||
for (translate_c.include_dirs.items) |include_dir| {
|
||||
try argv_list.append("-I");
|
||||
try argv_list.append(include_dir);
|
||||
}
|
||||
|
||||
for (self.c_macros.items) |c_macro| {
|
||||
for (translate_c.c_macros.items) |c_macro| {
|
||||
try argv_list.append("-D");
|
||||
try argv_list.append(c_macro);
|
||||
}
|
||||
|
||||
try argv_list.append(self.source.getPath(b));
|
||||
try argv_list.append(translate_c.source.getPath2(b, step));
|
||||
|
||||
const output_path = try step.evalZigProcess(argv_list.items, prog_node);
|
||||
|
||||
self.out_basename = fs.path.basename(output_path.?);
|
||||
translate_c.out_basename = fs.path.basename(output_path.?);
|
||||
const output_dir = fs.path.dirname(output_path.?).?;
|
||||
|
||||
self.output_file.path = try fs.path.join(
|
||||
b.allocator,
|
||||
&[_][]const u8{ output_dir, self.out_basename },
|
||||
);
|
||||
translate_c.output_file.path = b.pathJoin(&.{ output_dir, translate_c.out_basename });
|
||||
}
|
||||
|
||||
@ -23,15 +23,15 @@ directories: std.ArrayListUnmanaged(*Directory),
|
||||
output_source_files: std.ArrayListUnmanaged(OutputSourceFile),
|
||||
generated_directory: std.Build.GeneratedFile,
|
||||
|
||||
pub const base_id = .write_file;
|
||||
pub const base_id: Step.Id = .write_file;
|
||||
|
||||
pub const File = struct {
|
||||
generated_file: std.Build.GeneratedFile,
|
||||
sub_path: []const u8,
|
||||
contents: Contents,
|
||||
|
||||
pub fn getPath(self: *File) std.Build.LazyPath {
|
||||
return .{ .generated = &self.generated_file };
|
||||
pub fn getPath(file: *File) std.Build.LazyPath {
|
||||
return .{ .generated = .{ .file = &file.generated_file } };
|
||||
}
|
||||
};
|
||||
|
||||
@ -49,16 +49,16 @@ pub const Directory = struct {
|
||||
/// `exclude_extensions` takes precedence over `include_extensions`.
|
||||
include_extensions: ?[]const []const u8 = null,
|
||||
|
||||
pub fn dupe(self: Options, b: *std.Build) Options {
|
||||
pub fn dupe(opts: Options, b: *std.Build) Options {
|
||||
return .{
|
||||
.exclude_extensions = b.dupeStrings(self.exclude_extensions),
|
||||
.include_extensions = if (self.include_extensions) |incs| b.dupeStrings(incs) else null,
|
||||
.exclude_extensions = b.dupeStrings(opts.exclude_extensions),
|
||||
.include_extensions = if (opts.include_extensions) |incs| b.dupeStrings(incs) else null,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn getPath(self: *Directory) std.Build.LazyPath {
|
||||
return .{ .generated = &self.generated_dir };
|
||||
pub fn getPath(dir: *Directory) std.Build.LazyPath {
|
||||
return .{ .generated = .{ .file = &dir.generated_dir } };
|
||||
}
|
||||
};
|
||||
|
||||
@ -73,10 +73,10 @@ pub const Contents = union(enum) {
|
||||
};
|
||||
|
||||
pub fn create(owner: *std.Build) *WriteFile {
|
||||
const wf = owner.allocator.create(WriteFile) catch @panic("OOM");
|
||||
wf.* = .{
|
||||
const write_file = owner.allocator.create(WriteFile) catch @panic("OOM");
|
||||
write_file.* = .{
|
||||
.step = Step.init(.{
|
||||
.id = .write_file,
|
||||
.id = base_id,
|
||||
.name = "WriteFile",
|
||||
.owner = owner,
|
||||
.makeFn = make,
|
||||
@ -84,22 +84,22 @@ pub fn create(owner: *std.Build) *WriteFile {
|
||||
.files = .{},
|
||||
.directories = .{},
|
||||
.output_source_files = .{},
|
||||
.generated_directory = .{ .step = &wf.step },
|
||||
.generated_directory = .{ .step = &write_file.step },
|
||||
};
|
||||
return wf;
|
||||
return write_file;
|
||||
}
|
||||
|
||||
pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.LazyPath {
|
||||
const b = wf.step.owner;
|
||||
pub fn add(write_file: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.LazyPath {
|
||||
const b = write_file.step.owner;
|
||||
const gpa = b.allocator;
|
||||
const file = gpa.create(File) catch @panic("OOM");
|
||||
file.* = .{
|
||||
.generated_file = .{ .step = &wf.step },
|
||||
.generated_file = .{ .step = &write_file.step },
|
||||
.sub_path = b.dupePath(sub_path),
|
||||
.contents = .{ .bytes = b.dupe(bytes) },
|
||||
};
|
||||
wf.files.append(gpa, file) catch @panic("OOM");
|
||||
wf.maybeUpdateName();
|
||||
write_file.files.append(gpa, file) catch @panic("OOM");
|
||||
write_file.maybeUpdateName();
|
||||
return file.getPath();
|
||||
}
|
||||
|
||||
@ -110,19 +110,19 @@ pub fn add(wf: *WriteFile, sub_path: []const u8, bytes: []const u8) std.Build.La
|
||||
/// include sub-directories, in which case this step will ensure the
|
||||
/// required sub-path exists.
|
||||
/// This is the option expected to be used most commonly with `addCopyFile`.
|
||||
pub fn addCopyFile(wf: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) std.Build.LazyPath {
|
||||
const b = wf.step.owner;
|
||||
pub fn addCopyFile(write_file: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) std.Build.LazyPath {
|
||||
const b = write_file.step.owner;
|
||||
const gpa = b.allocator;
|
||||
const file = gpa.create(File) catch @panic("OOM");
|
||||
file.* = .{
|
||||
.generated_file = .{ .step = &wf.step },
|
||||
.generated_file = .{ .step = &write_file.step },
|
||||
.sub_path = b.dupePath(sub_path),
|
||||
.contents = .{ .copy = source },
|
||||
};
|
||||
wf.files.append(gpa, file) catch @panic("OOM");
|
||||
write_file.files.append(gpa, file) catch @panic("OOM");
|
||||
|
||||
wf.maybeUpdateName();
|
||||
source.addStepDependencies(&wf.step);
|
||||
write_file.maybeUpdateName();
|
||||
source.addStepDependencies(&write_file.step);
|
||||
return file.getPath();
|
||||
}
|
||||
|
||||
@ -130,24 +130,24 @@ pub fn addCopyFile(wf: *WriteFile, source: std.Build.LazyPath, sub_path: []const
|
||||
/// relative to this step's generated directory.
|
||||
/// The returned value is a lazy path to the generated subdirectory.
|
||||
pub fn addCopyDirectory(
|
||||
wf: *WriteFile,
|
||||
write_file: *WriteFile,
|
||||
source: std.Build.LazyPath,
|
||||
sub_path: []const u8,
|
||||
options: Directory.Options,
|
||||
) std.Build.LazyPath {
|
||||
const b = wf.step.owner;
|
||||
const b = write_file.step.owner;
|
||||
const gpa = b.allocator;
|
||||
const dir = gpa.create(Directory) catch @panic("OOM");
|
||||
dir.* = .{
|
||||
.source = source.dupe(b),
|
||||
.sub_path = b.dupePath(sub_path),
|
||||
.options = options.dupe(b),
|
||||
.generated_dir = .{ .step = &wf.step },
|
||||
.generated_dir = .{ .step = &write_file.step },
|
||||
};
|
||||
wf.directories.append(gpa, dir) catch @panic("OOM");
|
||||
write_file.directories.append(gpa, dir) catch @panic("OOM");
|
||||
|
||||
wf.maybeUpdateName();
|
||||
source.addStepDependencies(&wf.step);
|
||||
write_file.maybeUpdateName();
|
||||
source.addStepDependencies(&write_file.step);
|
||||
return dir.getPath();
|
||||
}
|
||||
|
||||
@ -156,13 +156,13 @@ pub fn addCopyDirectory(
|
||||
/// used as part of the normal build process, but as a utility occasionally
|
||||
/// run by a developer with intent to modify source files and then commit
|
||||
/// those changes to version control.
|
||||
pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) void {
|
||||
const b = wf.step.owner;
|
||||
wf.output_source_files.append(b.allocator, .{
|
||||
pub fn addCopyFileToSource(write_file: *WriteFile, source: std.Build.LazyPath, sub_path: []const u8) void {
|
||||
const b = write_file.step.owner;
|
||||
write_file.output_source_files.append(b.allocator, .{
|
||||
.contents = .{ .copy = source },
|
||||
.sub_path = sub_path,
|
||||
}) catch @panic("OOM");
|
||||
source.addStepDependencies(&wf.step);
|
||||
source.addStepDependencies(&write_file.step);
|
||||
}
|
||||
|
||||
/// A path relative to the package root.
|
||||
@ -170,9 +170,9 @@ pub fn addCopyFileToSource(wf: *WriteFile, source: std.Build.LazyPath, sub_path:
|
||||
/// used as part of the normal build process, but as a utility occasionally
|
||||
/// run by a developer with intent to modify source files and then commit
|
||||
/// those changes to version control.
|
||||
pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8) void {
|
||||
const b = wf.step.owner;
|
||||
wf.output_source_files.append(b.allocator, .{
|
||||
pub fn addBytesToSource(write_file: *WriteFile, bytes: []const u8, sub_path: []const u8) void {
|
||||
const b = write_file.step.owner;
|
||||
write_file.output_source_files.append(b.allocator, .{
|
||||
.contents = .{ .bytes = bytes },
|
||||
.sub_path = sub_path,
|
||||
}) catch @panic("OOM");
|
||||
@ -180,20 +180,20 @@ pub fn addBytesToSource(wf: *WriteFile, bytes: []const u8, sub_path: []const u8)
|
||||
|
||||
/// Returns a `LazyPath` representing the base directory that contains all the
|
||||
/// files from this `WriteFile`.
|
||||
pub fn getDirectory(wf: *WriteFile) std.Build.LazyPath {
|
||||
return .{ .generated = &wf.generated_directory };
|
||||
pub fn getDirectory(write_file: *WriteFile) std.Build.LazyPath {
|
||||
return .{ .generated = .{ .file = &write_file.generated_directory } };
|
||||
}
|
||||
|
||||
fn maybeUpdateName(wf: *WriteFile) void {
|
||||
if (wf.files.items.len == 1 and wf.directories.items.len == 0) {
|
||||
fn maybeUpdateName(write_file: *WriteFile) void {
|
||||
if (write_file.files.items.len == 1 and write_file.directories.items.len == 0) {
|
||||
// First time adding a file; update name.
|
||||
if (std.mem.eql(u8, wf.step.name, "WriteFile")) {
|
||||
wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.files.items[0].sub_path});
|
||||
if (std.mem.eql(u8, write_file.step.name, "WriteFile")) {
|
||||
write_file.step.name = write_file.step.owner.fmt("WriteFile {s}", .{write_file.files.items[0].sub_path});
|
||||
}
|
||||
} else if (wf.directories.items.len == 1 and wf.files.items.len == 0) {
|
||||
} else if (write_file.directories.items.len == 1 and write_file.files.items.len == 0) {
|
||||
// First time adding a directory; update name.
|
||||
if (std.mem.eql(u8, wf.step.name, "WriteFile")) {
|
||||
wf.step.name = wf.step.owner.fmt("WriteFile {s}", .{wf.directories.items[0].sub_path});
|
||||
if (std.mem.eql(u8, write_file.step.name, "WriteFile")) {
|
||||
write_file.step.name = write_file.step.owner.fmt("WriteFile {s}", .{write_file.directories.items[0].sub_path});
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -201,14 +201,14 @@ fn maybeUpdateName(wf: *WriteFile) void {
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const wf: *WriteFile = @fieldParentPtr("step", step);
|
||||
const write_file: *WriteFile = @fieldParentPtr("step", step);
|
||||
|
||||
// Writing to source files is kind of an extra capability of this
|
||||
// WriteFile - arguably it should be a different step. But anyway here
|
||||
// it is, it happens unconditionally and does not interact with the other
|
||||
// files here.
|
||||
var any_miss = false;
|
||||
for (wf.output_source_files.items) |output_source_file| {
|
||||
for (write_file.output_source_files.items) |output_source_file| {
|
||||
if (fs.path.dirname(output_source_file.sub_path)) |dirname| {
|
||||
b.build_root.handle.makePath(dirname) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}': {s}", .{
|
||||
@ -226,7 +226,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
any_miss = true;
|
||||
},
|
||||
.copy => |file_source| {
|
||||
const source_path = file_source.getPath(b);
|
||||
const source_path = file_source.getPath2(b, step);
|
||||
const prev_status = fs.Dir.updateFile(
|
||||
fs.cwd(),
|
||||
source_path,
|
||||
@ -258,18 +258,18 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
// in a non-backwards-compatible way.
|
||||
man.hash.add(@as(u32, 0xd767ee59));
|
||||
|
||||
for (wf.files.items) |file| {
|
||||
for (write_file.files.items) |file| {
|
||||
man.hash.addBytes(file.sub_path);
|
||||
switch (file.contents) {
|
||||
.bytes => |bytes| {
|
||||
man.hash.addBytes(bytes);
|
||||
},
|
||||
.copy => |file_source| {
|
||||
_ = try man.addFile(file_source.getPath(b), null);
|
||||
_ = try man.addFile(file_source.getPath2(b, step), null);
|
||||
},
|
||||
}
|
||||
}
|
||||
for (wf.directories.items) |dir| {
|
||||
for (write_file.directories.items) |dir| {
|
||||
man.hash.addBytes(dir.source.getPath2(b, step));
|
||||
man.hash.addBytes(dir.sub_path);
|
||||
for (dir.options.exclude_extensions) |ext| man.hash.addBytes(ext);
|
||||
@ -278,19 +278,19 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
|
||||
if (try step.cacheHit(&man)) {
|
||||
const digest = man.final();
|
||||
for (wf.files.items) |file| {
|
||||
for (write_file.files.items) |file| {
|
||||
file.generated_file.path = try b.cache_root.join(b.allocator, &.{
|
||||
"o", &digest, file.sub_path,
|
||||
});
|
||||
}
|
||||
wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
|
||||
write_file.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
|
||||
return;
|
||||
}
|
||||
|
||||
const digest = man.final();
|
||||
const cache_path = "o" ++ fs.path.sep_str ++ digest;
|
||||
|
||||
wf.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
|
||||
write_file.generated_directory.path = try b.cache_root.join(b.allocator, &.{ "o", &digest });
|
||||
|
||||
var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}': {s}", .{
|
||||
@ -301,7 +301,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
|
||||
const cwd = fs.cwd();
|
||||
|
||||
for (wf.files.items) |file| {
|
||||
for (write_file.files.items) |file| {
|
||||
if (fs.path.dirname(file.sub_path)) |dirname| {
|
||||
cache_dir.makePath(dirname) catch |err| {
|
||||
return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
|
||||
@ -318,7 +318,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
};
|
||||
},
|
||||
.copy => |file_source| {
|
||||
const source_path = file_source.getPath(b);
|
||||
const source_path = file_source.getPath2(b, step);
|
||||
const prev_status = fs.Dir.updateFile(
|
||||
cwd,
|
||||
source_path,
|
||||
@ -347,7 +347,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
cache_path, file.sub_path,
|
||||
});
|
||||
}
|
||||
for (wf.directories.items) |dir| {
|
||||
for (write_file.directories.items) |dir| {
|
||||
const full_src_dir_path = dir.source.getPath2(b, step);
|
||||
const dest_dirname = dir.sub_path;
|
||||
|
||||
|
||||
@ -164,6 +164,9 @@
.dependencyFromBuildZig = .{
.path = "dependencyFromBuildZig",
},
.run_output_paths = .{
.path = "run_output_paths",
},
},
.paths = .{
"build.zig",

@ -18,7 +18,7 @@ pub fn build(b: *std.Build) void {

const exe = b.addExecutable(.{
.name = "main",
.root_source_file = .{ .path = "main.zig" },
.root_source_file = b.path("main.zig"),
.optimize = optimize,
.target = target,
});
@ -28,7 +28,7 @@ pub fn build(b: *std.Build) void {
.optimize = optimize,
.target = target,
});
lib.addCSourceFile(.{ .file = .{ .path = "shared_lib.c" }, .flags = &.{"-gdwarf"} });
lib.addCSourceFile(.{ .file = b.path("shared_lib.c"), .flags = &.{"-gdwarf"} });
lib.linkLibC();
exe.linkLibrary(lib);

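[Editor's note, not part of the diff] The change repeated across these build scripts swaps the literal LazyPath form `.{ .path = "..." }` for `b.path("...")`, which resolves the string against the package's build root and returns a std.Build.LazyPath. A minimal sketch of the new form, with hypothetical file names:

const std = @import("std");

pub fn build(b: *std.Build) void {
    const exe = b.addExecutable(.{
        .name = "demo", // hypothetical
        .root_source_file = b.path("src/demo.zig"), // resolved relative to the build root
        .target = b.standardTargetOptions(.{}),
        .optimize = b.standardOptimizeOption(.{}),
    });
    b.installArtifact(exe);
}
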
@ -5,7 +5,7 @@ pub fn build(b: *std.Build) void {
b.default_step = test_step;

const main = b.addTest(.{
.root_source_file = .{ .path = "main.zig" },
.root_source_file = b.path("main.zig"),
.optimize = b.standardOptimizeOption(.{}),
});
// TODO: actually check these two artifacts for correctness

@ -8,7 +8,7 @@ pub fn build(b: *std.Build) void {

const obj = b.addObject(.{
.name = "main",
.root_source_file = .{ .path = "main.zig" },
.root_source_file = b.path("main.zig"),
.optimize = optimize,
.target = b.host,
});

@ -5,15 +5,15 @@ pub fn build(b: *std.Build) void {
b.default_step = test_step;

const test1 = b.addTest(.{
.root_source_file = .{ .path = "test_root/empty.zig" },
.root_source_file = b.path("test_root/empty.zig"),
.test_runner = "src/main.zig",
});
const test2 = b.addTest(.{
.root_source_file = .{ .path = "src/empty.zig" },
.root_source_file = b.path("src/empty.zig"),
.test_runner = "src/main.zig",
});
const test3 = b.addTest(.{
.root_source_file = .{ .path = "empty.zig" },
.root_source_file = b.path("empty.zig"),
.test_runner = "src/main.zig",
});

@ -16,7 +16,7 @@ pub fn build(b: *std.Build) void {
const optimize: std.builtin.OptimizeMode = .Debug;
const obj = b.addObject(.{
.name = "issue_5825",
.root_source_file = .{ .path = "main.zig" },
.root_source_file = b.path("main.zig"),
.optimize = optimize,
.target = target,
});

@ -2,7 +2,7 @@ const std = @import("std");

pub fn build(b: *std.Build) void {
const main = b.addTest(.{
.root_source_file = .{ .path = "src/main.zig" },
.root_source_file = b.path("src/main.zig"),
.target = b.host,
.optimize = .Debug,
});

40 test/standalone/run_output_paths/build.zig Normal file
@ -0,0 +1,40 @@
const std = @import("std");

pub fn build(b: *std.Build) void {
const test_step = b.step("test", "Test it");
b.default_step = test_step;

const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});

const create_file_exe = b.addExecutable(.{
.name = "create_file",
.root_source_file = b.path("create_file.zig"),
.target = target,
.optimize = optimize,
});

const create_first = b.addRunArtifact(create_file_exe);
const first_dir = create_first.addOutputDirectoryArg("first");
create_first.addArg("hello1.txt");
test_step.dependOn(&b.addCheckFile(first_dir.path(b, "hello1.txt"), .{ .expected_matches = &.{
std.fs.path.sep_str ++
\\first
\\hello1.txt
\\Hello, world!
\\
,
} }).step);

const create_second = b.addRunArtifact(create_file_exe);
const second_dir = create_second.addPrefixedOutputDirectoryArg("--dir=", "second");
create_second.addArg("hello2.txt");
test_step.dependOn(&b.addCheckFile(second_dir.path(b, "hello2.txt"), .{ .expected_matches = &.{
std.fs.path.sep_str ++
\\second
\\hello2.txt
\\Hello, world!
\\
,
} }).step);
}
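[Editor's note, not part of the diff] As this new test exercises, addOutputDirectoryArg("first") appends an output-directory argument to the run command and returns a LazyPath for that directory, while addPrefixedOutputDirectoryArg("--dir=", "second") does the same but joins the prefix and the path into a single argument. The returned LazyPath can feed later steps; a minimal sketch under those assumptions (the artifact and file names are hypothetical):

const run = b.addRunArtifact(tool_exe); // tool_exe: a *std.Build.Step.Compile built earlier
const out_dir = run.addOutputDirectoryArg("out");
run.addArg("report.txt");
// Consume one file from the run's output directory in a later step.
const install = b.addInstallFile(out_dir.path(b, "report.txt"), "report.txt");
b.getInstallStep().dependOn(&install.step);
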
19 test/standalone/run_output_paths/create_file.zig Normal file
@ -0,0 +1,19 @@
const std = @import("std");

pub fn main() !void {
var args = try std.process.argsWithAllocator(std.heap.page_allocator);
_ = args.skip();
const dir_name = args.next().?;
const dir = try std.fs.cwd().openDir(if (std.mem.startsWith(u8, dir_name, "--dir="))
dir_name["--dir=".len..]
else
dir_name, .{});
const file_name = args.next().?;
const file = try dir.createFile(file_name, .{});
try file.writer().print(
\\{s}
\\{s}
\\Hello, world!
\\
, .{ dir_name, file_name });
}
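[Editor's note, not part of the diff] At run time the build runner replaces each output-directory argument with the directory it actually provisions for that run, so create_file is effectively invoked as something like `create_file <output-dir-for-first> hello1.txt` and `create_file --dir=<output-dir-for-second> hello2.txt`. The addCheckFile expectations in build.zig above then match the trailing `first`/`second` path component and the body that create_file writes.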
@ -29,7 +29,7 @@ pub fn build(b: *std.build.Builder) !void {
options.addOption(bool, "keep_sigpipe", keep_sigpipe);
const exe = b.addExecutable(.{
.name = "breakpipe",
.root_source_file = .{ .path = "breakpipe.zig" },
.root_source_file = b.path("breakpipe.zig"),
});
exe.addOptions("build_options", options);
const run = b.addRunArtifact(exe);

@ -11,7 +11,7 @@ pub fn build(b: *std.Build) !void {

const lib_gnu = b.addStaticLibrary(.{
.name = "toargv-gnu",
.root_source_file = .{ .path = "lib.zig" },
.root_source_file = b.path("lib.zig"),
.target = b.resolveTargetQuery(.{
.abi = .gnu,
}),
@ -25,7 +25,7 @@ pub fn build(b: *std.Build) !void {
.optimize = optimize,
});
verify_gnu.addCSourceFile(.{
.file = .{ .path = "verify.c" },
.file = b.path("verify.c"),
.flags = &.{ "-DUNICODE", "-D_UNICODE" },
});
verify_gnu.mingw_unicode_entry_point = true;
@ -34,7 +34,7 @@ pub fn build(b: *std.Build) !void {

const fuzz = b.addExecutable(.{
.name = "fuzz",
.root_source_file = .{ .path = "fuzz.zig" },
.root_source_file = b.path("fuzz.zig"),
.target = b.host,
.optimize = optimize,
});
@ -69,7 +69,7 @@ pub fn build(b: *std.Build) !void {
if (has_msvc) {
const lib_msvc = b.addStaticLibrary(.{
.name = "toargv-msvc",
.root_source_file = .{ .path = "lib.zig" },
.root_source_file = b.path("lib.zig"),
.target = b.resolveTargetQuery(.{
.abi = .msvc,
}),
@ -83,7 +83,7 @@ pub fn build(b: *std.Build) !void {
.optimize = optimize,
});
verify_msvc.addCSourceFile(.{
.file = .{ .path = "verify.c" },
.file = b.path("verify.c"),
.flags = &.{ "-DUNICODE", "-D_UNICODE" },
});
verify_msvc.linkLibrary(lib_msvc);

@ -36,7 +36,7 @@ fn add(
.file = b.path("res/zig.rc"),
.flags = &.{"/c65001"}, // UTF-8 code page
.include_paths = &.{
.{ .generated = &generated_h_step.generated_directory },
.{ .generated = .{ .file = &generated_h_step.generated_directory } },
},
});
exe.rc_includes = switch (rc_includes) {

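[Editor's note, not part of the diff] The wrapping shown above reflects a change in std.Build.LazyPath: the .generated variant is no longer a bare pointer to a GeneratedFile but a struct whose .file field holds that pointer (it appears to carry further optional fields, such as a sub-path, which this call does not use). Schematically, under that assumption:

const std = @import("std");

// Sketch only: build a LazyPath from a step's GeneratedFile under the new layout.
// Before this change, the equivalent expression was `.{ .generated = generated_dir }`.
fn lazyPathFromGenerated(generated_dir: *std.Build.GeneratedFile) std.Build.LazyPath {
    return .{ .generated = .{ .file = generated_dir } };
}
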
@ -12,14 +12,14 @@ pub fn build(b: *std.Build) void {

const hello = b.addExecutable(.{
.name = "hello",
.root_source_file = .{ .path = "hello.zig" },
.root_source_file = b.path("hello.zig"),
.optimize = optimize,
.target = target,
});

const main = b.addExecutable(.{
.name = "main",
.root_source_file = .{ .path = "main.zig" },
.root_source_file = b.path("main.zig"),
.optimize = optimize,
.target = target,
});