Merge pull request #14265 from ziglang/init-package-manager

Package Manager MVP
This commit is contained in:
Andrew Kelley 2023-01-12 18:49:15 -05:00 committed by GitHub
commit 7cb2f9222d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 1124 additions and 43 deletions

View File

@ -185,6 +185,7 @@ pub fn build(b: *Builder) !void {
exe_options.addOption(bool, "llvm_has_arc", llvm_has_arc);
exe_options.addOption(bool, "force_gpa", force_gpa);
exe_options.addOption(bool, "only_c", only_c);
exe_options.addOption(bool, "omit_pkg_fetching_code", false);
if (link_libc) {
exe.linkLibC();
@ -567,14 +568,14 @@ fn addCmakeCfgOptionsToExe(
// back to -lc++ and cross our fingers.
addCxxKnownPath(b, cfg, exe, b.fmt("libstdc++.{s}", .{lib_suffix}), "", need_cpp_includes) catch |err| switch (err) {
error.RequiredLibraryNotFound => {
exe.linkSystemLibrary("c++");
exe.linkLibCpp();
},
else => |e| return e,
};
exe.linkSystemLibrary("unwind");
},
.ios, .macos, .watchos, .tvos => {
exe.linkSystemLibrary("c++");
.ios, .macos, .watchos, .tvos, .windows => {
exe.linkLibCpp();
},
.freebsd => {
if (static) {

View File

@ -9,6 +9,8 @@ const process = std.process;
const ArrayList = std.ArrayList;
const File = std.fs.File;
pub const dependencies = @import("@dependencies");
pub fn main() !void {
// Here we use an ArenaAllocator backed by a DirectAllocator because a build is a short-lived,
// one shot program. We don't need to waste time freeing memory and finding places to squish
@ -207,7 +209,7 @@ pub fn main() !void {
builder.debug_log_scopes = debug_log_scopes.items;
builder.resolveInstallPrefix(install_prefix, dir_list);
try runBuild(builder);
try builder.runBuild(root);
if (builder.validateUserInputDidItFail())
return usageAndErr(builder, true, stderr_stream);
@ -223,19 +225,11 @@ pub fn main() !void {
};
}
/// Invokes the user's `root.build` function with the given builder, accepting
/// either a `void` or an error-union return type; any other return type for
/// `build` is rejected at compile time.
fn runBuild(builder: *Builder) anyerror!void {
    switch (@typeInfo(@typeInfo(@TypeOf(root.build)).Fn.return_type.?)) {
        .Void => root.build(builder),
        .ErrorUnion => try root.build(builder),
        else => @compileError("expected return type of build to be 'void' or '!void'"),
    }
}
fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void {
// run the build script to collect the options
if (!already_ran_build) {
builder.resolveInstallPrefix(null, .{});
try runBuild(builder);
try builder.runBuild(root);
}
try out_stream.print(

66
lib/std/Ini.zig Normal file
View File

@ -0,0 +1,66 @@
bytes: []const u8,

pub const SectionIterator = struct {
    ini: Ini,
    next_index: ?usize,
    header: []const u8,

    /// Returns the body of the next matching section, or null when exhausted.
    /// The returned slice points into `ini.bytes` (no allocation).
    pub fn next(it: *SectionIterator) ?[]const u8 {
        const source = it.ini.bytes;
        const section_start = it.next_index orelse return null;
        // A section body runs until the next section opener "\n[", or EOF.
        const section_end = mem.indexOfPos(u8, source, section_start, "\n[") orelse source.len;
        // Remember where the next occurrence of the requested header begins so
        // the following call resumes just past it.
        it.next_index = if (mem.indexOfPos(u8, source, section_start, it.header)) |pos|
            pos + it.header.len
        else
            null;
        return source[section_start..section_end];
    }
};

/// Asserts that `header` includes "\n[" at the beginning and "]\n" at the end.
/// `header` must remain valid for the lifetime of the iterator.
pub fn iterateSection(ini: Ini, header: []const u8) SectionIterator {
    assert(mem.startsWith(u8, header, "\n["));
    assert(mem.endsWith(u8, header, "]\n"));
    // The very first section of the file has no preceding newline, so the
    // initial search drops the leading "\n" from the header pattern.
    const first_header = header[1..];
    const start_index = if (mem.indexOf(u8, ini.bytes, first_header)) |i|
        i + first_header.len
    else
        null;
    return .{
        .ini = ini,
        .next_index = start_index,
        .header = header,
    };
}

const std = @import("std.zig");
const mem = std.mem;
const assert = std.debug.assert;
const Ini = @This();
const testing = std.testing;

test iterateSection {
    const example =
        \\[package]
        \\name=libffmpeg
        \\version=5.1.2
        \\
        \\[dependency]
        \\id=libz
        \\url=url1
        \\
        \\[dependency]
        \\id=libmp3lame
        \\url=url2
    ;
    var ini: Ini = .{ .bytes = example };
    var it = ini.iterateSection("\n[dependency]\n");

    const first = it.next() orelse return error.TestFailed;
    try testing.expectEqualStrings("id=libz\nurl=url1\n", first);

    const second = it.next() orelse return error.TestFailed;
    try testing.expectEqualStrings("id=libmp3lame\nurl=url2", second);

    try testing.expect(it.next() == null);
}

View File

@ -69,13 +69,15 @@ pub const Builder = struct {
search_prefixes: ArrayList([]const u8),
libc_file: ?[]const u8 = null,
installed_files: ArrayList(InstalledFile),
/// Path to the directory containing build.zig.
build_root: []const u8,
cache_root: []const u8,
global_cache_root: []const u8,
release_mode: ?std.builtin.Mode,
is_release: bool,
/// zig lib dir
override_lib_dir: ?[]const u8,
vcpkg_root: VcpkgRoot,
vcpkg_root: VcpkgRoot = .unattempted,
pkg_config_pkg_list: ?(PkgConfigError![]const PkgConfigPkg) = null,
args: ?[][]const u8 = null,
debug_log_scopes: []const []const u8 = &.{},
@ -100,6 +102,8 @@ pub const Builder = struct {
/// Information about the native target. Computed before build() is invoked.
host: NativeTargetInfo,
dep_prefix: []const u8 = "",
pub const ExecError = error{
ReadFailure,
ExitCodeFailure,
@ -223,7 +227,6 @@ pub const Builder = struct {
.is_release = false,
.override_lib_dir = null,
.install_path = undefined,
.vcpkg_root = VcpkgRoot{ .unattempted = {} },
.args = null,
.host = host,
};
@ -233,6 +236,92 @@ pub const Builder = struct {
return self;
}
/// Creates a child Builder for the dependency named `dep_name`, rooted at
/// `build_root`, then applies `args` (the parent build script's options for
/// this dependency) to it.
fn createChild(
    parent: *Builder,
    dep_name: []const u8,
    build_root: []const u8,
    args: anytype,
) !*Builder {
    const child = try createChildOnly(parent, dep_name, build_root);
    try applyArgs(child, args);
    return child;
}
/// Allocates and initializes a child Builder for the dependency `dep_name`.
/// Verbosity, target/host info, and directory settings are inherited from
/// `parent`; per-builder collections (options, steps, prefixes) start empty,
/// and install-related fields stay `undefined` until `applyArgs` resolves the
/// child's install prefix.
fn createChildOnly(parent: *Builder, dep_name: []const u8, build_root: []const u8) !*Builder {
    const allocator = parent.allocator;
    const child = try allocator.create(Builder);
    child.* = .{
        .allocator = allocator,
        .install_tls = .{
            .step = Step.initNoOp(.top_level, "install", allocator),
            .description = "Copy build artifacts to prefix path",
        },
        .uninstall_tls = .{
            .step = Step.init(.top_level, "uninstall", allocator, makeUninstall),
            .description = "Remove build artifacts from prefix path",
        },
        .user_input_options = UserInputOptionsMap.init(allocator),
        .available_options_map = AvailableOptionsMap.init(allocator),
        .available_options_list = ArrayList(AvailableOption).init(allocator),
        .verbose = parent.verbose,
        .verbose_link = parent.verbose_link,
        .verbose_cc = parent.verbose_cc,
        .verbose_air = parent.verbose_air,
        .verbose_llvm_ir = parent.verbose_llvm_ir,
        .verbose_cimport = parent.verbose_cimport,
        .verbose_llvm_cpu_features = parent.verbose_llvm_cpu_features,
        .prominent_compile_errors = parent.prominent_compile_errors,
        .color = parent.color,
        .reference_trace = parent.reference_trace,
        .invalid_user_input = false,
        .zig_exe = parent.zig_exe,
        .default_step = undefined,
        .env_map = parent.env_map,
        .top_level_steps = ArrayList(*TopLevelStep).init(allocator),
        .install_prefix = undefined,
        .dest_dir = parent.dest_dir,
        .lib_dir = parent.lib_dir,
        .exe_dir = parent.exe_dir,
        .h_dir = parent.h_dir,
        .install_path = parent.install_path,
        .sysroot = parent.sysroot,
        .search_prefixes = ArrayList([]const u8).init(allocator),
        .libc_file = parent.libc_file,
        .installed_files = ArrayList(InstalledFile).init(allocator),
        .build_root = build_root,
        .cache_root = parent.cache_root,
        .global_cache_root = parent.global_cache_root,
        .release_mode = parent.release_mode,
        .is_release = parent.is_release,
        .override_lib_dir = parent.override_lib_dir,
        .debug_log_scopes = parent.debug_log_scopes,
        .debug_compile_errors = parent.debug_compile_errors,
        .enable_darling = parent.enable_darling,
        .enable_qemu = parent.enable_qemu,
        .enable_rosetta = parent.enable_rosetta,
        .enable_wasmtime = parent.enable_wasmtime,
        .enable_wine = parent.enable_wine,
        .glibc_runtimes_dir = parent.glibc_runtimes_dir,
        .host = parent.host,
        // Namespaces this child's generated decls, e.g. "foo.bar." for a
        // dependency of a dependency; used by `dependency()` lookups.
        .dep_prefix = parent.fmt("{s}{s}.", .{ parent.dep_prefix, dep_name }),
    };
    try child.top_level_steps.append(&child.install_tls);
    try child.top_level_steps.append(&child.uninstall_tls);
    child.default_step = &child.install_tls.step;
    return child;
}
/// Applies the parent build script's `args` to this child builder and
/// resolves the child's install prefix. Currently `args` is ignored and the
/// prefix is a fixed "pkg" subdirectory of the cache root (see TODOs).
fn applyArgs(b: *Builder, args: anytype) !void {
    // TODO this function is the way that a build.zig file communicates
    // options to its dependencies. It is the programmatic way to give
    // command line arguments to a build.zig script.
    _ = args;
    // TODO create a hash based on the args and the package hash, use this
    // to compute the install prefix.
    const install_prefix = b.pathJoin(&.{ b.cache_root, "pkg" });
    b.resolveInstallPrefix(install_prefix, .{});
}
pub fn destroy(self: *Builder) void {
self.env_map.deinit();
self.top_level_steps.deinit();
@ -1068,6 +1157,10 @@ pub const Builder = struct {
return self.addInstallFileWithDir(source.dupe(self), .lib, dest_rel_path);
}
pub fn addInstallHeaderFile(b: *Builder, src_path: []const u8, dest_rel_path: []const u8) *InstallFileStep {
return b.addInstallFileWithDir(.{ .path = src_path }, .header, dest_rel_path);
}
pub fn addInstallRaw(self: *Builder, artifact: *LibExeObjStep, dest_filename: []const u8, options: InstallRawStep.CreateOptions) *InstallRawStep {
return InstallRawStep.create(self, artifact, dest_filename, options);
}
@ -1300,6 +1393,70 @@ pub const Builder = struct {
&[_][]const u8{ base_dir, dest_rel_path },
) catch unreachable;
}
/// Handle to a fetched package's builder, returned by `Builder.dependency`.
pub const Dependency = struct {
    builder: *Builder,

    /// Returns the dependency's installed artifact named `name`, found by
    /// scanning the dependency's install step. Panics when the name is
    /// ambiguous, and panics (after logging the available artifact names)
    /// when no artifact matches.
    pub fn artifact(d: *Dependency, name: []const u8) *LibExeObjStep {
        var found: ?*LibExeObjStep = null;
        for (d.builder.install_tls.step.dependencies.items) |dep_step| {
            // Only InstallArtifactStep entries carry a named artifact.
            const inst = dep_step.cast(InstallArtifactStep) orelse continue;
            if (mem.eql(u8, inst.artifact.name, name)) {
                if (found != null) panic("artifact name '{s}' is ambiguous", .{name});
                found = inst.artifact;
            }
        }
        return found orelse {
            for (d.builder.install_tls.step.dependencies.items) |dep_step| {
                const inst = dep_step.cast(InstallArtifactStep) orelse continue;
                log.info("available artifact: '{s}'", .{inst.artifact.name});
            }
            panic("unable to find artifact '{s}'", .{name});
        };
    }
};
/// Looks up the dependency named `name` for this builder. The available
/// dependencies are provided at comptime by the build runner's generated
/// "@dependencies" module; on a miss this prints the manifest path and exits
/// the build runner process.
pub fn dependency(b: *Builder, name: []const u8, args: anytype) *Dependency {
    const build_runner = @import("root");
    const deps = build_runner.dependencies;

    inline for (@typeInfo(deps.imports).Struct.decls) |decl| {
        // Generated decls are named "<dep_prefix><name>"; the length check
        // turns the prefix/suffix tests into an exact-name match.
        if (mem.startsWith(u8, decl.name, b.dep_prefix) and
            mem.endsWith(u8, decl.name, name) and
            decl.name.len == b.dep_prefix.len + name.len)
        {
            const build_zig = @field(deps.imports, decl.name);
            const build_root = @field(deps.build_root, decl.name);
            return dependencyInner(b, name, build_root, build_zig, args);
        }
    }

    const full_path = b.pathFromRoot("build.zig.ini");
    std.debug.print("no dependency named '{s}' in '{s}'\n", .{ name, full_path });
    std.process.exit(1);
}
/// Instantiates a dependency: creates a child builder rooted at `build_root`,
/// runs the dependency's comptime-known `build_zig` module against it, and
/// wraps the configured builder in a heap-allocated `Dependency`.
/// Failures here are fatal to the build runner (`catch unreachable`).
fn dependencyInner(
    b: *Builder,
    name: []const u8,
    build_root: []const u8,
    comptime build_zig: type,
    args: anytype,
) *Dependency {
    const sub_builder = b.createChild(name, build_root, args) catch unreachable;
    sub_builder.runBuild(build_zig) catch unreachable;
    const dep = b.allocator.create(Dependency) catch unreachable;
    dep.* = .{ .builder = sub_builder };
    return dep;
}
/// Invokes `build_zig.build` with this builder, accepting either a `void` or
/// an error-union return type; any other return type for `build` is rejected
/// at compile time.
pub fn runBuild(b: *Builder, build_zig: anytype) anyerror!void {
    switch (@typeInfo(@typeInfo(@TypeOf(build_zig.build)).Fn.return_type.?)) {
        .Void => build_zig.build(b),
        .ErrorUnion => try build_zig.build(b),
        else => @compileError("expected return type of build to be 'void' or '!void'"),
    }
}
};
test "builder.findProgram compiles" {

View File

@ -108,6 +108,7 @@ object_src: []const u8,
link_objects: ArrayList(LinkObject),
include_dirs: ArrayList(IncludeDir),
c_macros: ArrayList([]const u8),
installed_headers: ArrayList(*std.build.Step),
output_dir: ?[]const u8,
is_linking_libc: bool = false,
is_linking_libcpp: bool = false,
@ -370,6 +371,7 @@ fn initExtraArgs(
.lib_paths = ArrayList([]const u8).init(builder.allocator),
.rpaths = ArrayList([]const u8).init(builder.allocator),
.framework_dirs = ArrayList([]const u8).init(builder.allocator),
.installed_headers = ArrayList(*std.build.Step).init(builder.allocator),
.object_src = undefined,
.c_std = Builder.CStd.C99,
.override_lib_dir = null,
@ -472,6 +474,27 @@ pub fn installRaw(self: *LibExeObjStep, dest_filename: []const u8, options: Inst
return self.builder.installRaw(self, dest_filename, options);
}
/// Installs the header at `src_path` under its own basename in the header
/// install directory, and records the install step in `installed_headers` so
/// dependent steps can put the installed headers on their include path.
pub fn installHeader(a: *LibExeObjStep, src_path: []const u8) void {
    const header_step = a.builder.addInstallHeaderFile(src_path, fs.path.basename(src_path));
    a.installed_headers.append(&header_step.step) catch unreachable;
    a.builder.getInstallStep().dependOn(&header_step.step);
}
/// Installs every file under `src_dir_path` into the header install
/// directory, nested under `dest_rel_path`, and records the step in
/// `installed_headers` so dependent steps can expose the headers on their
/// include path.
pub fn installHeadersDirectory(
    a: *LibExeObjStep,
    src_dir_path: []const u8,
    dest_rel_path: []const u8,
) void {
    const install_dir = a.builder.addInstallDirectory(.{
        .source_dir = src_dir_path,
        .install_dir = .header,
        .install_subdir = dest_rel_path,
    });
    a.builder.getInstallStep().dependOn(&install_dir.step);
    a.installed_headers.append(&install_dir.step) catch unreachable;
}
/// Creates a `RunStep` with an executable built with `addExecutable`.
/// Add command line arguments with `addArg`.
pub fn run(exe: *LibExeObjStep) *RunStep {
@ -1362,7 +1385,7 @@ fn make(step: *Step) !void {
if (self.libc_file) |libc_file| {
try zig_args.append("--libc");
try zig_args.append(libc_file.getPath(self.builder));
try zig_args.append(libc_file.getPath(builder));
} else if (builder.libc_file) |libc_file| {
try zig_args.append("--libc");
try zig_args.append(libc_file);
@ -1577,7 +1600,7 @@ fn make(step: *Step) !void {
} else {
const need_cross_glibc = self.target.isGnuLibC() and self.is_linking_libc;
switch (self.builder.host.getExternalExecutor(self.target_info, .{
switch (builder.host.getExternalExecutor(self.target_info, .{
.qemu_fixes_dl = need_cross_glibc and builder.glibc_runtimes_dir != null,
.link_libc = self.is_linking_libc,
})) {
@ -1661,7 +1684,7 @@ fn make(step: *Step) !void {
switch (include_dir) {
.raw_path => |include_path| {
try zig_args.append("-I");
try zig_args.append(self.builder.pathFromRoot(include_path));
try zig_args.append(builder.pathFromRoot(include_path));
},
.raw_path_system => |include_path| {
if (builder.sysroot != null) {
@ -1670,7 +1693,7 @@ fn make(step: *Step) !void {
try zig_args.append("-isystem");
}
const resolved_include_path = self.builder.pathFromRoot(include_path);
const resolved_include_path = builder.pathFromRoot(include_path);
const common_include_path = if (builtin.os.tag == .windows and builder.sysroot != null and fs.path.isAbsolute(resolved_include_path)) blk: {
// We need to check for disk designator and strip it out from dir path so
@ -1686,10 +1709,21 @@ fn make(step: *Step) !void {
try zig_args.append(common_include_path);
},
.other_step => |other| if (other.emit_h) {
const h_path = other.getOutputHSource().getPath(self.builder);
try zig_args.append("-isystem");
try zig_args.append(fs.path.dirname(h_path).?);
.other_step => |other| {
if (other.emit_h) {
const h_path = other.getOutputHSource().getPath(builder);
try zig_args.append("-isystem");
try zig_args.append(fs.path.dirname(h_path).?);
}
if (other.installed_headers.items.len > 0) {
for (other.installed_headers.items) |install_step| {
try install_step.make();
}
try zig_args.append("-I");
try zig_args.append(builder.pathJoin(&.{
other.builder.install_prefix, "include",
}));
}
},
.config_header_step => |config_header| {
try zig_args.append("-I");
@ -1790,7 +1824,7 @@ fn make(step: *Step) !void {
if (self.override_lib_dir) |dir| {
try zig_args.append("--zig-lib-dir");
try zig_args.append(builder.pathFromRoot(dir));
} else if (self.builder.override_lib_dir) |dir| {
} else if (builder.override_lib_dir) |dir| {
try zig_args.append("--zig-lib-dir");
try zig_args.append(builder.pathFromRoot(dir));
}

View File

@ -17,6 +17,9 @@ const FCOMMENT = 1 << 4;
const max_string_len = 1024;
/// TODO: the fully qualified namespace to this declaration is
/// std.compress.gzip.GzipStream which has a redundant "gzip" in the name.
/// Instead, it should be `std.compress.gzip.Stream`.
pub fn GzipStream(comptime ReaderType: type) type {
return struct {
const Self = @This();

View File

@ -524,11 +524,133 @@ pub const Request = struct {
req.* = undefined;
}
pub const Reader = std.io.Reader(*Request, ReadError, read);
pub fn reader(req: *Request) Reader {
return .{ .context = req };
}
/// Reads until `buffer` is full; a short count indicates end of stream.
pub fn readAll(req: *Request, buffer: []u8) !usize {
    return req.readAtLeast(buffer, buffer.len);
}
pub fn read(req: *Request, buffer: []u8) !usize {
/// Errors that reading an HTTP response body can produce: the underlying
/// socket's read errors plus HTTP protocol, name resolution, resource
/// exhaustion, and TLS/certificate failures surfaced through the connection.
pub const ReadError = net.Stream.ReadError || error{
    // From HTTP protocol
    HttpHeadersInvalid,
    HttpHeadersExceededSizeLimit,
    HttpRedirectMissingLocation,
    HttpTransferEncodingUnsupported,
    HttpContentLengthUnknown,
    TooManyHttpRedirects,
    ShortHttpStatusLine,
    BadHttpVersion,
    HttpHeaderContinuationsUnsupported,
    UnsupportedUrlScheme,
    UriMissingHost,
    UnknownHostName,

    // Network problems
    NetworkUnreachable,
    HostLacksNetworkAddresses,
    TemporaryNameServerFailure,
    NameServerFailure,
    ProtocolFamilyNotAvailable,
    ProtocolNotSupported,

    // System resource problems
    ProcessFdQuotaExceeded,
    SystemFdQuotaExceeded,
    OutOfMemory,

    // TLS problems
    InsufficientEntropy,
    TlsConnectionTruncated,
    TlsRecordOverflow,
    TlsDecodeError,
    TlsAlert,
    TlsBadRecordMac,
    TlsBadLength,
    TlsIllegalParameter,
    TlsUnexpectedMessage,
    TlsDecryptFailure,
    CertificateFieldHasInvalidLength,
    CertificateHostMismatch,
    CertificatePublicKeyInvalid,
    CertificateExpired,
    CertificateFieldHasWrongDataType,
    CertificateIssuerMismatch,
    CertificateNotYetValid,
    CertificateSignatureAlgorithmMismatch,
    CertificateSignatureAlgorithmUnsupported,
    CertificateSignatureInvalid,
    CertificateSignatureInvalidLength,
    CertificateSignatureNamedCurveUnsupported,
    CertificateSignatureUnsupportedBitCount,
    TlsCertificateNotVerified,
    TlsBadSignatureScheme,
    TlsBadRsaSignatureBitCount,
    TlsDecryptError,
    UnsupportedCertificateVersion,
    CertificateTimeInvalid,
    CertificateHasUnrecognizedObjectId,
    CertificateHasInvalidBitString,

    // TODO: convert to higher level errors
    InvalidFormat,
    InvalidPort,
    UnexpectedCharacter,
    Overflow,
    InvalidCharacter,
    AddressFamilyNotSupported,
    AddressInUse,
    AddressNotAvailable,
    ConnectionPending,
    ConnectionRefused,
    FileNotFound,
    PermissionDenied,
    ServiceUnavailable,
    SocketTypeNotSupported,
    FileTooBig,
    LockViolation,
    NoSpaceLeft,
    NotOpenForWriting,
    InvalidEncoding,
    IdentityElement,
    NonCanonical,
    SignatureVerificationFailed,
    MessageTooLong,
    NegativeIntoUnsigned,
    TargetTooSmall,
    BufferTooSmall,
    InvalidSignature,
    NotSquare,
    DiskQuota,
    InvalidEnd,
    Incomplete,
    InvalidIpv4Mapping,
    InvalidIPAddressFormat,
    BadPathName,
    DeviceBusy,
    FileBusy,
    FileLocksNotSupported,
    InvalidHandle,
    InvalidUtf8,
    NameTooLong,
    NoDevice,
    PathAlreadyExists,
    PipeBusy,
    SharingViolation,
    SymLinkLoop,
    FileSystem,
    InterfaceNotFound,
    AlreadyBound,
    FileDescriptorNotASocket,
    NetworkSubsystemFailed,
    NotDir,
    ReadOnlyFileSystem,
};
/// Reads at least one byte into `buffer`; 0 indicates end of stream.
pub fn read(req: *Request, buffer: []u8) ReadError!usize {
    return req.readAtLeast(buffer, 1);
}
@ -671,7 +793,8 @@ pub const Request = struct {
}
},
.chunk_data => {
const sub_amt = @min(req.response.next_chunk_length, in.len);
// TODO https://github.com/ziglang/zig/issues/14039
const sub_amt = @intCast(usize, @min(req.response.next_chunk_length, in.len));
req.response.next_chunk_length -= sub_amt;
if (req.response.next_chunk_length > 0) {
if (in.ptr == buffer.ptr) {
@ -709,11 +832,15 @@ pub const Request = struct {
}
};
pub fn deinit(client: *Client, gpa: Allocator) void {
client.ca_bundle.deinit(gpa);
/// Frees resources owned by the client (the CA bundle) using the client's
/// own allocator, then poisons the value against accidental reuse.
pub fn deinit(client: *Client) void {
    client.ca_bundle.deinit(client.allocator);
    client.* = undefined;
}
/// Re-scans the system's root certificates into the client's CA bundle,
/// allocating with the client's allocator.
pub fn rescanRootCertificates(client: *Client) !void {
    return client.ca_bundle.rescan(client.allocator);
}
pub fn connect(client: *Client, host: []const u8, port: u16, protocol: Connection.Protocol) !Connection {
var conn: Connection = .{
.stream = try net.tcpConnectToHost(client.allocator, host, port),

View File

@ -114,6 +114,7 @@ pub const bufferedWriter = @import("io/buffered_writer.zig").bufferedWriter;
pub const BufferedReader = @import("io/buffered_reader.zig").BufferedReader;
pub const bufferedReader = @import("io/buffered_reader.zig").bufferedReader;
pub const bufferedReaderSize = @import("io/buffered_reader.zig").bufferedReaderSize;
pub const PeekStream = @import("io/peek_stream.zig").PeekStream;
pub const peekStream = @import("io/peek_stream.zig").peekStream;

View File

@ -45,8 +45,12 @@ pub fn BufferedReader(comptime buffer_size: usize, comptime ReaderType: type) ty
};
}
pub fn bufferedReader(underlying_stream: anytype) BufferedReader(4096, @TypeOf(underlying_stream)) {
return .{ .unbuffered_reader = underlying_stream };
/// Wraps `reader` in a `BufferedReader` with the default 4096-byte buffer.
pub fn bufferedReader(reader: anytype) BufferedReader(4096, @TypeOf(reader)) {
    return .{ .unbuffered_reader = reader };
}
/// Wraps `reader` in a `BufferedReader` with a caller-chosen comptime buffer
/// size, for callers that need something other than the 4096-byte default.
pub fn bufferedReaderSize(comptime size: usize, reader: anytype) BufferedReader(size, @TypeOf(reader)) {
    return .{ .unbuffered_reader = reader };
}
test "io.BufferedReader OneByte" {

View File

@ -30,10 +30,20 @@ pub fn Reader(
/// means the stream reached the end. Reaching the end of a stream is not an error
/// condition.
pub fn readAll(self: Self, buffer: []u8) Error!usize {
    // Delegate to readAtLeast, requiring the full buffer length.
    return readAtLeast(self, buffer, buffer.len);
}
/// Returns the number of bytes read, calling the underlying read
/// function the minimal number of times until the buffer has at least
/// `len` bytes filled. If the number read is less than `len` it means
/// the stream reached the end. Reaching the end of the stream is not
/// an error condition.
pub fn readAtLeast(self: Self, buffer: []u8, len: usize) Error!usize {
assert(len <= buffer.len);
var index: usize = 0;
while (index != buffer.len) {
while (index < len) {
const amt = try self.read(buffer[index..]);
if (amt == 0) return index;
if (amt == 0) break;
index += amt;
}
return index;

View File

@ -2414,6 +2414,9 @@ pub fn unlinkatW(dirfd: fd_t, sub_path_w: []const u16, flags: u32) UnlinkatError
pub const RenameError = error{
/// In WASI, this error may occur when the file descriptor does
/// not hold the required rights to rename a resource by path relative to it.
///
/// On Windows, this error may be returned instead of PathAlreadyExists when
/// renaming a directory over an existing directory.
AccessDenied,
FileBusy,
DiskQuota,

View File

@ -21,6 +21,7 @@ pub const EnumMap = enums.EnumMap;
pub const EnumSet = enums.EnumSet;
pub const HashMap = hash_map.HashMap;
pub const HashMapUnmanaged = hash_map.HashMapUnmanaged;
pub const Ini = @import("Ini.zig");
pub const MultiArrayList = @import("multi_array_list.zig").MultiArrayList;
pub const PackedIntArray = @import("packed_int_array.zig").PackedIntArray;
pub const PackedIntArrayEndian = @import("packed_int_array.zig").PackedIntArrayEndian;
@ -85,6 +86,7 @@ pub const rand = @import("rand.zig");
pub const sort = @import("sort.zig");
pub const simd = @import("simd.zig");
pub const ascii = @import("ascii.zig");
pub const tar = @import("tar.zig");
pub const testing = @import("testing.zig");
pub const time = @import("time.zig");
pub const tz = @import("tz.zig");

172
lib/std/tar.zig Normal file
View File

@ -0,0 +1,172 @@
/// Options controlling how a tar stream is extracted to the file system.
pub const Options = struct {
    /// Number of directory levels to skip when extracting files.
    strip_components: u32 = 0,
};
/// A view over one 512-byte tar header record.
pub const Header = struct {
    bytes: *const [512]u8,

    /// Type flag values from the tar header's typeflag byte. Non-exhaustive
    /// so unknown flags survive the cast and can be rejected by callers.
    pub const FileType = enum(u8) {
        normal = '0',
        hard_link = '1',
        symbolic_link = '2',
        character_special = '3',
        block_special = '4',
        directory = '5',
        fifo = '6',
        contiguous = '7',
        global_extended_header = 'g',
        extended_header = 'x',
        _,
    };

    /// Parses the octal size field (offset 124, 12 bytes). Leading '0'
    /// characters and trailing NULs are stripped; an all-zero field means 0.
    pub fn fileSize(header: Header) !u64 {
        const field = header.bytes[124..][0..12];
        const digits = std.mem.trimRight(u8, std.mem.trimLeft(u8, field, "0"), "\x00");
        if (digits.len == 0) return 0;
        return std.fmt.parseInt(u64, digits, 8);
    }

    /// True when the magic field (offset 257) marks this as a ustar header.
    pub fn is_ustar(header: Header) bool {
        return std.mem.eql(u8, header.bytes[257..][0..6], "ustar\x00");
    }

    /// Includes prefix concatenated, if any.
    /// Return value may point into Header buffer, or might point into the
    /// argument buffer.
    /// TODO: check against "../" and other nefarious things
    pub fn fullFileName(header: Header, buffer: *[255]u8) ![]const u8 {
        const file_name = header.name();
        if (!header.is_ustar())
            return file_name;
        const dir_prefix = header.prefix();
        if (dir_prefix.len == 0)
            return file_name;
        // Join as "<prefix>/<name>" into the caller-provided buffer.
        std.mem.copy(u8, buffer[0..dir_prefix.len], dir_prefix);
        buffer[dir_prefix.len] = '/';
        std.mem.copy(u8, buffer[dir_prefix.len + 1 ..], file_name);
        return buffer[0 .. dir_prefix.len + 1 + file_name.len];
    }

    /// NUL-terminated name field (offset 0, up to 100 bytes).
    pub fn name(header: Header) []const u8 {
        return header.str(0, 0 + 100);
    }

    /// NUL-terminated ustar prefix field (offset 345, up to 155 bytes).
    pub fn prefix(header: Header) []const u8 {
        return header.str(345, 345 + 155);
    }

    /// Returns the type flag; a zero byte (old tar archives) means normal.
    pub fn fileType(header: Header) FileType {
        const flag = header.bytes[156];
        return if (flag == 0) .normal else @intToEnum(FileType, flag);
    }

    /// Returns bytes[start..end] truncated at the first NUL, if any.
    fn str(header: Header, start: usize, end: usize) []const u8 {
        const field = header.bytes[start..end];
        const len = std.mem.indexOfScalar(u8, field, 0) orelse field.len;
        return field[0..len];
    }
};
/// Extracts a tar stream read from `reader` into `dir`. Only normal files,
/// directories, and (skipped) extended headers are handled; links and other
/// types return error.TarUnsupportedFileType.
pub fn pipeToFileSystem(dir: std.fs.Dir, reader: anytype, options: Options) !void {
    var file_name_buffer: [255]u8 = undefined;
    // Sliding scratch buffer; a multiple of 512 so record boundaries line up.
    var buffer: [512 * 8]u8 = undefined;
    var start: usize = 0;
    var end: usize = 0;
    header: while (true) {
        if (buffer.len - start < 1024) {
            // Not enough room after `start`; slide unread bytes to the front.
            std.mem.copy(u8, &buffer, buffer[start..end]);
            end -= start;
            start = 0;
        }
        // Top up so a full 512-byte header (plus lookahead) is buffered.
        const ask_header = @min(buffer.len - end, 1024 -| (end - start));
        end += try reader.readAtLeast(buffer[end..], ask_header);
        switch (end - start) {
            // No more bytes at a record boundary: clean end of archive.
            0 => return,
            1...511 => return error.UnexpectedEndOfStream,
            else => {},
        }
        const header: Header = .{ .bytes = buffer[start..][0..512] };
        start += 512;
        const file_size = try header.fileSize();
        // File data occupies whole 512-byte records; pad_len is the slack
        // after the file contents that must be skipped.
        const rounded_file_size = std.mem.alignForwardGeneric(u64, file_size, 512);
        const pad_len = @intCast(usize, rounded_file_size - file_size);
        const unstripped_file_name = try header.fullFileName(&file_name_buffer);
        switch (header.fileType()) {
            .directory => {
                const file_name = try stripComponents(unstripped_file_name, options.strip_components);
                // A fully-stripped directory name means the directory itself
                // was one of the stripped components; nothing to create.
                if (file_name.len != 0) {
                    try dir.makeDir(file_name);
                }
            },
            .normal => {
                if (file_size == 0 and unstripped_file_name.len == 0) return;
                const file_name = try stripComponents(unstripped_file_name, options.strip_components);
                var file = try dir.createFile(file_name, .{});
                defer file.close();
                var file_off: usize = 0;
                while (true) {
                    if (buffer.len - start < 1024) {
                        std.mem.copy(u8, &buffer, buffer[start..end]);
                        end -= start;
                        start = 0;
                    }
                    // Ask for the rounded up file size + 512 for the next header.
                    // TODO: https://github.com/ziglang/zig/issues/14039
                    const ask = @intCast(usize, @min(
                        buffer.len - end,
                        rounded_file_size + 512 - file_off -| (end - start),
                    ));
                    end += try reader.readAtLeast(buffer[end..], ask);
                    if (end - start < ask) return error.UnexpectedEndOfStream;
                    // Write everything buffered up to the end of the file data.
                    // TODO: https://github.com/ziglang/zig/issues/14039
                    const slice = buffer[start..@intCast(usize, @min(file_size - file_off + start, end))];
                    try file.writeAll(slice);
                    file_off += slice.len;
                    start += slice.len;
                    if (file_off >= file_size) {
                        start += pad_len;
                        // Guaranteed since we use a buffer divisible by 512.
                        assert(start <= end);
                        continue :header;
                    }
                }
            },
            .global_extended_header, .extended_header => {
                // Extended headers are skipped, but must fit in the buffer.
                if (start + rounded_file_size > end) return error.TarHeadersTooBig;
                start = @intCast(usize, start + rounded_file_size);
            },
            .hard_link => return error.TarUnsupportedFileType,
            .symbolic_link => return error.TarUnsupportedFileType,
            else => return error.TarUnsupportedFileType,
        }
    }
}
/// Returns `path` with its first `count` '/'-separated components removed.
/// Errors when `path` has fewer than `count` components; the returned slice
/// points into `path`.
fn stripComponents(path: []const u8, count: u32) ![]const u8 {
    var offset: usize = 0;
    var remaining = count;
    while (remaining > 0) : (remaining -= 1) {
        const slash = std.mem.indexOfScalarPos(u8, path, offset, '/') orelse
            return error.TarComponentsOutsideStrippedPrefix;
        offset = slash + 1;
    }
    return path[offset..];
}
// Exercises stripComponents across 0, 1, and 2 stripped levels.
test stripComponents {
    const expectEqualStrings = std.testing.expectEqualStrings;
    try expectEqualStrings("a/b/c", try stripComponents("a/b/c", 0));
    try expectEqualStrings("b/c", try stripComponents("a/b/c", 1));
    try expectEqualStrings("c", try stripComponents("a/b/c", 2));
}
const std = @import("std.zig");
const assert = std.debug.assert;

View File

@ -5,9 +5,15 @@ const fs = std.fs;
const mem = std.mem;
const Allocator = mem.Allocator;
const assert = std.debug.assert;
const Hash = std.crypto.hash.sha2.Sha256;
const log = std.log.scoped(.package);
const Compilation = @import("Compilation.zig");
const Module = @import("Module.zig");
const ThreadPool = @import("ThreadPool.zig");
const WaitGroup = @import("WaitGroup.zig");
const Cache = @import("Cache.zig");
const build_options = @import("build_options");
pub const Table = std.StringHashMapUnmanaged(*Package);
@ -124,3 +130,454 @@ pub fn addAndAdopt(parent: *Package, gpa: Allocator, name: []const u8, child: *P
child.parent = parent;
return parent.add(gpa, name, child);
}
pub const build_zig_basename = "build.zig";
pub const ini_basename = build_zig_basename ++ ".ini";
/// Reads this package's `build.zig.ini` manifest, fetches every declared
/// `[dependency]` section into the global package cache, recurses into each
/// fetched dependency, and appends the generated `@dependencies` import and
/// build-root source consumed by the build runner.
/// A missing manifest is treated as "no dependencies". All manifest errors
/// are reported before returning error.InvalidBuildZigIniFile.
pub fn fetchAndAddDependencies(
    pkg: *Package,
    thread_pool: *ThreadPool,
    http_client: *std.http.Client,
    directory: Compilation.Directory,
    global_cache_directory: Compilation.Directory,
    local_cache_directory: Compilation.Directory,
    dependencies_source: *std.ArrayList(u8),
    build_roots_source: *std.ArrayList(u8),
    name_prefix: []const u8,
) !void {
    const max_bytes = 10 * 1024 * 1024;
    const gpa = thread_pool.allocator;
    const build_zig_ini = directory.handle.readFileAlloc(gpa, ini_basename, max_bytes) catch |err| switch (err) {
        error.FileNotFound => {
            // Handle the same as no dependencies.
            return;
        },
        else => |e| return e,
    };
    defer gpa.free(build_zig_ini);

    const ini: std.Ini = .{ .bytes = build_zig_ini };
    var any_error = false;
    var it = ini.iterateSection("\n[dependency]\n");
    while (it.next()) |dep| {
        var line_it = mem.split(u8, dep, "\n");
        var opt_name: ?[]const u8 = null;
        var opt_url: ?[]const u8 = null;
        var expected_hash: ?[]const u8 = null;
        while (line_it.next()) |kv| {
            const eq_pos = mem.indexOfScalar(u8, kv, '=') orelse continue;
            const key = kv[0..eq_pos];
            const value = kv[eq_pos + 1 ..];
            if (mem.eql(u8, key, "name")) {
                opt_name = value;
            } else if (mem.eql(u8, key, "url")) {
                opt_url = value;
            } else if (mem.eql(u8, key, "hash")) {
                expected_hash = value;
            } else {
                // Unknown keys are diagnosed (line/column derived from the
                // key slice's offset into the manifest) but do not fail the build.
                const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(key.ptr) - @ptrToInt(ini.bytes.ptr));
                std.log.warn("{s}/{s}:{d}:{d} unrecognized key: '{s}'", .{
                    directory.path orelse ".",
                    "build.zig.ini",
                    loc.line,
                    loc.column,
                    key,
                });
            }
        }

        const name = opt_name orelse {
            const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr));
            std.log.err("{s}/{s}:{d}:{d} missing key: 'name'", .{
                directory.path orelse ".",
                "build.zig.ini",
                loc.line,
                loc.column,
            });
            any_error = true;
            continue;
        };
        const url = opt_url orelse {
            const loc = std.zig.findLineColumn(ini.bytes, @ptrToInt(dep.ptr) - @ptrToInt(ini.bytes.ptr));
            // Fixed copy-paste defect: this branch previously reported
            // "missing key: 'name'" even though 'url' is the absent key.
            std.log.err("{s}/{s}:{d}:{d} missing key: 'url'", .{
                directory.path orelse ".",
                "build.zig.ini",
                loc.line,
                loc.column,
            });
            any_error = true;
            continue;
        };

        // The fully-qualified name is the dotted chain of dependency names
        // from the root; sub_prefix keeps a trailing '.' for this dep's children.
        const sub_prefix = try std.fmt.allocPrint(gpa, "{s}{s}.", .{ name_prefix, name });
        defer gpa.free(sub_prefix);
        const fqn = sub_prefix[0 .. sub_prefix.len - 1];

        const sub_pkg = try fetchAndUnpack(
            thread_pool,
            http_client,
            global_cache_directory,
            url,
            expected_hash,
            ini,
            directory,
            build_roots_source,
            fqn,
        );

        // Recurse into the fetched package's own manifest before adopting it.
        try pkg.fetchAndAddDependencies(
            thread_pool,
            http_client,
            sub_pkg.root_src_directory,
            global_cache_directory,
            local_cache_directory,
            dependencies_source,
            build_roots_source,
            sub_prefix,
        );

        try addAndAdopt(pkg, gpa, fqn, sub_pkg);

        try dependencies_source.writer().print(" pub const {s} = @import(\"{}\");\n", .{
            std.zig.fmtId(fqn), std.zig.fmtEscapes(fqn),
        });
    }

    if (any_error) return error.InvalidBuildZigIniFile;
}
/// Creates a Package whose build root contains a single file with the given
/// `basename` and `contents`, stored content-addressed inside
/// `cache_directory`.
pub fn createFilePkg(
    gpa: Allocator,
    cache_directory: Compilation.Directory,
    basename: []const u8,
    contents: []const u8,
) !*Package {
    // Write into a randomly-named tmp dir first, then rename into place, so a
    // partially written package is never observable at its final path.
    const rand_int = std.crypto.random.int(u64);
    const tmp_dir_sub_path = "tmp" ++ fs.path.sep_str ++ hex64(rand_int);
    {
        var tmp_dir = try cache_directory.handle.makeOpenPath(tmp_dir_sub_path, .{});
        defer tmp_dir.close();
        try tmp_dir.writeFile(basename, contents);
    }
    // Final path is keyed on compiler version + file contents, making the
    // cache entry content-addressed.
    var hh: Cache.HashHelper = .{};
    hh.addBytes(build_options.version);
    hh.addBytes(contents);
    const hex_digest = hh.final();
    const o_dir_sub_path = "o" ++ fs.path.sep_str ++ hex_digest;
    try renameTmpIntoCache(cache_directory.handle, tmp_dir_sub_path, o_dir_sub_path);
    return createWithDir(gpa, cache_directory, o_dir_sub_path, basename);
}
/// Returns a heap-allocated Package whose root is the unpacked contents of
/// `url` inside the global package cache.
///
/// Fast path: when `expected_hash` already names a "p/<hex>" directory in the
/// cache, no network access happens at all. Slow path: the archive is
/// downloaded into a unique temp directory, unpacked, hashed, verified
/// against `expected_hash`, and atomically renamed into the cache.
///
/// In both cases a `pub const <fqn> = "<build root>";` line is appended to
/// `build_roots_source`. `ini` and `comp_directory` are used only for
/// diagnostics (`src_ptr` positions are resolved against `ini.bytes`).
fn fetchAndUnpack(
    thread_pool: *ThreadPool,
    http_client: *std.http.Client,
    global_cache_directory: Compilation.Directory,
    url: []const u8,
    expected_hash: ?[]const u8,
    ini: std.Ini,
    comp_directory: Compilation.Directory,
    build_roots_source: *std.ArrayList(u8),
    fqn: []const u8,
) !*Package {
    const gpa = http_client.allocator;
    const s = fs.path.sep_str;

    // Check if the expected_hash is already present in the global package
    // cache, and thereby avoid both fetching and unpacking.
    if (expected_hash) |h| cached: {
        // A declared hash is the hex form of the digest, so exactly twice
        // digest_length bytes.
        if (h.len != 2 * Hash.digest_length) {
            return reportError(
                ini,
                comp_directory,
                h.ptr,
                "wrong hash size. expected: {d}, found: {d}",
                .{ Hash.digest_length, h.len },
            );
        }
        const hex_digest = h[0 .. 2 * Hash.digest_length];
        const pkg_dir_sub_path = "p" ++ s ++ hex_digest;
        var pkg_dir = global_cache_directory.handle.openDir(pkg_dir_sub_path, .{}) catch |err| switch (err) {
            // Not cached yet: fall through to the fetch path below.
            error.FileNotFound => break :cached,
            else => |e| return e,
        };
        errdefer pkg_dir.close();

        const ptr = try gpa.create(Package);
        errdefer gpa.destroy(ptr);

        const owned_src_path = try gpa.dupe(u8, build_zig_basename);
        errdefer gpa.free(owned_src_path);

        const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
        errdefer gpa.free(build_root);

        try build_roots_source.writer().print(" pub const {s} = \"{}\";\n", .{
            std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root),
        });

        // Package takes ownership of build_root and owned_src_path.
        ptr.* = .{
            .root_src_directory = .{
                .path = build_root,
                .handle = pkg_dir,
            },
            .root_src_directory_owned = true,
            .root_src_path = owned_src_path,
        };
        return ptr;
    }

    const uri = try std.Uri.parse(url);

    // Download and unpack into a randomly named temp directory so concurrent
    // zig processes cannot observe (or collide on) a half-written package.
    const rand_int = std.crypto.random.int(u64);
    const tmp_dir_sub_path = "tmp" ++ s ++ hex64(rand_int);

    const actual_hash = a: {
        var tmp_directory: Compilation.Directory = d: {
            const path = try global_cache_directory.join(gpa, &.{tmp_dir_sub_path});
            errdefer gpa.free(path);

            const iterable_dir = try global_cache_directory.handle.makeOpenPathIterable(tmp_dir_sub_path, .{});
            errdefer iterable_dir.close();

            break :d .{
                .path = path,
                .handle = iterable_dir.dir,
            };
        };
        defer tmp_directory.closeAndFree(gpa);

        var req = try http_client.request(uri, .{}, .{});
        defer req.deinit();

        // Only gzipped tarballs are supported so far; dispatch on the URL's
        // file extension.
        if (mem.endsWith(u8, uri.path, ".tar.gz")) {
            // I observed the gzip stream to read 1 byte at a time, so I am using a
            // buffered reader on the front of it.
            var br = std.io.bufferedReaderSize(std.crypto.tls.max_ciphertext_record_len, req.reader());
            var gzip_stream = try std.compress.gzip.gzipStream(gpa, br.reader());
            defer gzip_stream.deinit();

            // Strip the single top-level directory that tarballs
            // conventionally wrap their contents in.
            try std.tar.pipeToFileSystem(tmp_directory.handle, gzip_stream.reader(), .{
                .strip_components = 1,
            });
        } else {
            return reportError(
                ini,
                comp_directory,
                uri.path.ptr,
                "unknown file extension for path '{s}'",
                .{uri.path},
            );
        }

        // TODO: delete files not included in the package prior to computing the package hash.
        // for example, if the ini file has directives to include/not include certain files,
        // apply those rules directly to the filesystem right here. This ensures that files
        // not protected by the hash are not present on the file system.

        break :a try computePackageHash(thread_pool, .{ .dir = tmp_directory.handle });
    };

    const pkg_dir_sub_path = "p" ++ s ++ hexDigest(actual_hash);
    try renameTmpIntoCache(global_cache_directory.handle, tmp_dir_sub_path, pkg_dir_sub_path);

    if (expected_hash) |h| {
        const actual_hex = hexDigest(actual_hash);
        if (!mem.eql(u8, h, &actual_hex)) {
            return reportError(
                ini,
                comp_directory,
                h.ptr,
                "hash mismatch: expected: {s}, found: {s}",
                .{ h, actual_hex },
            );
        }
    } else {
        // A url without a declared hash is always an error, but the computed
        // hash is printed so the user can copy it into build.zig.ini.
        return reportError(
            ini,
            comp_directory,
            url.ptr,
            "url field is missing corresponding hash field: hash={s}",
            .{std.fmt.fmtSliceHexLower(&actual_hash)},
        );
    }

    const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
    defer gpa.free(build_root);

    try build_roots_source.writer().print(" pub const {s} = \"{}\";\n", .{
        std.zig.fmtId(fqn), std.zig.fmtEscapes(build_root),
    });

    return createWithDir(gpa, global_cache_directory, pkg_dir_sub_path, build_zig_basename);
}
/// Prints a compiler-style "<file>:<line>:<col>: error: ..." diagnostic for a
/// location inside the build.zig.ini buffer (`src_ptr` must point into
/// `ini.bytes`) and always returns error.PackageFetchFailed.
fn reportError(
    ini: std.Ini,
    comp_directory: Compilation.Directory,
    src_ptr: [*]const u8,
    comptime fmt_string: []const u8,
    fmt_args: anytype,
) error{PackageFetchFailed} {
    // Translate the pointer back into a line/column pair (findLineColumn is
    // 0-based, so +1 for human-readable output).
    const byte_offset = @ptrToInt(src_ptr) - @ptrToInt(ini.bytes.ptr);
    const loc = std.zig.findLineColumn(ini.bytes, byte_offset);
    const line = loc.line + 1;
    const column = loc.column + 1;
    if (comp_directory.path == null) {
        std.debug.print("{s}:{d}:{d}: error: " ++ fmt_string ++ "\n", .{
            ini_basename, line, column,
        } ++ fmt_args);
    } else {
        std.debug.print("{s}{c}{s}:{d}:{d}: error: " ++ fmt_string ++ "\n", .{
            comp_directory.path.?, fs.path.sep, ini_basename, line, column,
        } ++ fmt_args);
    }
    return error.PackageFetchFailed;
}
/// One file participating in the package hash; created on the main thread,
/// `hash` and `failure` are filled in by a worker thread.
const HashedFile = struct {
    /// File path relative to the package root; owned by the caller.
    path: []const u8,
    /// Digest of the file contents; valid only if `failure` is not an error.
    hash: [Hash.digest_length]u8,
    /// Outcome of hashing this file, inspected after all workers finish.
    failure: Error!void,

    const Error = fs.File.OpenError || fs.File.ReadError;

    /// Sort key for deterministic hashing: lexicographic by path.
    fn lessThan(_: void, lhs: *const HashedFile, rhs: *const HashedFile) bool {
        return mem.lessThan(u8, lhs.path, rhs.path);
    }
};
/// Computes a single digest over every regular file under `pkg_dir`
/// (recursively): files are hashed in parallel on `thread_pool`, then the
/// per-file digests are combined in sorted-path order so the result is
/// deterministic. Errors if the tree contains anything other than regular
/// files and directories.
///
/// NOTE(review): only file *contents* feed the per-file digests; paths are
/// used for ordering but are not themselves folded into the final hash —
/// confirm this is the intended hash definition.
fn computePackageHash(
    thread_pool: *ThreadPool,
    pkg_dir: fs.IterableDir,
) ![Hash.digest_length]u8 {
    const gpa = thread_pool.allocator;

    // We'll use an arena allocator for the path name strings since they all
    // need to be in memory for sorting.
    var arena_instance = std.heap.ArenaAllocator.init(gpa);
    defer arena_instance.deinit();
    const arena = arena_instance.allocator();

    // Collect all files, recursively, then sort.
    var all_files = std.ArrayList(*HashedFile).init(gpa);
    defer all_files.deinit();

    var walker = try pkg_dir.walk(gpa);
    defer walker.deinit();

    {
        // The final hash will be a hash of each file hashed independently. This
        // allows hashing in parallel.
        var wait_group: WaitGroup = .{};
        // Runs at the end of this block — before the arena that owns the
        // HashedFile instances the workers write into is destroyed.
        defer wait_group.wait();

        while (try walker.next()) |entry| {
            switch (entry.kind) {
                .Directory => continue,
                .File => {},
                // Symlinks, pipes, devices, etc. have no representation in
                // the package hash, so their presence is an error.
                else => return error.IllegalFileTypeInPackage,
            }
            const hashed_file = try arena.create(HashedFile);
            hashed_file.* = .{
                .path = try arena.dupe(u8, entry.path),
                .hash = undefined, // to be populated by the worker
                .failure = undefined, // to be populated by the worker
            };
            wait_group.start();
            thread_pool.spawn(workerHashFile, .{ pkg_dir.dir, hashed_file, &wait_group }) catch |err| {
                // Fix: undo the start() above on spawn failure, otherwise the
                // deferred wait() would deadlock waiting on a worker that
                // never ran.
                wait_group.finish();
                return err;
            };
            try all_files.append(hashed_file);
        }
    }

    // Sort by path so the final hash is independent of directory iteration
    // order.
    std.sort.sort(*HashedFile, all_files.items, {}, HashedFile.lessThan);

    var hasher = Hash.init(.{});
    var any_failures = false;
    for (all_files.items) |hashed_file| {
        hashed_file.failure catch |err| {
            any_failures = true;
            std.log.err("unable to hash '{s}': {s}", .{ hashed_file.path, @errorName(err) });
        };
        hasher.update(&hashed_file.hash);
    }
    if (any_failures) return error.PackageHashUnavailable;
    return hasher.finalResult();
}
/// Thread-pool entry point: hash one file, record the outcome in
/// `hashed_file.failure`, and signal the wait group no matter what happened.
fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void {
    defer wg.finish();
    const result = hashFileFallible(dir, hashed_file);
    hashed_file.failure = result;
}
/// Hashes the contents of `hashed_file.path` (opened relative to `dir`) into
/// `hashed_file.hash`. Only the file's bytes are hashed — not its path.
fn hashFileFallible(dir: fs.Dir, hashed_file: *HashedFile) HashedFile.Error!void {
    var buf: [8000]u8 = undefined;
    var file = try dir.openFile(hashed_file.path, .{});
    // Fix: the handle was previously leaked; a package with enough files
    // would exhaust the process's file-descriptor limit.
    defer file.close();
    var hasher = Hash.init(.{});
    while (true) {
        const bytes_read = try file.read(&buf);
        if (bytes_read == 0) break;
        hasher.update(buf[0..bytes_read]);
    }
    hasher.final(&hashed_file.hash);
}
/// Lowercase hexadecimal digits, indexed by nibble value.
const hex_charset = "0123456789abcdef";

/// Renders `x` as 16 lowercase hex digits in little-endian byte order (least
/// significant byte first), as used to name "tmp/" cache directories.
fn hex64(x: u64) [16]u8 {
    var out: [16]u8 = undefined;
    var remaining = x;
    var i: usize = 0;
    while (i < 16) : (i += 2) {
        const byte = @truncate(u8, remaining);
        out[i] = hex_charset[byte >> 4];
        out[i + 1] = hex_charset[byte & 15];
        remaining >>= 8;
    }
    return out;
}
// Spot-check of the little-endian byte order: the least significant byte
// (0x00) appears first, each byte as two lowercase hex digits.
test hex64 {
    const s = "[" ++ hex64(0x12345678_abcdef00) ++ "]";
    try std.testing.expectEqualStrings("[00efcdab78563412]", s);
}
/// Converts a binary digest to its lowercase hexadecimal representation,
/// used to name "p/<hex>" package directories in the global cache.
fn hexDigest(digest: [Hash.digest_length]u8) [Hash.digest_length * 2]u8 {
    var out: [Hash.digest_length * 2]u8 = undefined;
    var i: usize = 0;
    while (i < digest.len) : (i += 1) {
        const byte = digest[i];
        out[2 * i + 0] = hex_charset[byte >> 4];
        out[2 * i + 1] = hex_charset[byte & 15];
    }
    return out;
}
/// Moves the freshly populated temp directory `tmp_dir_sub_path` to its final
/// location `dest_dir_sub_path`, both relative to `cache_dir`. Creates the
/// destination's single-letter parent directory (e.g. "p" or "o") on demand,
/// and treats "destination already exists" as success, since another process
/// may have installed the same content first.
fn renameTmpIntoCache(
    cache_dir: fs.Dir,
    tmp_dir_sub_path: []const u8,
    dest_dir_sub_path: []const u8,
) !void {
    // Cache layout is "<one letter><sep><digest>", so the separator must be
    // at index 1; dest_dir_sub_path[0..1] below is the parent to create.
    assert(dest_dir_sub_path[1] == fs.path.sep);
    var handled_missing_dir = false;
    while (true) {
        cache_dir.rename(tmp_dir_sub_path, dest_dir_sub_path) catch |err| switch (err) {
            error.FileNotFound => {
                // If creating the parent did not help the first time, then it
                // is the tmp dir itself that is missing: give up.
                if (handled_missing_dir) return err;
                cache_dir.makeDir(dest_dir_sub_path[0..1]) catch |mkd_err| switch (mkd_err) {
                    error.PathAlreadyExists => {},
                    else => |e| return e,
                };
                // Fix: mark the parent as handled on *successful* creation as
                // well, not only on PathAlreadyExists; previously a FileNotFound
                // after a successful makeDir cost one redundant makeDir
                // round-trip before the error was returned.
                handled_missing_dir = true;
                continue;
            },
            error.PathAlreadyExists, error.AccessDenied => {
                // Package has been already downloaded and may already be in use on the system.
                cache_dir.deleteTree(tmp_dir_sub_path) catch |del_err| {
                    std.log.warn("unable to delete temp directory: {s}", .{@errorName(del_err)});
                };
            },
            else => |e| return e,
        };
        break;
    }
}

View File

@ -3983,11 +3983,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
};
defer zig_lib_directory.handle.close();
var main_pkg: Package = .{
.root_src_directory = zig_lib_directory,
.root_src_path = "build_runner.zig",
};
var cleanup_build_dir: ?fs.Dir = null;
defer if (cleanup_build_dir) |*dir| dir.close();
@ -4031,12 +4026,6 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
};
child_argv.items[argv_index_build_file] = build_directory.path orelse cwd_path;
var build_pkg: Package = .{
.root_src_directory = build_directory,
.root_src_path = build_zig_basename,
};
try main_pkg.addAndAdopt(arena, "@build", &build_pkg);
var global_cache_directory: Compilation.Directory = l: {
const p = override_global_cache_dir orelse try introspect.resolveGlobalCacheDir(arena);
break :l .{
@ -4082,6 +4071,66 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
var thread_pool: ThreadPool = undefined;
try thread_pool.init(gpa);
defer thread_pool.deinit();
var main_pkg: Package = .{
.root_src_directory = zig_lib_directory,
.root_src_path = "build_runner.zig",
};
if (!build_options.omit_pkg_fetching_code) {
var http_client: std.http.Client = .{ .allocator = gpa };
defer http_client.deinit();
try http_client.rescanRootCertificates();
// Here we provide an import to the build runner that allows using reflection to find
// all of the dependencies. Without this, there would be no way to use `@import` to
// access dependencies by name, since `@import` requires string literals.
var dependencies_source = std.ArrayList(u8).init(gpa);
defer dependencies_source.deinit();
try dependencies_source.appendSlice("pub const imports = struct {\n");
// This will go into the same package. It contains the file system paths
// to all the build.zig files.
var build_roots_source = std.ArrayList(u8).init(gpa);
defer build_roots_source.deinit();
// Here we borrow main package's table and will replace it with a fresh
// one after this process completes.
main_pkg.fetchAndAddDependencies(
&thread_pool,
&http_client,
build_directory,
global_cache_directory,
local_cache_directory,
&dependencies_source,
&build_roots_source,
"",
) catch |err| switch (err) {
error.PackageFetchFailed => process.exit(1),
else => |e| return e,
};
try dependencies_source.appendSlice("};\npub const build_root = struct {\n");
try dependencies_source.appendSlice(build_roots_source.items);
try dependencies_source.appendSlice("};\n");
const deps_pkg = try Package.createFilePkg(
gpa,
local_cache_directory,
"dependencies.zig",
dependencies_source.items,
);
mem.swap(Package.Table, &main_pkg.table, &deps_pkg.table);
try main_pkg.addAndAdopt(gpa, "@dependencies", deps_pkg);
}
var build_pkg: Package = .{
.root_src_directory = build_directory,
.root_src_path = build_zig_basename,
};
try main_pkg.addAndAdopt(gpa, "@build", &build_pkg);
const comp = Compilation.create(gpa, .{
.zig_lib_directory = zig_lib_directory,
.local_cache_directory = local_cache_directory,

View File

@ -12,3 +12,4 @@ pub const have_stage1 = false;
pub const skip_non_native = false;
pub const only_c = false;
pub const force_gpa = false;
pub const omit_pkg_fetching_code = true;