Mirror of https://github.com/ziglang/zig.git
update deprecated code (#7502)

* `zig env`:
    * fix deprecated interface, update outStream -> writer
    * make code more readable by updating `anytype` -> `std.fs.File.Writer`
parent 4128eea00c
commit 51a904677c
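The commit tracks the std-library rename from `outStream()`/`inStream()` to `writer()`/`reader()`. As a rough sketch of the new spelling at a call site, assuming a std of roughly this vintage where `getStdOut()` returns a `std.fs.File`:

```zig
const std = @import("std");

pub fn main() !void {
    // New spelling: File.writer() replaces the deprecated File.outStream().
    const stdout = std.io.getStdOut().writer();
    try stdout.writeAll("hello from the writer() API\n");

    // Formatted output goes through the same writer interface.
    try stdout.print("{} + {} = {}\n", .{ 1, 2, 1 + 2 });
}
```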
@@ -42,7 +42,7 @@ pub fn main() !void {
 
     const input_file_bytes = try in_file.inStream().readAllAlloc(allocator, max_doc_file_size);
 
-    var buffered_out_stream = io.bufferedOutStream(out_file.outStream());
+    var buffered_out_stream = io.bufferedOutStream(out_file.writer());
 
     var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
     var toc = try genToc(allocator, &tokenizer);
@@ -50,7 +50,7 @@ pub fn main() !void {
     try fs.cwd().makePath(tmp_dir_name);
     defer fs.cwd().deleteTree(tmp_dir_name) catch {};
 
-    try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.outStream(), zig_exe);
+    try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.writer(), zig_exe);
     try buffered_out_stream.flush();
 }
 
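The docgen hunks above only swap the inner stream getter; the `io.bufferedOutStream` wrapper keeps its old name in this commit. A minimal sketch of that pattern, with a hypothetical `out.html` path standing in for docgen's real output file:

```zig
const std = @import("std");
const io = std.io;
const fs = std.fs;

pub fn main() !void {
    const out_file = try fs.cwd().createFile("out.html", .{});
    defer out_file.close();

    // The file handle now exposes writer() instead of the deprecated outStream().
    var buffered_out_stream = io.bufferedOutStream(out_file.writer());
    const w = buffered_out_stream.writer();

    try w.writeAll("<!DOCTYPE html>\n");
    // Buffered writers must be flushed before the file goes out of scope.
    try buffered_out_stream.flush();
}
```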
@@ -325,7 +325,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
     var toc_buf = std.ArrayList(u8).init(allocator);
     defer toc_buf.deinit();
 
-    var toc = toc_buf.outStream();
+    var toc = toc_buf.writer();
 
     var nodes = std.ArrayList(Node).init(allocator);
     defer nodes.deinit();
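`std.ArrayList(u8)` gets the same treatment: `writer()` replaces `outStream()` for accumulating formatted text in memory. A small sketch of the idiom docgen uses here (`buildGreeting` is a made-up helper, not part of the commit):

```zig
const std = @import("std");

fn buildGreeting(allocator: *std.mem.Allocator, name: []const u8) ![]u8 {
    var buf = std.ArrayList(u8).init(allocator);
    defer buf.deinit();

    // writer() replaces the deprecated outStream() on ArrayList(u8).
    const out = buf.writer();
    try out.print("Hello, {}!\n", .{name});

    // Hand ownership of the accumulated bytes back to the caller.
    return buf.toOwnedSlice();
}
```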
@@ -615,7 +615,7 @@ fn urlize(allocator: *mem.Allocator, input: []const u8) ![]u8 {
     var buf = std.ArrayList(u8).init(allocator);
     defer buf.deinit();
 
-    const out = buf.outStream();
+    const out = buf.writer();
     for (input) |c| {
         switch (c) {
             'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {
@@ -634,7 +634,7 @@ fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
     var buf = std.ArrayList(u8).init(allocator);
     defer buf.deinit();
 
-    const out = buf.outStream();
+    const out = buf.writer();
     try writeEscaped(out, input);
     return buf.toOwnedSlice();
 }
@@ -680,7 +680,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
     var buf = std.ArrayList(u8).init(allocator);
     defer buf.deinit();
 
-    var out = buf.outStream();
+    var out = buf.writer();
     var number_start_index: usize = undefined;
     var first_number: usize = undefined;
     var second_number: usize = undefined;
@@ -200,7 +200,7 @@ pub fn mainArgs(gpa: *Allocator, arena: *Allocator, args: []const []const u8) !void {
     } else if (mem.eql(u8, cmd, "version")) {
         try std.io.getStdOut().writeAll(build_options.version ++ "\n");
     } else if (mem.eql(u8, cmd, "env")) {
-        try @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().outStream());
+        try @import("print_env.zig").cmdEnv(arena, cmd_args, io.getStdOut().writer());
     } else if (mem.eql(u8, cmd, "zen")) {
         try io.getStdOut().writeAll(info_zen);
     } else if (mem.eql(u8, cmd, "help") or mem.eql(u8, cmd, "-h") or mem.eql(u8, cmd, "--help")) {
@@ -4,7 +4,7 @@ const introspect = @import("introspect.zig");
 const Allocator = std.mem.Allocator;
 const fatal = @import("main.zig").fatal;
 
-pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: anytype) !void {
+pub fn cmdEnv(gpa: *Allocator, args: []const []const u8, stdout: std.fs.File.Writer) !void {
     const self_exe_path = try std.fs.selfExePathAlloc(gpa);
     defer gpa.free(self_exe_path);
 
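The `cmdEnv` change goes the other way from duck typing: instead of a generic `anytype` stream, the parameter is pinned to the concrete `std.fs.File.Writer`, which makes the signature self-documenting at the cost of accepting only file-backed writers. A hedged sketch of the trade-off (the function names and version string here are illustrative, not from the commit):

```zig
const std = @import("std");

// Generic version: works with any writer type, but the reader of the
// signature has to guess what "anytype" must support.
fn printVersionGeneric(out: anytype) !void {
    try out.writeAll("0.7.1\n");
}

// Concrete version: only std.fs.File.Writer is accepted, so the intent
// (writing to a file or stdout handle) is visible in the signature.
fn printVersionToFile(out: std.fs.File.Writer) !void {
    try out.writeAll("0.7.1\n");
}

pub fn main() !void {
    const stdout = std.io.getStdOut().writer();
    try printVersionGeneric(stdout);
    try printVersionToFile(stdout);
}
```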
@@ -17,13 +17,13 @@ pub fn main() anyerror!void {
     var dump = Dump.init(allocator);
     for (args[1..]) |arg| {
         parser = json.Parser.init(allocator, false);
-        const json_text = try std.io.readFileAlloc(allocator, arg);
+        const json_text = try std.fs.cwd().readFileAlloc(allocator, arg, std.math.maxInt(usize));
         const tree = try parser.parse(json_text);
         try dump.mergeJson(tree.root);
     }
 
     const stdout = try std.io.getStdOut();
-    try dump.render(stdout.outStream());
+    try dump.render(stdout.writer());
 }
 
 /// AST source node
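The other change in this hunk is that whole-file reads moved from the old `std.io.readFileAlloc(allocator, path)` helper to `std.fs.Dir.readFileAlloc`, which takes an explicit size limit. A minimal sketch, using `build.zig` purely as an example path:

```zig
const std = @import("std");

pub fn main() !void {
    const allocator = std.heap.page_allocator;

    // readFileAlloc now lives on fs.Dir and requires a maximum byte count.
    const text = try std.fs.cwd().readFileAlloc(allocator, "build.zig", std.math.maxInt(usize));
    defer allocator.free(text);

    std.debug.print("read {} bytes\n", .{text.len});
}
```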
@@ -33,12 +33,12 @@ const Node = struct {
     col: usize,
     fields: []usize,
 
-    fn hash(n: Node) u32 {
+    fn hash(n: Node) u64 {
         var hasher = std.hash.Wyhash.init(0);
         std.hash.autoHash(&hasher, n.file);
         std.hash.autoHash(&hasher, n.line);
         std.hash.autoHash(&hasher, n.col);
-        return @truncate(u32, hasher.final());
+        return hasher.final();
     }
 
     fn eql(a: Node, b: Node) bool {
@@ -52,10 +52,10 @@ const Error = struct {
     src: usize,
     name: []const u8,
 
-    fn hash(n: Error) u32 {
+    fn hash(n: Error) u64 {
         var hasher = std.hash.Wyhash.init(0);
         std.hash.autoHash(&hasher, n.src);
-        return @truncate(u32, hasher.final());
+        return hasher.final();
     }
 
     fn eql(a: Error, b: Error) bool {
@@ -103,7 +103,6 @@ const Type = union(builtin.TypeId) {
     Union, // TODO
     Fn, // TODO
     BoundFn, // TODO
-    ArgTuple, // TODO
     Opaque, // TODO
     Frame, // TODO
 
@@ -127,10 +126,10 @@ const Type = union(builtin.TypeId) {
         len: usize,
     };
 
-    fn hash(t: Type) u32 {
+    fn hash(t: Type) u64 {
         var hasher = std.hash.Wyhash.init(0);
-        std.hash.autoHash(&hasher, builtin.TypeId(t));
-        return @truncate(u32, hasher.final());
+        std.hash.autoHash(&hasher, t);
+        return hasher.final();
     }
 
     fn eql(a: Type, b: Type) bool {
@@ -144,22 +143,23 @@ const Dump = struct {
     root_name: ?[]const u8 = null,
     targets: std.ArrayList([]const u8),
 
-    const FileMap = std.StringHashMap(usize);
     file_list: std.ArrayList([]const u8),
     file_map: FileMap,
 
-    const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql);
     node_list: std.ArrayList(Node),
     node_map: NodeMap,
 
-    const ErrorMap = std.HashMap(Error, usize, Error.hash, Error.eql);
     error_list: std.ArrayList(Error),
     error_map: ErrorMap,
 
-    const TypeMap = std.HashMap(Type, usize, Type.hash, Type.eql);
     type_list: std.ArrayList(Type),
     type_map: TypeMap,
 
+    const FileMap = std.StringHashMap(usize);
+    const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql, 80);
+    const ErrorMap = std.HashMap(Error, usize, Error.hash, Error.eql, 80);
+    const TypeMap = std.HashMap(Type, usize, Type.hash, Type.eql, 80);
+
     fn init(allocator: *mem.Allocator) Dump {
         return Dump{
             .targets = std.ArrayList([]const u8).init(allocator),
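Behind these edits is the `std.HashMap` signature of this std vintage: the per-key hash function returns `u64` instead of `u32`, and the type takes an explicit maximum load percentage as its final parameter (80 above). A small self-contained sketch of a map keyed on a custom struct under that API (the `Point` type is made up for illustration):

```zig
const std = @import("std");

const Point = struct {
    x: i32,
    y: i32,

    // Hash functions for std.HashMap of this era return u64.
    fn hash(p: Point) u64 {
        var hasher = std.hash.Wyhash.init(0);
        std.hash.autoHash(&hasher, p.x);
        std.hash.autoHash(&hasher, p.y);
        return hasher.final();
    }

    fn eql(a: Point, b: Point) bool {
        return a.x == b.x and a.y == b.y;
    }
};

// The final comptime parameter is the maximum load percentage.
const PointMap = std.HashMap(Point, usize, Point.hash, Point.eql, 80);

pub fn main() !void {
    var map = PointMap.init(std.heap.page_allocator);
    defer map.deinit();

    try map.put(.{ .x = 1, .y = 2 }, 42);
    std.debug.print("count = {}\n", .{map.count()});
}
```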
@@ -310,7 +310,7 @@ const Dump = struct {
         try other_types_to_mine.putNoClobber(other_type_index, gop.kv.value);
     }
 
-    fn render(self: *Dump, stream: var) !void {
+    fn render(self: *Dump, stream: anytype) !void {
         var jw = json.WriteStream(@TypeOf(stream).Child, 10).init(stream);
         try jw.beginObject();
 
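`stream: var` is the older spelling of a duck-typed parameter; the keyword was renamed to `anytype`. A minimal sketch of the pattern, with a made-up `renderGreeting` standing in for `Dump.render`:

```zig
const std = @import("std");

// Accept any type that provides the writer interface (print, writeAll, ...).
fn renderGreeting(writer: anytype, name: []const u8) !void {
    try writer.print("hello, {}\n", .{name});
}

pub fn main() !void {
    // Works for a file writer...
    try renderGreeting(std.io.getStdOut().writer(), "stdout");

    // ...and equally for an in-memory ArrayList(u8) writer.
    var buf = std.ArrayList(u8).init(std.heap.page_allocator);
    defer buf.deinit();
    try renderGreeting(buf.writer(), "buffer");
}
```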
@@ -239,8 +239,8 @@ pub fn main() !void {
     const vers_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "vers.txt" });
     const vers_txt_file = try fs.cwd().createFile(vers_txt_path, .{});
     defer vers_txt_file.close();
-    var buffered = std.io.bufferedOutStream(vers_txt_file.outStream());
-    const vers_txt = buffered.outStream();
+    var buffered = std.io.bufferedOutStream(vers_txt_file.writer());
+    const vers_txt = buffered.writer();
     for (global_ver_list) |name, i| {
         _ = global_ver_set.put(name, i) catch unreachable;
         try vers_txt.print("{}\n", .{name});
@@ -251,8 +251,8 @@ pub fn main() !void {
     const fns_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "fns.txt" });
     const fns_txt_file = try fs.cwd().createFile(fns_txt_path, .{});
     defer fns_txt_file.close();
-    var buffered = std.io.bufferedOutStream(fns_txt_file.outStream());
-    const fns_txt = buffered.outStream();
+    var buffered = std.io.bufferedOutStream(fns_txt_file.writer());
+    const fns_txt = buffered.writer();
     for (global_fn_list) |name, i| {
         const entry = global_fn_set.getEntry(name).?;
         entry.value.index = i;
@@ -282,8 +282,8 @@ pub fn main() !void {
     const abilist_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "abi.txt" });
     const abilist_txt_file = try fs.cwd().createFile(abilist_txt_path, .{});
     defer abilist_txt_file.close();
-    var buffered = std.io.bufferedOutStream(abilist_txt_file.outStream());
-    const abilist_txt = buffered.outStream();
+    var buffered = std.io.bufferedOutStream(abilist_txt_file.writer());
+    const abilist_txt = buffered.writer();
 
     // first iterate over the abi lists
     for (abi_lists) |*abi_list, abi_index| {