Update all std.mem.tokenize calls to their appropriate function

Every call site with a single-character delimiter now uses `tokenizeScalar` (and multi-character delimiter sets use `tokenizeAny`), which should give a nice little performance boost.
This commit is contained in:
Ryan Liptak 2023-05-04 18:05:40 -07:00
parent ce9f3ec990
commit 815e53b147
24 changed files with 79 additions and 79 deletions

View File

@ -284,7 +284,7 @@ pub fn build(b: *std.Build) !void {
// That means we also have to rely on stage1 compiled c++ files. We parse config.h to find // That means we also have to rely on stage1 compiled c++ files. We parse config.h to find
// the information passed on to us from cmake. // the information passed on to us from cmake.
if (cfg.cmake_prefix_path.len > 0) { if (cfg.cmake_prefix_path.len > 0) {
var it = mem.tokenize(u8, cfg.cmake_prefix_path, ";"); var it = mem.tokenizeScalar(u8, cfg.cmake_prefix_path, ';');
while (it.next()) |path| { while (it.next()) |path| {
b.addSearchPrefix(path); b.addSearchPrefix(path);
} }
@ -687,7 +687,7 @@ fn addCxxKnownPath(
if (!std.process.can_spawn) if (!std.process.can_spawn)
return error.RequiredLibraryNotFound; return error.RequiredLibraryNotFound;
const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) }); const path_padded = b.exec(&.{ ctx.cxx_compiler, b.fmt("-print-file-name={s}", .{objname}) });
var tokenizer = mem.tokenize(u8, path_padded, "\r\n"); var tokenizer = mem.tokenizeAny(u8, path_padded, "\r\n");
const path_unpadded = tokenizer.next().?; const path_unpadded = tokenizer.next().?;
if (mem.eql(u8, path_unpadded, objname)) { if (mem.eql(u8, path_unpadded, objname)) {
if (errtxt) |msg| { if (errtxt) |msg| {
@ -710,7 +710,7 @@ fn addCxxKnownPath(
} }
fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void { fn addCMakeLibraryList(exe: *std.Build.Step.Compile, list: []const u8) void {
var it = mem.tokenize(u8, list, ";"); var it = mem.tokenizeScalar(u8, list, ';');
while (it.next()) |lib| { while (it.next()) |lib| {
if (mem.startsWith(u8, lib, "-l")) { if (mem.startsWith(u8, lib, "-l")) {
exe.linkSystemLibrary(lib["-l".len..]); exe.linkSystemLibrary(lib["-l".len..]);
@ -855,7 +855,7 @@ fn parseConfigH(b: *std.Build, config_h_text: []const u8) ?CMakeConfig {
// .prefix = ZIG_LLVM_LINK_MODE parsed manually below // .prefix = ZIG_LLVM_LINK_MODE parsed manually below
}; };
var lines_it = mem.tokenize(u8, config_h_text, "\r\n"); var lines_it = mem.tokenizeAny(u8, config_h_text, "\r\n");
while (lines_it.next()) |line| { while (lines_it.next()) |line| {
inline for (mappings) |mapping| { inline for (mappings) |mapping| {
if (mem.startsWith(u8, line, mapping.prefix)) { if (mem.startsWith(u8, line, mapping.prefix)) {

View File

@ -1358,7 +1358,7 @@ pub fn findProgram(self: *Build, names: []const []const u8, paths: []const []con
if (fs.path.isAbsolute(name)) { if (fs.path.isAbsolute(name)) {
return name; return name;
} }
var it = mem.tokenize(u8, PATH, &[_]u8{fs.path.delimiter}); var it = mem.tokenizeScalar(u8, PATH, fs.path.delimiter);
while (it.next()) |path| { while (it.next()) |path| {
const full_path = self.pathJoin(&.{ const full_path = self.pathJoin(&.{
path, path,

View File

@ -434,7 +434,7 @@ pub const Manifest = struct {
const input_file_count = self.files.items.len; const input_file_count = self.files.items.len;
var any_file_changed = false; var any_file_changed = false;
var line_iter = mem.tokenize(u8, file_contents, "\n"); var line_iter = mem.tokenizeScalar(u8, file_contents, '\n');
var idx: usize = 0; var idx: usize = 0;
if (if (line_iter.next()) |line| !std.mem.eql(u8, line, manifest_header) else true) { if (if (line_iter.next()) |line| !std.mem.eql(u8, line, manifest_header) else true) {
if (try self.upgradeToExclusiveLock()) continue; if (try self.upgradeToExclusiveLock()) continue;
@ -463,7 +463,7 @@ pub const Manifest = struct {
break :blk new; break :blk new;
}; };
var iter = mem.tokenize(u8, line, " "); var iter = mem.tokenizeScalar(u8, line, ' ');
const size = iter.next() orelse return error.InvalidFormat; const size = iter.next() orelse return error.InvalidFormat;
const inode = iter.next() orelse return error.InvalidFormat; const inode = iter.next() orelse return error.InvalidFormat;
const mtime_nsec_str = iter.next() orelse return error.InvalidFormat; const mtime_nsec_str = iter.next() orelse return error.InvalidFormat;

View File

@ -103,8 +103,8 @@ const Action = struct {
assert(act.tag == .match or act.tag == .not_present); assert(act.tag == .match or act.tag == .not_present);
const phrase = act.phrase.resolve(b, step); const phrase = act.phrase.resolve(b, step);
var candidate_var: ?struct { name: []const u8, value: u64 } = null; var candidate_var: ?struct { name: []const u8, value: u64 } = null;
var hay_it = mem.tokenize(u8, mem.trim(u8, haystack, " "), " "); var hay_it = mem.tokenizeScalar(u8, mem.trim(u8, haystack, " "), ' ');
var needle_it = mem.tokenize(u8, mem.trim(u8, phrase, " "), " "); var needle_it = mem.tokenizeScalar(u8, mem.trim(u8, phrase, " "), ' ');
while (needle_it.next()) |needle_tok| { while (needle_it.next()) |needle_tok| {
const hay_tok = hay_it.next() orelse return false; const hay_tok = hay_it.next() orelse return false;
@ -155,7 +155,7 @@ const Action = struct {
var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa); var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa);
var values = std.ArrayList(u64).init(gpa); var values = std.ArrayList(u64).init(gpa);
var it = mem.tokenize(u8, phrase, " "); var it = mem.tokenizeScalar(u8, phrase, ' ');
while (it.next()) |next| { while (it.next()) |next| {
if (mem.eql(u8, next, "+")) { if (mem.eql(u8, next, "+")) {
try op_stack.append(.add); try op_stack.append(.add);
@ -365,7 +365,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
var vars = std.StringHashMap(u64).init(gpa); var vars = std.StringHashMap(u64).init(gpa);
for (self.checks.items) |chk| { for (self.checks.items) |chk| {
var it = mem.tokenize(u8, output, "\r\n"); var it = mem.tokenizeAny(u8, output, "\r\n");
for (chk.actions.items) |act| { for (chk.actions.items) |act| {
switch (act.tag) { switch (act.tag) {
.match => { .match => {

View File

@ -777,7 +777,7 @@ fn runPkgConfig(self: *Compile, lib_name: []const u8) ![]const []const u8 {
var zig_args = ArrayList([]const u8).init(b.allocator); var zig_args = ArrayList([]const u8).init(b.allocator);
defer zig_args.deinit(); defer zig_args.deinit();
var it = mem.tokenize(u8, stdout, " \r\n\t"); var it = mem.tokenizeAny(u8, stdout, " \r\n\t");
while (it.next()) |tok| { while (it.next()) |tok| {
if (mem.eql(u8, tok, "-I")) { if (mem.eql(u8, tok, "-I")) {
const dir = it.next() orelse return error.PkgConfigInvalidOutput; const dir = it.next() orelse return error.PkgConfigInvalidOutput;
@ -2017,10 +2017,10 @@ fn execPkgConfigList(self: *std.Build, out_code: *u8) (PkgConfigError || ExecErr
const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore); const stdout = try self.execAllowFail(&[_][]const u8{ "pkg-config", "--list-all" }, out_code, .Ignore);
var list = ArrayList(PkgConfigPkg).init(self.allocator); var list = ArrayList(PkgConfigPkg).init(self.allocator);
errdefer list.deinit(); errdefer list.deinit();
var line_it = mem.tokenize(u8, stdout, "\r\n"); var line_it = mem.tokenizeAny(u8, stdout, "\r\n");
while (line_it.next()) |line| { while (line_it.next()) |line| {
if (mem.trim(u8, line, " \t").len == 0) continue; if (mem.trim(u8, line, " \t").len == 0) continue;
var tok_it = mem.tokenize(u8, line, " \t"); var tok_it = mem.tokenizeAny(u8, line, " \t");
try list.append(PkgConfigPkg{ try list.append(PkgConfigPkg{
.name = tok_it.next() orelse return error.PkgConfigInvalidOutput, .name = tok_it.next() orelse return error.PkgConfigInvalidOutput,
.desc = tok_it.rest(), .desc = tok_it.rest(),

View File

@ -257,7 +257,7 @@ fn render_autoconf(
try output.appendSlice("\n"); try output.appendSlice("\n");
continue; continue;
} }
var it = std.mem.tokenize(u8, line[1..], " \t\r"); var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
const undef = it.next().?; const undef = it.next().?;
if (!std.mem.eql(u8, undef, "undef")) { if (!std.mem.eql(u8, undef, "undef")) {
try output.appendSlice(line); try output.appendSlice(line);
@ -304,7 +304,7 @@ fn render_cmake(
try output.appendSlice("\n"); try output.appendSlice("\n");
continue; continue;
} }
var it = std.mem.tokenize(u8, line[1..], " \t\r"); var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
const cmakedefine = it.next().?; const cmakedefine = it.next().?;
if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and
!std.mem.eql(u8, cmakedefine, "cmakedefine01")) !std.mem.eql(u8, cmakedefine, "cmakedefine01"))

View File

@ -850,7 +850,7 @@ pub const ChildProcess = struct {
return original_err; return original_err;
} }
var it = mem.tokenize(u16, PATH, &[_]u16{';'}); var it = mem.tokenizeScalar(u16, PATH, ';');
while (it.next()) |search_path| { while (it.next()) |search_path| {
dir_buf.clearRetainingCapacity(); dir_buf.clearRetainingCapacity();
try dir_buf.appendSlice(self.allocator, search_path); try dir_buf.appendSlice(self.allocator, search_path);
@ -1067,7 +1067,7 @@ fn windowsCreateProcessPathExt(
// Now we know that at least *a* file matching the wildcard exists, we can loop // Now we know that at least *a* file matching the wildcard exists, we can loop
// through PATHEXT in order and exec any that exist // through PATHEXT in order and exec any that exist
var ext_it = mem.tokenize(u16, pathext, &[_]u16{';'}); var ext_it = mem.tokenizeScalar(u16, pathext, ';');
while (ext_it.next()) |ext| { while (ext_it.next()) |ext| {
if (!windowsCreateProcessSupportsExtension(ext)) continue; if (!windowsCreateProcessSupportsExtension(ext)) continue;

View File

@ -3021,7 +3021,7 @@ pub fn selfExePath(out_buffer: []u8) SelfExePathError![]u8 {
} else if (argv0.len != 0) { } else if (argv0.len != 0) {
// argv[0] is not empty (and not a path): search it inside PATH // argv[0] is not empty (and not a path): search it inside PATH
const PATH = std.os.getenvZ("PATH") orelse return error.FileNotFound; const PATH = std.os.getenvZ("PATH") orelse return error.FileNotFound;
var path_it = mem.tokenize(u8, PATH, &[_]u8{path.delimiter}); var path_it = mem.tokenizeScalar(u8, PATH, path.delimiter);
while (path_it.next()) |a_path| { while (path_it.next()) |a_path| {
var resolved_path_buf: [MAX_PATH_BYTES - 1:0]u8 = undefined; var resolved_path_buf: [MAX_PATH_BYTES - 1:0]u8 = undefined;
const resolved_path = std.fmt.bufPrintZ(&resolved_path_buf, "{s}/{s}", .{ const resolved_path = std.fmt.bufPrintZ(&resolved_path_buf, "{s}/{s}", .{

View File

@ -358,7 +358,7 @@ pub fn windowsParsePath(path: []const u8) WindowsPath {
return relative_path; return relative_path;
} }
var it = mem.tokenize(u8, path, &[_]u8{this_sep}); var it = mem.tokenizeScalar(u8, path, this_sep);
_ = (it.next() orelse return relative_path); _ = (it.next() orelse return relative_path);
_ = (it.next() orelse return relative_path); _ = (it.next() orelse return relative_path);
return WindowsPath{ return WindowsPath{
@ -420,8 +420,8 @@ fn networkShareServersEql(ns1: []const u8, ns2: []const u8) bool {
const sep1 = ns1[0]; const sep1 = ns1[0];
const sep2 = ns2[0]; const sep2 = ns2[0];
var it1 = mem.tokenize(u8, ns1, &[_]u8{sep1}); var it1 = mem.tokenizeScalar(u8, ns1, sep1);
var it2 = mem.tokenize(u8, ns2, &[_]u8{sep2}); var it2 = mem.tokenizeScalar(u8, ns2, sep2);
// TODO ASCII is wrong, we actually need full unicode support to compare paths. // TODO ASCII is wrong, we actually need full unicode support to compare paths.
return ascii.eqlIgnoreCase(it1.next().?, it2.next().?); return ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@ -441,8 +441,8 @@ fn compareDiskDesignators(kind: WindowsPath.Kind, p1: []const u8, p2: []const u8
const sep1 = p1[0]; const sep1 = p1[0];
const sep2 = p2[0]; const sep2 = p2[0];
var it1 = mem.tokenize(u8, p1, &[_]u8{sep1}); var it1 = mem.tokenizeScalar(u8, p1, sep1);
var it2 = mem.tokenize(u8, p2, &[_]u8{sep2}); var it2 = mem.tokenizeScalar(u8, p2, sep2);
// TODO ASCII is wrong, we actually need full unicode support to compare paths. // TODO ASCII is wrong, we actually need full unicode support to compare paths.
return ascii.eqlIgnoreCase(it1.next().?, it2.next().?) and ascii.eqlIgnoreCase(it1.next().?, it2.next().?); return ascii.eqlIgnoreCase(it1.next().?, it2.next().?) and ascii.eqlIgnoreCase(it1.next().?, it2.next().?);
@ -535,7 +535,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
break :l disk_designator.len; break :l disk_designator.len;
}, },
.NetworkShare => { .NetworkShare => {
var it = mem.tokenize(u8, paths[first_index], "/\\"); var it = mem.tokenizeAny(u8, paths[first_index], "/\\");
const server_name = it.next().?; const server_name = it.next().?;
const other_name = it.next().?; const other_name = it.next().?;
@ -570,7 +570,7 @@ pub fn resolveWindows(allocator: Allocator, paths: []const []const u8) ![]u8 {
if (!correct_disk_designator) { if (!correct_disk_designator) {
continue; continue;
} }
var it = mem.tokenize(u8, p[parsed.disk_designator.len..], "/\\"); var it = mem.tokenizeAny(u8, p[parsed.disk_designator.len..], "/\\");
while (it.next()) |component| { while (it.next()) |component| {
if (mem.eql(u8, component, ".")) { if (mem.eql(u8, component, ".")) {
continue; continue;
@ -657,7 +657,7 @@ pub fn resolvePosix(allocator: Allocator, paths: []const []const u8) Allocator.E
negative_count = 0; negative_count = 0;
result.clearRetainingCapacity(); result.clearRetainingCapacity();
} }
var it = mem.tokenize(u8, p, "/"); var it = mem.tokenizeScalar(u8, p, '/');
while (it.next()) |component| { while (it.next()) |component| {
if (mem.eql(u8, component, ".")) { if (mem.eql(u8, component, ".")) {
continue; continue;
@ -1078,8 +1078,8 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
return resolved_to; return resolved_to;
} }
var from_it = mem.tokenize(u8, resolved_from, "/\\"); var from_it = mem.tokenizeAny(u8, resolved_from, "/\\");
var to_it = mem.tokenize(u8, resolved_to, "/\\"); var to_it = mem.tokenizeAny(u8, resolved_to, "/\\");
while (true) { while (true) {
const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest()); const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
const to_rest = to_it.rest(); const to_rest = to_it.rest();
@ -1102,7 +1102,7 @@ pub fn relativeWindows(allocator: Allocator, from: []const u8, to: []const u8) !
result_index += 3; result_index += 3;
} }
var rest_it = mem.tokenize(u8, to_rest, "/\\"); var rest_it = mem.tokenizeAny(u8, to_rest, "/\\");
while (rest_it.next()) |to_component| { while (rest_it.next()) |to_component| {
result[result_index] = '\\'; result[result_index] = '\\';
result_index += 1; result_index += 1;
@ -1124,8 +1124,8 @@ pub fn relativePosix(allocator: Allocator, from: []const u8, to: []const u8) ![]
const resolved_to = try resolvePosix(allocator, &[_][]const u8{ cwd, to }); const resolved_to = try resolvePosix(allocator, &[_][]const u8{ cwd, to });
defer allocator.free(resolved_to); defer allocator.free(resolved_to);
var from_it = mem.tokenize(u8, resolved_from, "/"); var from_it = mem.tokenizeScalar(u8, resolved_from, '/');
var to_it = mem.tokenize(u8, resolved_to, "/"); var to_it = mem.tokenizeScalar(u8, resolved_to, '/');
while (true) { while (true) {
const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest()); const from_component = from_it.next() orelse return allocator.dupe(u8, to_it.rest());
const to_rest = to_it.rest(); const to_rest = to_it.rest();

View File

@ -386,7 +386,7 @@ pub const Response = struct {
}; };
pub fn parse(res: *Response, bytes: []const u8, trailing: bool) ParseError!void { pub fn parse(res: *Response, bytes: []const u8, trailing: bool) ParseError!void {
var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n"); var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");
const first_line = it.next() orelse return error.HttpHeadersInvalid; const first_line = it.next() orelse return error.HttpHeadersInvalid;
if (first_line.len < 12) if (first_line.len < 12)
@ -412,7 +412,7 @@ pub const Response = struct {
else => {}, else => {},
} }
var line_it = mem.tokenize(u8, line, ": "); var line_it = mem.tokenizeAny(u8, line, ": ");
const header_name = line_it.next() orelse return error.HttpHeadersInvalid; const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
const header_value = line_it.rest(); const header_value = line_it.rest();

View File

@ -231,7 +231,7 @@ pub const Request = struct {
}; };
pub fn parse(req: *Request, bytes: []const u8) ParseError!void { pub fn parse(req: *Request, bytes: []const u8) ParseError!void {
var it = mem.tokenize(u8, bytes[0 .. bytes.len - 4], "\r\n"); var it = mem.tokenizeAny(u8, bytes[0 .. bytes.len - 4], "\r\n");
const first_line = it.next() orelse return error.HttpHeadersInvalid; const first_line = it.next() orelse return error.HttpHeadersInvalid;
if (first_line.len < 10) if (first_line.len < 10)
@ -265,7 +265,7 @@ pub const Request = struct {
else => {}, else => {},
} }
var line_it = mem.tokenize(u8, line, ": "); var line_it = mem.tokenizeAny(u8, line, ": ");
const header_name = line_it.next() orelse return error.HttpHeadersInvalid; const header_name = line_it.next() orelse return error.HttpHeadersInvalid;
const header_value = line_it.rest(); const header_value = line_it.rest();

View File

@ -1266,7 +1266,7 @@ fn linuxLookupNameFromHosts(
var split_it = mem.split(u8, line, "#"); var split_it = mem.split(u8, line, "#");
const no_comment_line = split_it.first(); const no_comment_line = split_it.first();
var line_it = mem.tokenize(u8, no_comment_line, " \t"); var line_it = mem.tokenizeAny(u8, no_comment_line, " \t");
const ip_text = line_it.next() orelse continue; const ip_text = line_it.next() orelse continue;
var first_name_text: ?[]const u8 = null; var first_name_text: ?[]const u8 = null;
while (line_it.next()) |name_text| { while (line_it.next()) |name_text| {
@ -1346,7 +1346,7 @@ fn linuxLookupNameFromDnsSearch(
@memcpy(canon.items, canon_name); @memcpy(canon.items, canon_name);
try canon.append('.'); try canon.append('.');
var tok_it = mem.tokenize(u8, search, " \t"); var tok_it = mem.tokenizeAny(u8, search, " \t");
while (tok_it.next()) |tok| { while (tok_it.next()) |tok| {
canon.shrinkRetainingCapacity(canon_name.len + 1); canon.shrinkRetainingCapacity(canon_name.len + 1);
try canon.appendSlice(tok); try canon.appendSlice(tok);
@ -1468,7 +1468,7 @@ fn getResolvConf(allocator: mem.Allocator, rc: *ResolvConf) !void {
var split = mem.split(u8, line, "#"); var split = mem.split(u8, line, "#");
break :no_comment_line split.first(); break :no_comment_line split.first();
}; };
var line_it = mem.tokenize(u8, no_comment_line, " \t"); var line_it = mem.tokenizeAny(u8, no_comment_line, " \t");
const token = line_it.next() orelse continue; const token = line_it.next() orelse continue;
if (mem.eql(u8, token, "options")) { if (mem.eql(u8, token, "options")) {

View File

@ -1878,7 +1878,7 @@ pub fn execvpeZ_expandArg0(
// Use of MAX_PATH_BYTES here is valid as the path_buf will be passed // Use of MAX_PATH_BYTES here is valid as the path_buf will be passed
// directly to the operating system in execveZ. // directly to the operating system in execveZ.
var path_buf: [MAX_PATH_BYTES]u8 = undefined; var path_buf: [MAX_PATH_BYTES]u8 = undefined;
var it = mem.tokenize(u8, PATH, ":"); var it = mem.tokenizeScalar(u8, PATH, ':');
var seen_eacces = false; var seen_eacces = false;
var err: ExecveError = error.FileNotFound; var err: ExecveError = error.FileNotFound;

View File

@ -1200,7 +1200,7 @@ fn totalSystemMemoryLinux() !usize {
var buf: [50]u8 = undefined; var buf: [50]u8 = undefined;
const amt = try file.read(&buf); const amt = try file.read(&buf);
if (amt != 50) return error.Unexpected; if (amt != 50) return error.Unexpected;
var it = std.mem.tokenize(u8, buf[0..amt], " \n"); var it = std.mem.tokenizeAny(u8, buf[0..amt], " \n");
const label = it.next().?; const label = it.next().?;
if (!std.mem.eql(u8, label, "MemTotal:")) return error.Unexpected; if (!std.mem.eql(u8, label, "MemTotal:")) return error.Unexpected;
const int_text = it.next() orelse return error.Unexpected; const int_text = it.next() orelse return error.Unexpected;

View File

@ -31,7 +31,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
defer allocator.free(nix_cflags_compile); defer allocator.free(nix_cflags_compile);
is_nix = true; is_nix = true;
var it = mem.tokenize(u8, nix_cflags_compile, " "); var it = mem.tokenizeScalar(u8, nix_cflags_compile, ' ');
while (true) { while (true) {
const word = it.next() orelse break; const word = it.next() orelse break;
if (mem.eql(u8, word, "-isystem")) { if (mem.eql(u8, word, "-isystem")) {
@ -62,7 +62,7 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
defer allocator.free(nix_ldflags); defer allocator.free(nix_ldflags);
is_nix = true; is_nix = true;
var it = mem.tokenize(u8, nix_ldflags, " "); var it = mem.tokenizeScalar(u8, nix_ldflags, ' ');
while (true) { while (true) {
const word = it.next() orelse break; const word = it.next() orelse break;
if (mem.eql(u8, word, "-rpath")) { if (mem.eql(u8, word, "-rpath")) {
@ -147,21 +147,21 @@ pub fn detect(allocator: Allocator, native_info: NativeTargetInfo) !NativePaths
// We use os.getenv here since this part won't be executed on // We use os.getenv here since this part won't be executed on
// windows, to get rid of unnecessary error handling. // windows, to get rid of unnecessary error handling.
if (std.os.getenv("C_INCLUDE_PATH")) |c_include_path| { if (std.os.getenv("C_INCLUDE_PATH")) |c_include_path| {
var it = mem.tokenize(u8, c_include_path, ":"); var it = mem.tokenizeScalar(u8, c_include_path, ':');
while (it.next()) |dir| { while (it.next()) |dir| {
try self.addIncludeDir(dir); try self.addIncludeDir(dir);
} }
} }
if (std.os.getenv("CPLUS_INCLUDE_PATH")) |cplus_include_path| { if (std.os.getenv("CPLUS_INCLUDE_PATH")) |cplus_include_path| {
var it = mem.tokenize(u8, cplus_include_path, ":"); var it = mem.tokenizeScalar(u8, cplus_include_path, ':');
while (it.next()) |dir| { while (it.next()) |dir| {
try self.addIncludeDir(dir); try self.addIncludeDir(dir);
} }
} }
if (std.os.getenv("LIBRARY_PATH")) |library_path| { if (std.os.getenv("LIBRARY_PATH")) |library_path| {
var it = mem.tokenize(u8, library_path, ":"); var it = mem.tokenizeScalar(u8, library_path, ':');
while (it.next()) |dir| { while (it.next()) |dir| {
try self.addLibDir(dir); try self.addLibDir(dir);
} }

View File

@ -354,7 +354,7 @@ fn detectAbiAndDynamicLinker(
const newline = mem.indexOfScalar(u8, buffer[0..len], '\n') orelse break :blk file; const newline = mem.indexOfScalar(u8, buffer[0..len], '\n') orelse break :blk file;
const line = buffer[0..newline]; const line = buffer[0..newline];
if (!mem.startsWith(u8, line, "#!")) break :blk file; if (!mem.startsWith(u8, line, "#!")) break :blk file;
var it = mem.tokenize(u8, line[2..], " "); var it = mem.tokenizeScalar(u8, line[2..], ' ');
file_name = it.next() orelse return defaultAbiAndDynamicLinker(cpu, os, cross_target); file_name = it.next() orelse return defaultAbiAndDynamicLinker(cpu, os, cross_target);
file.close(); file.close();
} }
@ -811,7 +811,7 @@ pub fn abiAndDynamicLinkerFromFile(
const strtab = strtab_buf[0..strtab_read_len]; const strtab = strtab_buf[0..strtab_read_len];
const rpath_list = mem.sliceTo(strtab, 0); const rpath_list = mem.sliceTo(strtab, 0);
var it = mem.tokenize(u8, rpath_list, ":"); var it = mem.tokenizeScalar(u8, rpath_list, ':');
while (it.next()) |rpath| { while (it.next()) |rpath| {
if (glibcVerFromRPath(rpath)) |ver| { if (glibcVerFromRPath(rpath)) |ver| {
result.target.os.version_range.linux.glibc = ver; result.target.os.version_range.linux.glibc = ver;

View File

@ -8409,9 +8409,9 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
} }
const asm_source = mem.sliceAsBytes(self.air.extra[extra_i..])[0..extra.data.source_len]; const asm_source = mem.sliceAsBytes(self.air.extra[extra_i..])[0..extra.data.source_len];
var line_it = mem.tokenize(u8, asm_source, "\n\r;"); var line_it = mem.tokenizeAny(u8, asm_source, "\n\r;");
while (line_it.next()) |line| { while (line_it.next()) |line| {
var mnem_it = mem.tokenize(u8, line, " \t"); var mnem_it = mem.tokenizeAny(u8, line, " \t");
const mnem_str = mnem_it.next() orelse continue; const mnem_str = mnem_it.next() orelse continue;
if (mem.startsWith(u8, mnem_str, "#")) continue; if (mem.startsWith(u8, mnem_str, "#")) continue;
@ -8435,7 +8435,7 @@ fn airAsm(self: *Self, inst: Air.Inst.Index) !void {
return self.fail("Invalid mnemonic: '{s}'", .{mnem_str}); return self.fail("Invalid mnemonic: '{s}'", .{mnem_str});
} }; } };
var op_it = mem.tokenize(u8, mnem_it.rest(), ","); var op_it = mem.tokenizeScalar(u8, mnem_it.rest(), ',');
var ops = [1]encoder.Instruction.Operand{.none} ** 4; var ops = [1]encoder.Instruction.Operand{.none} ** 4;
for (&ops) |*op| { for (&ops) |*op| {
const op_str = mem.trim(u8, op_it.next() orelse break, " \t"); const op_str = mem.trim(u8, op_it.next() orelse break, " \t");

View File

@ -109,7 +109,7 @@ pub fn loadMetaData(gpa: Allocator, contents: []const u8) LoadMetaDataError!*ABI
const target_name = mem.sliceTo(contents[index..], 0); const target_name = mem.sliceTo(contents[index..], 0);
index += target_name.len + 1; index += target_name.len + 1;
var component_it = mem.tokenize(u8, target_name, "-"); var component_it = mem.tokenizeScalar(u8, target_name, '-');
const arch_name = component_it.next() orelse { const arch_name = component_it.next() orelse {
log.err("abilists: expected arch name", .{}); log.err("abilists: expected arch name", .{});
return error.ZigInstallationCorrupt; return error.ZigInstallationCorrupt;

View File

@ -60,7 +60,7 @@ pub const LibCInstallation = struct {
const contents = try std.fs.cwd().readFileAlloc(allocator, libc_file, std.math.maxInt(usize)); const contents = try std.fs.cwd().readFileAlloc(allocator, libc_file, std.math.maxInt(usize));
defer allocator.free(contents); defer allocator.free(contents);
var it = std.mem.tokenize(u8, contents, "\n"); var it = std.mem.tokenizeScalar(u8, contents, '\n');
while (it.next()) |line| { while (it.next()) |line| {
if (line.len == 0 or line[0] == '#') continue; if (line.len == 0 or line[0] == '#') continue;
var line_it = std.mem.split(u8, line, "="); var line_it = std.mem.split(u8, line, "=");
@ -293,7 +293,7 @@ pub const LibCInstallation = struct {
}, },
} }
var it = std.mem.tokenize(u8, exec_res.stderr, "\n\r"); var it = std.mem.tokenizeAny(u8, exec_res.stderr, "\n\r");
var search_paths = std.ArrayList([]const u8).init(allocator); var search_paths = std.ArrayList([]const u8).init(allocator);
defer search_paths.deinit(); defer search_paths.deinit();
while (it.next()) |line| { while (it.next()) |line| {
@ -613,7 +613,7 @@ fn ccPrintFileName(args: CCPrintFileNameOptions) ![:0]u8 {
}, },
} }
var it = std.mem.tokenize(u8, exec_res.stdout, "\n\r"); var it = std.mem.tokenizeAny(u8, exec_res.stdout, "\n\r");
const line = it.next() orelse return error.LibCRuntimeNotFound; const line = it.next() orelse return error.LibCRuntimeNotFound;
// When this command fails, it returns exit code 0 and duplicates the input file name. // When this command fails, it returns exit code 0 and duplicates the input file name.
// So we detect failure by checking if the output matches exactly the input. // So we detect failure by checking if the output matches exactly the input.
@ -692,7 +692,7 @@ fn appendCcExe(args: *std.ArrayList([]const u8), skip_cc_env_var: bool) !void {
return; return;
}; };
// Respect space-separated flags to the C compiler. // Respect space-separated flags to the C compiler.
var it = std.mem.tokenize(u8, cc_env_var, " "); var it = std.mem.tokenizeScalar(u8, cc_env_var, ' ');
while (it.next()) |arg| { while (it.next()) |arg| {
try args.append(arg); try args.append(arg);
} }

View File

@ -264,7 +264,7 @@ fn putFn(self: *Plan9, decl_index: Module.Decl.Index, out: FnDeclOutput) !void {
fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !void { fn addPathComponents(self: *Plan9, path: []const u8, a: *std.ArrayList(u8)) !void {
const sep = std.fs.path.sep; const sep = std.fs.path.sep;
var it = std.mem.tokenize(u8, path, &.{sep}); var it = std.mem.tokenizeScalar(u8, path, sep);
while (it.next()) |component| { while (it.next()) |component| {
if (self.file_segments.get(component)) |num| { if (self.file_segments.get(component)) |num| {
try a.writer().writeIntBig(u16, num); try a.writer().writeIntBig(u16, num);

View File

@ -2581,7 +2581,7 @@ const Writer = struct {
fn writeDocComment(self: *Writer, stream: anytype, doc_comment_index: u32) !void { fn writeDocComment(self: *Writer, stream: anytype, doc_comment_index: u32) !void {
if (doc_comment_index != 0) { if (doc_comment_index != 0) {
const doc_comment = self.code.nullTerminatedString(doc_comment_index); const doc_comment = self.code.nullTerminatedString(doc_comment_index);
var it = std.mem.tokenize(u8, doc_comment, "\n"); var it = std.mem.tokenizeScalar(u8, doc_comment, '\n');
while (it.next()) |doc_line| { while (it.next()) |doc_line| {
try stream.writeByteNTimes(' ', self.indent); try stream.writeByteNTimes(' ', self.indent);
try stream.print("///{s}\n", .{doc_line}); try stream.print("///{s}\n", .{doc_line});

View File

@ -18,7 +18,7 @@ test "issue 6456" {
comptime { comptime {
var fields: []const StructField = &[0]StructField{}; var fields: []const StructField = &[0]StructField{};
var it = std.mem.tokenize(u8, text, "\n"); var it = std.mem.tokenizeScalar(u8, text, '\n');
while (it.next()) |name| { while (it.next()) |name| {
fields = fields ++ &[_]StructField{StructField{ fields = fields ++ &[_]StructField{StructField{
.alignment = 0, .alignment = 0,

View File

@ -846,7 +846,7 @@ const TestManifest = struct {
const actual_start = start orelse return error.MissingTestManifest; const actual_start = start orelse return error.MissingTestManifest;
const manifest_bytes = bytes[actual_start..end]; const manifest_bytes = bytes[actual_start..end];
var it = std.mem.tokenize(u8, manifest_bytes, "\r\n"); var it = std.mem.tokenizeAny(u8, manifest_bytes, "\r\n");
// First line is the test type // First line is the test type
const tt: Type = blk: { const tt: Type = blk: {
@ -923,7 +923,7 @@ const TestManifest = struct {
fn trailing(self: TestManifest) TrailingIterator { fn trailing(self: TestManifest) TrailingIterator {
return .{ return .{
.inner = std.mem.tokenize(u8, self.trailing_bytes, "\r\n"), .inner = std.mem.tokenizeAny(u8, self.trailing_bytes, "\r\n"),
}; };
} }

View File

@ -51,11 +51,11 @@ pub fn main() !void {
try writer.writeAll("pub const X86 = enum(usize) {\n"); try writer.writeAll("pub const X86 = enum(usize) {\n");
const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_32.tbl", buf); const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_32.tbl", buf);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
// abi is always i386 // abi is always i386
_ = fields.next() orelse return error.Incomplete; _ = fields.next() orelse return error.Incomplete;
@ -70,11 +70,11 @@ pub fn main() !void {
try writer.writeAll("pub const X64 = enum(usize) {\n"); try writer.writeAll("pub const X64 = enum(usize) {\n");
const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_64.tbl", buf); const table = try linux_dir.readFile("arch/x86/entry/syscalls/syscall_64.tbl", buf);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
const abi = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete;
// The x32 abi syscalls are always at the end. // The x32 abi syscalls are always at the end.
@ -96,11 +96,11 @@ pub fn main() !void {
); );
const table = try linux_dir.readFile("arch/arm/tools/syscall.tbl", buf); const table = try linux_dir.readFile("arch/arm/tools/syscall.tbl", buf);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
const abi = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete;
if (mem.eql(u8, abi, "oabi")) continue; if (mem.eql(u8, abi, "oabi")) continue;
@ -127,11 +127,11 @@ pub fn main() !void {
{ {
try writer.writeAll("pub const Sparc64 = enum(usize) {\n"); try writer.writeAll("pub const Sparc64 = enum(usize) {\n");
const table = try linux_dir.readFile("arch/sparc/kernel/syscalls/syscall.tbl", buf); const table = try linux_dir.readFile("arch/sparc/kernel/syscalls/syscall.tbl", buf);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
const abi = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete;
if (mem.eql(u8, abi, "32")) continue; if (mem.eql(u8, abi, "32")) continue;
@ -151,11 +151,11 @@ pub fn main() !void {
); );
const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_o32.tbl", buf); const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_o32.tbl", buf);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
// abi is always o32 // abi is always o32
_ = fields.next() orelse return error.Incomplete; _ = fields.next() orelse return error.Incomplete;
@ -176,11 +176,11 @@ pub fn main() !void {
); );
const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_n64.tbl", buf); const table = try linux_dir.readFile("arch/mips/kernel/syscalls/syscall_n64.tbl", buf);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
// abi is always n64 // abi is always n64
_ = fields.next() orelse return error.Incomplete; _ = fields.next() orelse return error.Incomplete;
@ -197,11 +197,11 @@ pub fn main() !void {
const table = try linux_dir.readFile("arch/powerpc/kernel/syscalls/syscall.tbl", buf); const table = try linux_dir.readFile("arch/powerpc/kernel/syscalls/syscall.tbl", buf);
var list_64 = std.ArrayList(u8).init(allocator); var list_64 = std.ArrayList(u8).init(allocator);
var lines = mem.tokenize(u8, table, "\n"); var lines = mem.tokenizeScalar(u8, table, '\n');
while (lines.next()) |line| { while (lines.next()) |line| {
if (line[0] == '#') continue; if (line[0] == '#') continue;
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const number = fields.next() orelse return error.Incomplete; const number = fields.next() orelse return error.Incomplete;
const abi = fields.next() orelse return error.Incomplete; const abi = fields.next() orelse return error.Incomplete;
const name = fields.next() orelse return error.Incomplete; const name = fields.next() orelse return error.Incomplete;
@ -277,9 +277,9 @@ pub fn main() !void {
}, },
}; };
var lines = mem.tokenize(u8, defines, "\n"); var lines = mem.tokenizeScalar(u8, defines, '\n');
loop: while (lines.next()) |line| { loop: while (lines.next()) |line| {
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const cmd = fields.next() orelse return error.Incomplete; const cmd = fields.next() orelse return error.Incomplete;
if (!mem.eql(u8, cmd, "#define")) continue; if (!mem.eql(u8, cmd, "#define")) continue;
const define = fields.next() orelse return error.Incomplete; const define = fields.next() orelse return error.Incomplete;
@ -339,9 +339,9 @@ pub fn main() !void {
}, },
}; };
var lines = mem.tokenize(u8, defines, "\n"); var lines = mem.tokenizeScalar(u8, defines, '\n');
loop: while (lines.next()) |line| { loop: while (lines.next()) |line| {
var fields = mem.tokenize(u8, line, " \t"); var fields = mem.tokenizeAny(u8, line, " \t");
const cmd = fields.next() orelse return error.Incomplete; const cmd = fields.next() orelse return error.Incomplete;
if (!mem.eql(u8, cmd, "#define")) continue; if (!mem.eql(u8, cmd, "#define")) continue;
const define = fields.next() orelse return error.Incomplete; const define = fields.next() orelse return error.Incomplete;