remove deprecated uses of ArrayList.span

Josh Holland 2020-11-06 18:54:08 +00:00
parent c9551652b0
commit c25b157dda
29 changed files with 108 additions and 108 deletions
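
Background (not part of the diff): ArrayList.span() was a deprecated way to obtain the list's backing slice, and this commit replaces each use with direct access to the items field. A minimal sketch of the migration pattern, assuming a roughly Zig 0.7-era standard library and a hypothetical test name:

    const std = @import("std");

    test "ArrayList: use .items instead of the deprecated .span()" {
        var list = std.ArrayList(u8).init(std.testing.allocator);
        defer list.deinit();
        try list.appendSlice("abc");

        // Before: const slice = list.span();
        const slice = list.items; // the replacement applied throughout this commit
        std.testing.expectEqualSlices(u8, "abc", slice);
    }

Note that ArrayListSentineled keeps its own span() method (it returns a sentinel-terminated slice, which a plain items field cannot express), which is presumably why calls such as buf_b.span() further down are left untouched.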


@@ -1061,7 +1061,7 @@ test "std.ArrayList(u8) implements outStream" {
 const y: i32 = 1234;
 try buffer.outStream().print("x: {}\ny: {}\n", .{ x, y });
-testing.expectEqualSlices(u8, "x: 42\ny: 1234\n", buffer.span());
+testing.expectEqualSlices(u8, "x: 42\ny: 1234\n", buffer.items);
 }
 test "std.ArrayList/ArrayListUnmanaged.shrink still sets length on error.OutOfMemory" {


@@ -147,7 +147,7 @@ pub fn ArrayListSentineled(comptime T: type, comptime sentinel: T) type {
 pub fn replaceContents(self: *Self, m: []const T) !void {
 try self.resize(m.len);
-mem.copy(T, self.list.span(), m);
+mem.copy(T, self.list.items, m);
 }
 /// Initializes an OutStream which will append to the list.


@@ -386,7 +386,7 @@ pub const Builder = struct {
 }
 }
-for (wanted_steps.span()) |s| {
+for (wanted_steps.items) |s| {
 try self.makeOneStep(s);
 }
 }
@@ -403,7 +403,7 @@ pub const Builder = struct {
 const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
 const self = @fieldParentPtr(Builder, "uninstall_tls", uninstall_tls);
-for (self.installed_files.span()) |installed_file| {
+for (self.installed_files.items) |installed_file| {
 const full_path = self.getInstallPath(installed_file.dir, installed_file.path);
 if (self.verbose) {
 warn("rm {}\n", .{full_path});
@@ -421,7 +421,7 @@ pub const Builder = struct {
 }
 s.loop_flag = true;
-for (s.dependencies.span()) |dep| {
+for (s.dependencies.items) |dep| {
 self.makeOneStep(dep) catch |err| {
 if (err == error.DependencyLoopDetected) {
 warn(" {}\n", .{s.name});
@@ -436,7 +436,7 @@ pub const Builder = struct {
 }
 fn getTopLevelStepByName(self: *Builder, name: []const u8) !*Step {
-for (self.top_level_steps.span()) |top_level_step| {
+for (self.top_level_steps.items) |top_level_step| {
 if (mem.eql(u8, top_level_step.step.name, name)) {
 return &top_level_step.step;
 }
@@ -550,7 +550,7 @@ pub const Builder = struct {
 .Scalar => |s| {
 return self.allocator.dupe([]const u8, &[_][]const u8{s}) catch unreachable;
 },
-.List => |lst| return lst.span(),
+.List => |lst| return lst.items,
 },
 }
 }
@@ -951,7 +951,7 @@ pub const Builder = struct {
 pub fn findProgram(self: *Builder, names: []const []const u8, paths: []const []const u8) ![]const u8 {
 // TODO report error for ambiguous situations
 const exe_extension = @as(CrossTarget, .{}).exeFileExt();
-for (self.search_prefixes.span()) |search_prefix| {
+for (self.search_prefixes.items) |search_prefix| {
 for (names) |name| {
 if (fs.path.isAbsolute(name)) {
 return name;
@@ -1096,7 +1096,7 @@ pub const Builder = struct {
 .desc = tok_it.rest(),
 });
 }
-return list.span();
+return list.items;
 }
 fn getPkgConfigList(self: *Builder) ![]const PkgConfigPkg {
@@ -1505,7 +1505,7 @@ pub const LibExeObjStep = struct {
 if (isLibCLibrary(name)) {
 return self.is_linking_libc;
 }
-for (self.link_objects.span()) |link_object| {
+for (self.link_objects.items) |link_object| {
 switch (link_object) {
 LinkObject.SystemLib => |n| if (mem.eql(u8, n, name)) return true,
 else => continue,
@@ -1908,7 +1908,7 @@ pub const LibExeObjStep = struct {
 self.include_dirs.append(IncludeDir{ .OtherStep = other }) catch unreachable;
 // Inherit dependency on system libraries
-for (other.link_objects.span()) |link_object| {
+for (other.link_objects.items) |link_object| {
 switch (link_object) {
 .SystemLib => |name| self.linkSystemLibrary(name),
 else => continue,
@@ -1970,7 +1970,7 @@ pub const LibExeObjStep = struct {
 if (self.root_src) |root_src| try zig_args.append(root_src.getPath(builder));
 var prev_has_extra_flags = false;
-for (self.link_objects.span()) |link_object| {
+for (self.link_objects.items) |link_object| {
 switch (link_object) {
 .StaticPath => |static_path| {
 try zig_args.append(builder.pathFromRoot(static_path));
@@ -2040,7 +2040,7 @@ pub const LibExeObjStep = struct {
 &[_][]const u8{ builder.cache_root, builder.fmt("{}_build_options.zig", .{self.name}) },
 );
 const path_from_root = builder.pathFromRoot(build_options_file);
-try fs.cwd().writeFile(path_from_root, self.build_options_contents.span());
+try fs.cwd().writeFile(path_from_root, self.build_options_contents.items);
 try zig_args.append("--pkg-begin");
 try zig_args.append("build_options");
 try zig_args.append(path_from_root);
@@ -2238,11 +2238,11 @@ pub const LibExeObjStep = struct {
 },
 }
-for (self.packages.span()) |pkg| {
+for (self.packages.items) |pkg| {
 try self.makePackageCmd(pkg, &zig_args);
 }
-for (self.include_dirs.span()) |include_dir| {
+for (self.include_dirs.items) |include_dir| {
 switch (include_dir) {
 .RawPath => |include_path| {
 try zig_args.append("-I");
@@ -2260,18 +2260,18 @@ pub const LibExeObjStep = struct {
 }
 }
-for (self.lib_paths.span()) |lib_path| {
+for (self.lib_paths.items) |lib_path| {
 try zig_args.append("-L");
 try zig_args.append(lib_path);
 }
-for (self.c_macros.span()) |c_macro| {
+for (self.c_macros.items) |c_macro| {
 try zig_args.append("-D");
 try zig_args.append(c_macro);
 }
 if (self.target.isDarwin()) {
-for (self.framework_dirs.span()) |dir| {
+for (self.framework_dirs.items) |dir| {
 try zig_args.append("-F");
 try zig_args.append(dir);
 }
@@ -2331,11 +2331,11 @@ pub const LibExeObjStep = struct {
 }
 if (self.kind == Kind.Test) {
-try builder.spawnChild(zig_args.span());
+try builder.spawnChild(zig_args.items);
 } else {
 try zig_args.append("--enable-cache");
-const output_dir_nl = try builder.execFromStep(zig_args.span(), &self.step);
+const output_dir_nl = try builder.execFromStep(zig_args.items, &self.step);
 const build_output_dir = mem.trimRight(u8, output_dir_nl, "\r\n");
 if (self.output_dir) |output_dir| {


@@ -79,7 +79,7 @@ const BinaryElfOutput = struct {
 newSegment.binaryOffset = 0;
 newSegment.firstSection = null;
-for (self.sections.span()) |section| {
+for (self.sections.items) |section| {
 if (sectionWithinSegment(section, phdr)) {
 if (section.segment) |sectionSegment| {
 if (sectionSegment.elfOffset > newSegment.elfOffset) {
@@ -99,7 +99,7 @@ const BinaryElfOutput = struct {
 }
 }
-sort.sort(*BinaryElfSegment, self.segments.span(), {}, segmentSortCompare);
+sort.sort(*BinaryElfSegment, self.segments.items, {}, segmentSortCompare);
 if (self.segments.items.len > 0) {
 const firstSegment = self.segments.items[0];
@@ -112,19 +112,19 @@ const BinaryElfOutput = struct {
 const basePhysicalAddress = firstSegment.physicalAddress;
-for (self.segments.span()) |segment| {
+for (self.segments.items) |segment| {
 segment.binaryOffset = segment.physicalAddress - basePhysicalAddress;
 }
 }
 }
-for (self.sections.span()) |section| {
+for (self.sections.items) |section| {
 if (section.segment) |segment| {
 section.binaryOffset = segment.binaryOffset + (section.elfOffset - segment.elfOffset);
 }
 }
-sort.sort(*BinaryElfSection, self.sections.span(), {}, sectionSortCompare);
+sort.sort(*BinaryElfSection, self.sections.items, {}, sectionSortCompare);
 return self;
 }
@@ -172,7 +172,7 @@ fn emitRaw(allocator: *Allocator, elf_path: []const u8, raw_path: []const u8) !v
 var binary_elf_output = try BinaryElfOutput.parse(allocator, elf_file);
 defer binary_elf_output.deinit();
-for (binary_elf_output.sections.span()) |section| {
+for (binary_elf_output.sections.items) |section| {
 try writeBinaryElfSection(elf_file, out_file, section);
 }
 }


@@ -159,7 +159,7 @@ pub const RunStep = struct {
 const cwd = if (self.cwd) |cwd| self.builder.pathFromRoot(cwd) else self.builder.build_root;
 var argv_list = ArrayList([]const u8).init(self.builder.allocator);
-for (self.argv.span()) |arg| {
+for (self.argv.items) |arg| {
 switch (arg) {
 Arg.Bytes => |bytes| try argv_list.append(bytes),
 Arg.WriteFile => |file| {
@@ -176,7 +176,7 @@ pub const RunStep = struct {
 }
 }
-const argv = argv_list.span();
+const argv = argv_list.items;
 const child = std.ChildProcess.init(argv, self.builder.allocator) catch unreachable;
 defer child.deinit();
@@ -312,7 +312,7 @@ pub const RunStep = struct {
 }
 fn addPathForDynLibs(self: *RunStep, artifact: *LibExeObjStep) void {
-for (artifact.link_objects.span()) |link_object| {
+for (artifact.link_objects.items) |link_object| {
 switch (link_object) {
 .OtherStep => |other| {
 if (other.target.isWindows() and other.isDynamicLibrary()) {


@@ -86,7 +86,7 @@ pub const TranslateCStep = struct {
 try argv_list.append(self.source.getPath(self.builder));
-const output_path_nl = try self.builder.execFromStep(argv_list.span(), &self.step);
+const output_path_nl = try self.builder.execFromStep(argv_list.items, &self.step);
 const output_path = mem.trimRight(u8, output_path_nl, "\r\n");
 self.out_basename = fs.path.basename(output_path);


@@ -64,7 +64,7 @@ pub const WriteFileStep = struct {
 // new random bytes when WriteFileStep implementation is modified
 // in a non-backwards-compatible way.
 hash.update("eagVR1dYXoE7ARDP");
-for (self.files.span()) |file| {
+for (self.files.items) |file| {
 hash.update(file.basename);
 hash.update(file.bytes);
 hash.update("|");
@@ -85,7 +85,7 @@ pub const WriteFileStep = struct {
 };
 var dir = try fs.cwd().openDir(self.output_dir, .{});
 defer dir.close();
-for (self.files.span()) |file| {
+for (self.files.items) |file| {
 dir.writeFile(file.basename, file.bytes) catch |err| {
 warn("unable to write {} into {}: {}\n", .{
 file.basename,


@@ -216,7 +216,7 @@ pub const Coff = struct {
 blk: while (i < debug_dir_entry_count) : (i += 1) {
 const debug_dir_entry = try in.readStruct(DebugDirectoryEntry);
 if (debug_dir_entry.type == IMAGE_DEBUG_TYPE_CODEVIEW) {
-for (self.sections.span()) |*section| {
+for (self.sections.items) |*section| {
 const section_start = section.header.virtual_address;
 const section_size = section.header.misc.virtual_size;
 const rva = debug_dir_entry.address_of_raw_data;
@@ -282,7 +282,7 @@ pub const Coff = struct {
 }
 pub fn getSection(self: *Coff, comptime name: []const u8) ?*Section {
-for (self.sections.span()) |*sec| {
+for (self.sections.items) |*sec| {
 if (mem.eql(u8, sec.header.name[0..name.len], name)) {
 return sec;
 }


@@ -1509,7 +1509,7 @@ pub const ModuleDebugInfo = switch (builtin.os.tag) {
 const mod_index = for (self.sect_contribs) |sect_contrib| {
 if (sect_contrib.Section > self.coff.sections.items.len) continue;
 // Remember that SectionContribEntry.Section is 1-based.
-coff_section = &self.coff.sections.span()[sect_contrib.Section - 1];
+coff_section = &self.coff.sections.items[sect_contrib.Section - 1];
 const vaddr_start = coff_section.header.virtual_address + sect_contrib.Offset;
 const vaddr_end = vaddr_start + sect_contrib.Size;


@@ -87,7 +87,7 @@ const Die = struct {
 };
 fn getAttr(self: *const Die, id: u64) ?*const FormValue {
-for (self.attrs.span()) |*attr| {
+for (self.attrs.items) |*attr| {
 if (attr.id == id) return &attr.value;
 }
 return null;
@@ -371,7 +371,7 @@ fn parseFormValue(allocator: *mem.Allocator, in_stream: anytype, form_id: u64, e
 }
 fn getAbbrevTableEntry(abbrev_table: *const AbbrevTable, abbrev_code: u64) ?*const AbbrevTableEntry {
-for (abbrev_table.span()) |*table_entry| {
+for (abbrev_table.items) |*table_entry| {
 if (table_entry.abbrev_code == abbrev_code) return table_entry;
 }
 return null;
@@ -395,7 +395,7 @@ pub const DwarfInfo = struct {
 }
 pub fn getSymbolName(di: *DwarfInfo, address: u64) ?[]const u8 {
-for (di.func_list.span()) |*func| {
+for (di.func_list.items) |*func| {
 if (func.pc_range) |range| {
 if (address >= range.start and address < range.end) {
 return func.name;
@@ -584,7 +584,7 @@ pub const DwarfInfo = struct {
 }
 pub fn findCompileUnit(di: *DwarfInfo, target_address: u64) !*const CompileUnit {
-for (di.compile_unit_list.span()) |*compile_unit| {
+for (di.compile_unit_list.items) |*compile_unit| {
 if (compile_unit.pc_range) |range| {
 if (target_address >= range.start and target_address < range.end) return compile_unit;
 }
@@ -632,7 +632,7 @@ pub const DwarfInfo = struct {
 /// Gets an already existing AbbrevTable given the abbrev_offset, or if not found,
 /// seeks in the stream and parses it.
 fn getAbbrevTable(di: *DwarfInfo, abbrev_offset: u64) !*const AbbrevTable {
-for (di.abbrev_table_list.span()) |*header| {
+for (di.abbrev_table_list.items) |*header| {
 if (header.offset == abbrev_offset) {
 return &header.table;
 }
@@ -686,7 +686,7 @@ pub const DwarfInfo = struct {
 .attrs = ArrayList(Die.Attr).init(di.allocator()),
 };
 try result.attrs.resize(table_entry.attrs.items.len);
-for (table_entry.attrs.span()) |attr, i| {
+for (table_entry.attrs.items) |attr, i| {
 result.attrs.items[i] = Die.Attr{
 .id = attr.attr_id,
 .value = try parseFormValue(di.allocator(), in_stream, attr.form_id, di.endian, is_64),
@@ -753,7 +753,7 @@ pub const DwarfInfo = struct {
 }
 var file_entries = ArrayList(FileEntry).init(di.allocator());
-var prog = LineNumberProgram.init(default_is_stmt, include_directories.span(), &file_entries, target_address);
+var prog = LineNumberProgram.init(default_is_stmt, include_directories.items, &file_entries, target_address);
 while (true) {
 const file_name = try in.readUntilDelimiterAlloc(di.allocator(), 0, math.maxInt(usize));


@@ -2078,7 +2078,7 @@ pub const Walker = struct {
 while (true) {
 if (self.stack.items.len == 0) return null;
 // `top` becomes invalid after appending to `self.stack`.
-const top = &self.stack.span()[self.stack.items.len - 1];
+const top = &self.stack.items[self.stack.items.len - 1];
 const dirname_len = top.dirname_len;
 if (try top.dir_it.next()) |base| {
 self.name_buffer.shrink(dirname_len);
@@ -2099,8 +2099,8 @@ pub const Walker = struct {
 }
 return Entry{
 .dir = top.dir_it.dir,
-.basename = self.name_buffer.span()[dirname_len + 1 ..],
-.path = self.name_buffer.span(),
+.basename = self.name_buffer.items[dirname_len + 1 ..],
+.path = self.name_buffer.items,
 .kind = base.kind,
 };
 } else {


@@ -62,7 +62,7 @@ pub fn Reader(
 var start_index: usize = original_len;
 while (true) {
 array_list.expandToCapacity();
-const dest_slice = array_list.span()[start_index..];
+const dest_slice = array_list.items[start_index..];
 const bytes_read = try self.readAll(dest_slice);
 start_index += bytes_read;


@@ -1249,7 +1249,7 @@ pub const Value = union(enum) {
 .Integer => |inner| try stringify(inner, options, out_stream),
 .Float => |inner| try stringify(inner, options, out_stream),
 .String => |inner| try stringify(inner, options, out_stream),
-.Array => |inner| try stringify(inner.span(), options, out_stream),
+.Array => |inner| try stringify(inner.items, options, out_stream),
 .Object => |inner| {
 try out_stream.writeByte('{');
 var field_output = false;
@@ -2036,7 +2036,7 @@ pub const Parser = struct {
 }
 fn pushToParent(p: *Parser, value: *const Value) !void {
-switch (p.stack.span()[p.stack.items.len - 1]) {
+switch (p.stack.items[p.stack.items.len - 1]) {
 // Object Parent -> [ ..., object, <key>, value ]
 Value.String => |key| {
 _ = p.stack.pop();


@@ -796,7 +796,7 @@ pub fn getAddressList(allocator: *mem.Allocator, name: []const u8, port: u16) !*
 result.canon_name = canon.toOwnedSlice();
 }
-for (lookup_addrs.span()) |lookup_addr, i| {
+for (lookup_addrs.items) |lookup_addr, i| {
 result.addrs[i] = lookup_addr.addr;
 assert(result.addrs[i].getPort() == port);
 }
@@ -849,7 +849,7 @@ fn linuxLookupName(
 // No further processing is needed if there are fewer than 2
 // results or if there are only IPv4 results.
 if (addrs.items.len == 1 or family == os.AF_INET) return;
-const all_ip4 = for (addrs.span()) |addr| {
+const all_ip4 = for (addrs.items) |addr| {
 if (addr.addr.any.family != os.AF_INET) break false;
 } else true;
 if (all_ip4) return;
@@ -861,7 +861,7 @@ fn linuxLookupName(
 // So far the label/precedence table cannot be customized.
 // This implementation is ported from musl libc.
 // A more idiomatic "ziggy" implementation would be welcome.
-for (addrs.span()) |*addr, i| {
+for (addrs.items) |*addr, i| {
 var key: i32 = 0;
 var sa6: os.sockaddr_in6 = undefined;
 @memset(@ptrCast([*]u8, &sa6), 0, @sizeOf(os.sockaddr_in6));
@@ -926,7 +926,7 @@ fn linuxLookupName(
 key |= (MAXADDRS - @intCast(i32, i)) << DAS_ORDER_SHIFT;
 addr.sortkey = key;
 }
-std.sort.sort(LookupAddr, addrs.span(), {}, addrCmpLessThan);
+std.sort.sort(LookupAddr, addrs.items, {}, addrCmpLessThan);
 }
 const Policy = struct {
@@ -1361,9 +1361,9 @@ fn resMSendRc(
 defer ns_list.deinit();
 try ns_list.resize(rc.ns.items.len);
-const ns = ns_list.span();
-for (rc.ns.span()) |iplit, i| {
+const ns = ns_list.items;
+for (rc.ns.items) |iplit, i| {
 ns[i] = iplit.addr;
 assert(ns[i].getPort() == 53);
 if (iplit.addr.any.family != os.AF_INET) {


@@ -654,7 +654,7 @@ const MsfStream = struct {
 while (true) {
 const byte = try self.reader().readByte();
 if (byte == 0) {
-return list.span();
+return list.items;
 }
 try list.append(byte);
 }


@@ -519,8 +519,8 @@ pub fn argsAlloc(allocator: *mem.Allocator) ![][:0]u8 {
 try slice_list.append(arg.len);
 }
-const contents_slice = contents.span();
-const slice_sizes = slice_list.span();
+const contents_slice = contents.items;
+const slice_sizes = slice_list.items;
 const contents_size_bytes = try math.add(usize, contents_slice.len, slice_sizes.len);
 const slice_list_bytes = try math.mul(usize, @sizeOf([]u8), slice_sizes.len);
 const total_bytes = try math.add(usize, slice_list_bytes, contents_size_bytes);


@@ -130,7 +130,7 @@ pub fn main() !void {
 if (builder.validateUserInputDidItFail())
 return usageAndErr(builder, true, stderr_stream);
-builder.make(targets.span()) catch |err| {
+builder.make(targets.items) catch |err| {
 switch (err) {
 error.InvalidStepName => {
 return usageAndErr(builder, true, stderr_stream);
@@ -165,7 +165,7 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void
 , .{builder.zig_exe});
 const allocator = builder.allocator;
-for (builder.top_level_steps.span()) |top_level_step| {
+for (builder.top_level_steps.items) |top_level_step| {
 const name = if (&top_level_step.step == builder.default_step)
 try fmt.allocPrint(allocator, "{} (default)", .{top_level_step.step.name})
 else
@@ -189,7 +189,7 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: anytype) !void
 if (builder.available_options_list.items.len == 0) {
 try out_stream.print(" (none)\n", .{});
 } else {
-for (builder.available_options_list.span()) |option| {
+for (builder.available_options_list.items) |option| {
 const name = try fmt.allocPrint(allocator, " -D{}=[{}]", .{
 option.name,
 Builder.typeIdName(option.type_id),


@@ -128,7 +128,7 @@ pub const NativePaths = struct {
 }
 fn deinitArray(array: *ArrayList([:0]u8)) void {
-for (array.span()) |item| {
+for (array.items) |item| {
 array.allocator.free(item);
 }
 array.deinit();


@@ -342,7 +342,7 @@ pub const LibCInstallation = struct {
 result_buf.shrink(0);
 try result_buf.outStream().print("{}\\Include\\{}\\ucrt", .{ search.path, search.version });
-var dir = fs.cwd().openDir(result_buf.span(), .{}) catch |err| switch (err) {
+var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
 error.FileNotFound,
 error.NotDir,
 error.NoDevice,
@@ -388,7 +388,7 @@ pub const LibCInstallation = struct {
 result_buf.shrink(0);
 try result_buf.outStream().print("{}\\Lib\\{}\\ucrt\\{}", .{ search.path, search.version, arch_sub_dir });
-var dir = fs.cwd().openDir(result_buf.span(), .{}) catch |err| switch (err) {
+var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
 error.FileNotFound,
 error.NotDir,
 error.NoDevice,
@@ -443,7 +443,7 @@ pub const LibCInstallation = struct {
 const stream = result_buf.outStream();
 try stream.print("{}\\Lib\\{}\\um\\{}", .{ search.path, search.version, arch_sub_dir });
-var dir = fs.cwd().openDir(result_buf.span(), .{}) catch |err| switch (err) {
+var dir = fs.cwd().openDir(result_buf.items, .{}) catch |err| switch (err) {
 error.FileNotFound,
 error.NotDir,
 error.NoDevice,


@@ -2501,7 +2501,7 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
 defer fmt.seen.deinit();
 defer fmt.out_buffer.deinit();
-for (input_files.span()) |file_path| {
+for (input_files.items) |file_path| {
 // Get the real path here to avoid Windows failing on relative file paths with . or .. in them.
 const real_path = fs.realpathAlloc(gpa, file_path) catch |err| {
 fatal("unable to open '{}': {}", .{ file_path, err });
@@ -2681,7 +2681,7 @@ fn printErrMsgToFile(
 defer text_buf.deinit();
 const out_stream = text_buf.outStream();
 try parse_error.render(tree.token_ids, out_stream);
-const text = text_buf.span();
+const text = text_buf.items;
 const stream = file.outStream();
 try stream.print("{}:{}:{}: error: {}\n", .{ path, start_loc.line + 1, start_loc.column + 1, text });
@@ -2830,7 +2830,7 @@ pub const ClangArgIterator = struct {
 defer resp_arg_list.deinit();
 {
 errdefer {
-for (resp_arg_list.span()) |item| {
+for (resp_arg_list.items) |item| {
 allocator.free(mem.span(item));
 }
 }


@@ -6566,7 +6566,7 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod
 fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
 const tok = c.token_locs.items[token];
-const slice = c.source_buffer.span()[tok.start..tok.end];
+const slice = c.source_buffer.items[tok.start..tok.end];
 return if (mem.startsWith(u8, slice, "@\""))
 slice[2 .. slice.len - 1]
 else


@@ -91,7 +91,7 @@ pub const CompareOutputContext = struct {
 const b = self.b;
 const write_src = b.addWriteFiles();
-for (case.sources.span()) |src_file| {
+for (case.sources.items) |src_file| {
 write_src.add(src_file.filename, src_file.source);
 }
@@ -105,7 +105,7 @@ pub const CompareOutputContext = struct {
 }
 const exe = b.addExecutable("test", null);
-exe.addAssemblyFileFromWriteFileStep(write_src, case.sources.span()[0].filename);
+exe.addAssemblyFileFromWriteFileStep(write_src, case.sources.items[0].filename);
 const run = exe.run();
 run.addArgs(case.cli_args);
@@ -125,7 +125,7 @@ pub const CompareOutputContext = struct {
 if (mem.indexOf(u8, annotated_case_name, filter) == null) continue;
 }
-const basename = case.sources.span()[0].filename;
+const basename = case.sources.items[0].filename;
 const exe = b.addExecutableFromWriteFileStep("test", write_src, basename);
 exe.setBuildMode(mode);
 if (case.link_libc) {
@@ -146,7 +146,7 @@ pub const CompareOutputContext = struct {
 if (mem.indexOf(u8, annotated_case_name, filter) == null) return;
 }
-const basename = case.sources.span()[0].filename;
+const basename = case.sources.items[0].filename;
 const exe = b.addExecutableFromWriteFileStep("test", write_src, basename);
 if (case.link_libc) {
 exe.linkSystemLibrary("c");


@@ -82,13 +82,13 @@ pub const RunTranslatedCContext = struct {
 }
 const write_src = b.addWriteFiles();
-for (case.sources.span()) |src_file| {
+for (case.sources.items) |src_file| {
 write_src.add(src_file.filename, src_file.source);
 }
 const translate_c = b.addTranslateC(.{
 .write_file = .{
 .step = write_src,
-.basename = case.sources.span()[0].filename,
+.basename = case.sources.items[0].filename,
 },
 });
 translate_c.step.name = b.fmt("{} translate-c", .{annotated_case_name});


@@ -105,20 +105,20 @@ pub const TranslateCContext = struct {
 }
 const write_src = b.addWriteFiles();
-for (case.sources.span()) |src_file| {
+for (case.sources.items) |src_file| {
 write_src.add(src_file.filename, src_file.source);
 }
 const translate_c = b.addTranslateC(.{
 .write_file = .{
 .step = write_src,
-.basename = case.sources.span()[0].filename,
+.basename = case.sources.items[0].filename,
 },
 });
 translate_c.step.name = annotated_case_name;
 translate_c.setTarget(case.target);
-const check_file = translate_c.addCheckFile(case.expected_lines.span());
+const check_file = translate_c.addCheckFile(case.expected_lines.items);
 self.step.dependOn(&check_file.step);
 }


@@ -131,7 +131,7 @@ fn expandString(input: []const u8, output: *ArrayListSentineled(u8, 0)) !void {
 try expandNode(root, &result_list);
 try output.resize(0);
-for (result_list.span()) |buf, i| {
+for (result_list.items) |buf, i| {
 if (i != 0) {
 try output.append(' ');
 }
@@ -157,8 +157,8 @@ fn expandNode(node: Node, output: *ArrayList(ArrayListSentineled(u8, 0))) Expand
 var child_list_b = ArrayList(ArrayListSentineled(u8, 0)).init(global_allocator);
 try expandNode(b_node, &child_list_b);
-for (child_list_a.span()) |buf_a| {
-for (child_list_b.span()) |buf_b| {
+for (child_list_a.items) |buf_a| {
+for (child_list_b.items) |buf_b| {
 var combined_buf = try ArrayListSentineled(u8, 0).initFromBuffer(buf_a);
 try combined_buf.appendSlice(buf_b.span());
 try output.append(combined_buf);
@@ -166,11 +166,11 @@ fn expandNode(node: Node, output: *ArrayList(ArrayListSentineled(u8, 0))) Expand
 }
 },
 Node.List => |list| {
-for (list.span()) |child_node| {
+for (list.items) |child_node| {
 var child_list = ArrayList(ArrayListSentineled(u8, 0)).init(global_allocator);
 try expandNode(child_node, &child_list);
-for (child_list.span()) |buf| {
+for (child_list.items) |buf| {
 try output.append(buf);
 }
 }


@@ -183,13 +183,13 @@ const Dump = struct {
 try mergeSameStrings(&self.zig_version, zig_version);
 try mergeSameStrings(&self.root_name, root_name);
-for (params.get("builds").?.value.Array.span()) |json_build| {
+for (params.get("builds").?.value.Array.items) |json_build| {
 const target = json_build.Object.get("target").?.value.String;
 try self.targets.append(target);
 }
 // Merge files. If the string matches, it's the same file.
-const other_files = root.Object.get("files").?.value.Array.span();
+const other_files = root.Object.get("files").?.value.Array.items;
 var other_file_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_files) |other_file, i| {
 const gop = try self.file_map.getOrPut(other_file.String);
@@ -201,7 +201,7 @@ const Dump = struct {
 }
 // Merge AST nodes. If the file id, line, and column all match, it's the same AST node.
-const other_ast_nodes = root.Object.get("astNodes").?.value.Array.span();
+const other_ast_nodes = root.Object.get("astNodes").?.value.Array.items;
 var other_ast_node_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_ast_nodes) |other_ast_node_json, i| {
 const other_file_id = jsonObjInt(other_ast_node_json, "file");
@@ -221,9 +221,9 @@ const Dump = struct {
 // convert fields lists
 for (other_ast_nodes) |other_ast_node_json, i| {
 const my_node_index = other_ast_node_to_mine.get(i).?.value;
-const my_node = &self.node_list.span()[my_node_index];
+const my_node = &self.node_list.items[my_node_index];
 if (other_ast_node_json.Object.get("fields")) |fields_json_kv| {
-const other_fields = fields_json_kv.value.Array.span();
+const other_fields = fields_json_kv.value.Array.items;
 my_node.fields = try self.a().alloc(usize, other_fields.len);
 for (other_fields) |other_field_index, field_i| {
 const other_index = @intCast(usize, other_field_index.Integer);
@@ -233,7 +233,7 @@ const Dump = struct {
 }
 // Merge errors. If the AST Node matches, it's the same error value.
-const other_errors = root.Object.get("errors").?.value.Array.span();
+const other_errors = root.Object.get("errors").?.value.Array.items;
 var other_error_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_errors) |other_error_json, i| {
 const other_src_id = jsonObjInt(other_error_json, "src");
@@ -253,7 +253,7 @@ const Dump = struct {
 // First we identify all the simple types and merge those.
 // Example: void, type, noreturn
 // We can also do integers and floats.
-const other_types = root.Object.get("types").?.value.Array.span();
+const other_types = root.Object.get("types").?.value.Array.items;
 var other_types_to_mine = std.AutoHashMap(usize, usize).init(self.a());
 for (other_types) |other_type_json, i| {
 const type_kind = jsonObjInt(other_type_json, "kind");
@@ -336,7 +336,7 @@ const Dump = struct {
 try jw.objectField("builds");
 try jw.beginArray();
-for (self.targets.span()) |target| {
+for (self.targets.items) |target| {
 try jw.arrayElem();
 try jw.beginObject();
 try jw.objectField("target");
@@ -349,7 +349,7 @@ const Dump = struct {
 try jw.objectField("types");
 try jw.beginArray();
-for (self.type_list.span()) |t| {
+for (self.type_list.items) |t| {
 try jw.arrayElem();
 try jw.beginObject();
@@ -379,7 +379,7 @@ const Dump = struct {
 try jw.objectField("errors");
 try jw.beginArray();
-for (self.error_list.span()) |zig_error| {
+for (self.error_list.items) |zig_error| {
 try jw.arrayElem();
 try jw.beginObject();
@@ -395,7 +395,7 @@ const Dump = struct {
 try jw.objectField("astNodes");
 try jw.beginArray();
-for (self.node_list.span()) |node| {
+for (self.node_list.items) |node| {
 try jw.arrayElem();
 try jw.beginObject();
@@ -425,7 +425,7 @@ const Dump = struct {
 try jw.objectField("files");
 try jw.beginArray();
-for (self.file_list.span()) |file| {
+for (self.file_list.items) |file| {
 try jw.arrayElem();
 try jw.emitString(file);
 }


@@ -325,7 +325,7 @@ pub fn main() !void {
 },
 .os = .linux,
 };
-search: for (search_paths.span()) |search_path| {
+search: for (search_paths.items) |search_path| {
 var sub_path: []const []const u8 = undefined;
 switch (vendor) {
 .musl => {
@@ -416,7 +416,7 @@ pub fn main() !void {
 try contents_list.append(contents);
 }
 }
-std.sort.sort(*Contents, contents_list.span(), {}, Contents.hitCountLessThan);
+std.sort.sort(*Contents, contents_list.items, {}, Contents.hitCountLessThan);
 const best_contents = contents_list.popOrNull().?;
 if (best_contents.hit_count > 1) {
 // worth it to make it generic


@@ -374,7 +374,7 @@ pub fn main() anyerror!void {
 }
 // Some options have multiple matches. As an example, "-Wl,foo" matches both
 // "W" and "Wl,". So we sort this list in order of descending priority.
-std.sort.sort(*json.ObjectMap, all_objects.span(), {}, objectLessThan);
+std.sort.sort(*json.ObjectMap, all_objects.items, {}, objectLessThan);
 var stdout_bos = std.io.bufferedOutStream(std.io.getStdOut().outStream());
 const stdout = stdout_bos.outStream();
@@ -386,12 +386,12 @@ pub fn main() anyerror!void {
 \\
 );
-for (all_objects.span()) |obj| {
+for (all_objects.items) |obj| {
 const name = obj.get("Name").?.String;
 var pd1 = false;
 var pd2 = false;
 var pslash = false;
-for (obj.get("Prefixes").?.Array.span()) |prefix_json| {
+for (obj.get("Prefixes").?.Array.items) |prefix_json| {
 const prefix = prefix_json.String;
 if (std.mem.eql(u8, prefix, "-")) {
 pd1 = true;
@@ -502,7 +502,7 @@ const Syntax = union(enum) {
 fn objSyntax(obj: *json.ObjectMap) Syntax {
 const num_args = @intCast(u8, obj.get("NumArgs").?.Integer);
-for (obj.get("!superclasses").?.Array.span()) |superclass_json| {
+for (obj.get("!superclasses").?.Array.items) |superclass_json| {
 const superclass = superclass_json.String;
 if (std.mem.eql(u8, superclass, "Joined")) {
 return .joined;
@@ -548,7 +548,7 @@ fn objSyntax(obj: *json.ObjectMap) Syntax {
 }
 const key = obj.get("!name").?.String;
 std.debug.warn("{} (key {}) has unrecognized superclasses:\n", .{ name, key });
-for (obj.get("!superclasses").?.Array.span()) |superclass_json| {
+for (obj.get("!superclasses").?.Array.items) |superclass_json| {
 std.debug.warn(" {}\n", .{superclass_json.String});
 }
 std.process.exit(1);


@@ -225,15 +225,15 @@ pub fn main() !void {
 var list = std.ArrayList([]const u8).init(allocator);
 var it = global_fn_set.iterator();
 while (it.next()) |entry| try list.append(entry.key);
-std.sort.sort([]const u8, list.span(), {}, strCmpLessThan);
-break :blk list.span();
+std.sort.sort([]const u8, list.items, {}, strCmpLessThan);
+break :blk list.items;
 };
 const global_ver_list = blk: {
 var list = std.ArrayList([]const u8).init(allocator);
 var it = global_ver_set.iterator();
 while (it.next()) |entry| try list.append(entry.key);
-std.sort.sort([]const u8, list.span(), {}, versionLessThan);
-break :blk list.span();
+std.sort.sort([]const u8, list.items, {}, versionLessThan);
+break :blk list.items;
 };
 {
 const vers_txt_path = try fs.path.join(allocator, &[_][]const u8{ glibc_out_dir, "vers.txt" });
@@ -266,13 +266,13 @@ pub fn main() !void {
 for (abi_lists) |*abi_list, abi_index| {
 const entry = target_functions.getEntry(@ptrToInt(abi_list)).?;
 const fn_vers_list = &entry.value.fn_vers_list;
-for (entry.value.list.span()) |*ver_fn| {
+for (entry.value.list.items) |*ver_fn| {
 const gop = try fn_vers_list.getOrPut(ver_fn.name);
 if (!gop.found_existing) {
 gop.entry.value = std.ArrayList(usize).init(allocator);
 }
 const ver_index = global_ver_set.getEntry(ver_fn.ver).?.value;
-if (std.mem.indexOfScalar(usize, gop.entry.value.span(), ver_index) == null) {
+if (std.mem.indexOfScalar(usize, gop.entry.value.items, ver_index) == null) {
 try gop.entry.value.append(ver_index);
 }
 }
@@ -299,7 +299,7 @@ pub fn main() !void {
 try abilist_txt.writeByte('\n');
 continue;
 };
-for (entry.value.span()) |ver_index, it_i| {
+for (entry.value.items) |ver_index, it_i| {
 if (it_i != 0) try abilist_txt.writeByte(' ');
 try abilist_txt.print("{d}", .{ver_index});
 }