diff --git a/doc/docgen.zig b/doc/docgen.zig
index 430d6eb1e2..6fb9b93e1d 100644
--- a/doc/docgen.zig
+++ b/doc/docgen.zig
@@ -215,32 +215,33 @@ const Tokenizer = struct {
     }
 };
 
-fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: ...) anyerror {
+fn parseError(tokenizer: *Tokenizer, token: Token, comptime fmt: []const u8, args: var) anyerror {
     const loc = tokenizer.getTokenLocation(token);
-    warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
+    const args_prefix = .{ tokenizer.source_file_name, loc.line + 1, loc.column + 1 };
+    warn("{}:{}:{}: error: " ++ fmt ++ "\n", args_prefix ++ args);
     if (loc.line_start <= loc.line_end) {
-        warn("{}\n", tokenizer.buffer[loc.line_start..loc.line_end]);
+        warn("{}\n", .{tokenizer.buffer[loc.line_start..loc.line_end]});
         {
             var i: usize = 0;
             while (i < loc.column) : (i += 1) {
-                warn(" ");
+                warn(" ", .{});
             }
         }
         {
             const caret_count = token.end - token.start;
             var i: usize = 0;
             while (i < caret_count) : (i += 1) {
-                warn("~");
+                warn("~", .{});
             }
         }
-        warn("\n");
+        warn("\n", .{});
     }
     return error.ParseError;
 }
 
 fn assertToken(tokenizer: *Tokenizer, token: Token, id: Token.Id) !void {
     if (token.id != id) {
-        return parseError(tokenizer, token, "expected {}, found {}", @tagName(id), @tagName(token.id));
+        return parseError(tokenizer, token, "expected {}, found {}", .{ @tagName(id), @tagName(token.id) });
     }
 }
 
@@ -339,7 +340,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
         switch (token.id) {
             Token.Id.Eof => {
                 if (header_stack_size != 0) {
-                    return parseError(tokenizer, token, "unbalanced headers");
+                    return parseError(tokenizer, token, "unbalanced headers", .{});
                 }
                 try toc.write("    </ul>\n");
                 break;
@@ -373,10 +374,15 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
                                 if (mem.eql(u8, param, "3col")) {
                                     columns = 3;
                                 } else {
-                                    return parseError(tokenizer, bracket_tok, "unrecognized header_open param: {}", param);
+                                    return parseError(
+                                        tokenizer,
+                                        bracket_tok,
+                                        "unrecognized header_open param: {}",
+                                        .{param},
+                                    );
                                 }
                             },
-                            else => return parseError(tokenizer, bracket_tok, "invalid header_open token"),
+                            else => return parseError(tokenizer, bracket_tok, "invalid header_open token", .{}),
                         }
                     }
 
@@ -391,15 +397,15 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
                         },
                     });
                     if (try urls.put(urlized, tag_token)) |entry| {
-                        parseError(tokenizer, tag_token, "duplicate header url: #{}", urlized) catch {};
-                        parseError(tokenizer, entry.value, "other tag here") catch {};
+                        parseError(tokenizer, tag_token, "duplicate header url: #{}", .{urlized}) catch {};
+                        parseError(tokenizer, entry.value, "other tag here", .{}) catch {};
                         return error.ParseError;
                     }
                     if (last_action == Action.Open) {
                         try toc.writeByte('\n');
                         try toc.writeByteNTimes(' ', header_stack_size * 4);
                         if (last_columns) |n| {
-                            try toc.print("