Mirror of https://github.com/ziglang/zig.git

update langref and docs to avoid GenericWriter

parent: 8023f3dceb
commit: 9b47dd2028
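For context, the sketch below illustrates the buffer-plus-writer pattern this commit moves the docs tooling toward: build output through a std.Io.Writer.Allocating instead of the old GenericWriter returned by array-list .writer() helpers. It is a hypothetical standalone example (the renderGreeting helper is not part of the commit) and uses only APIs that appear in the diff (Writer.Allocating.init/deinit, the .writer field, print/writeAll, toOwnedSlice).

const std = @import("std");
const Writer = std.Io.Writer;

// Hypothetical example of the pattern adopted in this commit: instead of a
// GenericWriter obtained from std.array_list.Managed(u8).writer(), build the
// output in a Writer.Allocating and use its *std.Io.Writer interface.
fn renderGreeting(gpa: std.mem.Allocator, name: []const u8) ![]u8 {
    var allocating: Writer.Allocating = .init(gpa);
    defer allocating.deinit();

    const w = &allocating.writer; // *std.Io.Writer, accepted by any printing helper
    try w.writeAll("<p>");
    try w.print("Hello, {s}!", .{name});
    try w.writeAll("</p>");

    return allocating.toOwnedSlice();
}

test "renderGreeting builds HTML into an allocating writer" {
    const html = try renderGreeting(std.testing.allocator, "Zig");
    defer std.testing.allocator.free(html);
    try std.testing.expectEqualStrings("<p>Hello, Zig!</p>", html);
}
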
@@ -6,6 +6,7 @@ const gpa = std.heap.wasm_allocator;
 const assert = std.debug.assert;
 const log = std.log;
 const Oom = error{OutOfMemory};
+const ArrayList = std.ArrayList;

 ast_node: Ast.Node.Index,
 file: Walk.File.Index,
@@ -189,7 +190,7 @@ pub fn lookup(decl: *const Decl, name: []const u8) ?Decl.Index {
 }

 /// Appends the fully qualified name to `out`.
-pub fn fqn(decl: *const Decl, out: *std.ArrayListUnmanaged(u8)) Oom!void {
+pub fn fqn(decl: *const Decl, out: *ArrayList(u8)) Oom!void {
     try decl.append_path(out);
     if (decl.parent != .none) {
         try append_parent_ns(out, decl.parent);
@@ -199,12 +200,12 @@ pub fn fqn(decl: *const Decl, out: *std.ArrayListUnmanaged(u8)) Oom!void {
     }
 }

-pub fn reset_with_path(decl: *const Decl, list: *std.ArrayListUnmanaged(u8)) Oom!void {
+pub fn reset_with_path(decl: *const Decl, list: *ArrayList(u8)) Oom!void {
     list.clearRetainingCapacity();
     try append_path(decl, list);
 }

-pub fn append_path(decl: *const Decl, list: *std.ArrayListUnmanaged(u8)) Oom!void {
+pub fn append_path(decl: *const Decl, list: *ArrayList(u8)) Oom!void {
     const start = list.items.len;
     // Prefer the module name alias.
     for (Walk.modules.keys(), Walk.modules.values()) |pkg_name, pkg_file| {
@@ -230,7 +231,7 @@ pub fn append_path(decl: *const Decl, list: *std.ArrayListUnmanaged(u8)) Oom!voi
     }
 }

-pub fn append_parent_ns(list: *std.ArrayListUnmanaged(u8), parent: Decl.Index) Oom!void {
+pub fn append_parent_ns(list: *ArrayList(u8), parent: Decl.Index) Oom!void {
     assert(parent != .none);
     const decl = parent.get();
     if (decl.parent != .none) {
@@ -1,6 +1,8 @@
 const std = @import("std");
 const Ast = std.zig.Ast;
 const assert = std.debug.assert;
+const ArrayList = std.ArrayList;
+const Writer = std.Io.Writer;

 const Walk = @import("Walk");
 const Decl = Walk.Decl;
@@ -30,7 +32,7 @@ pub const Annotation = struct {

 pub fn fileSourceHtml(
     file_index: Walk.File.Index,
-    out: *std.ArrayListUnmanaged(u8),
+    out: *ArrayList(u8),
     root_node: Ast.Node.Index,
     options: RenderSourceOptions,
 ) !void {
@@ -38,7 +40,7 @@ pub fn fileSourceHtml(
     const file = file_index.get();

     const g = struct {
-        var field_access_buffer: std.ArrayListUnmanaged(u8) = .empty;
+        var field_access_buffer: ArrayList(u8) = .empty;
     };

     const start_token = ast.firstToken(root_node);
@@ -88,7 +90,7 @@ pub fn fileSourceHtml(
             if (next_annotate_index >= options.source_location_annotations.len) break;
             const next_annotation = options.source_location_annotations[next_annotate_index];
             if (cursor <= next_annotation.file_byte_offset) break;
-            try out.writer(gpa).print("<span id=\"{s}{d}\"></span>", .{
+            try out.print(gpa, "<span id=\"{s}{d}\"></span>", .{
                 options.annotation_prefix, next_annotation.dom_id,
             });
             next_annotate_index += 1;
@@ -318,7 +320,7 @@ pub fn fileSourceHtml(
     }
 }

-fn appendUnindented(out: *std.ArrayListUnmanaged(u8), s: []const u8, indent: usize) !void {
+fn appendUnindented(out: *ArrayList(u8), s: []const u8, indent: usize) !void {
     var it = std.mem.splitScalar(u8, s, '\n');
     var is_first_line = true;
     while (it.next()) |line| {
@@ -332,7 +334,7 @@ fn appendUnindented(out: *std.ArrayListUnmanaged(u8), s: []const u8, indent: usi
     }
 }

-pub fn appendEscaped(out: *std.ArrayListUnmanaged(u8), s: []const u8) !void {
+pub fn appendEscaped(out: *ArrayList(u8), s: []const u8) !void {
     for (s) |c| {
         try out.ensureUnusedCapacity(gpa, 6);
         switch (c) {
@@ -347,7 +349,7 @@ pub fn appendEscaped(out: *std.ArrayListUnmanaged(u8), s: []const u8) !void {

 fn walkFieldAccesses(
     file_index: Walk.File.Index,
-    out: *std.ArrayListUnmanaged(u8),
+    out: *ArrayList(u8),
     node: Ast.Node.Index,
 ) Oom!void {
     const ast = file_index.get_ast();
@@ -371,7 +373,7 @@ fn walkFieldAccesses(

 fn resolveIdentLink(
     file_index: Walk.File.Index,
-    out: *std.ArrayListUnmanaged(u8),
+    out: *ArrayList(u8),
     ident_token: Ast.TokenIndex,
 ) Oom!void {
     const decl_index = file_index.get().lookup_token(ident_token);
@@ -391,7 +393,7 @@ fn unindent(s: []const u8, indent: usize) []const u8 {
     return s[indent_idx..];
 }

-pub fn resolveDeclLink(decl_index: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
+pub fn resolveDeclLink(decl_index: Decl.Index, out: *ArrayList(u8)) Oom!void {
     const decl = decl_index.get();
     switch (decl.categorize()) {
         .alias => |alias_decl| try alias_decl.get().fqn(out),
@@ -5,6 +5,8 @@ const Ast = std.zig.Ast;
 const Walk = @import("Walk");
 const markdown = @import("markdown.zig");
 const Decl = Walk.Decl;
+const ArrayList = std.ArrayList;
+const Writer = std.Io.Writer;

 const fileSourceHtml = @import("html_render.zig").fileSourceHtml;
 const appendEscaped = @import("html_render.zig").appendEscaped;
@@ -66,8 +68,8 @@ export fn unpack(tar_ptr: [*]u8, tar_len: usize) void {
     };
 }

-var query_string: std.ArrayListUnmanaged(u8) = .empty;
-var query_results: std.ArrayListUnmanaged(Decl.Index) = .empty;
+var query_string: ArrayList(u8) = .empty;
+var query_results: ArrayList(Decl.Index) = .empty;

 /// Resizes the query string to be the correct length; returns the pointer to
 /// the query string.
@@ -99,11 +101,11 @@ fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
         segments: u16,
     };
     const g = struct {
-        var full_path_search_text: std.ArrayListUnmanaged(u8) = .empty;
-        var full_path_search_text_lower: std.ArrayListUnmanaged(u8) = .empty;
-        var doc_search_text: std.ArrayListUnmanaged(u8) = .empty;
+        var full_path_search_text: ArrayList(u8) = .empty;
+        var full_path_search_text_lower: ArrayList(u8) = .empty;
+        var doc_search_text: ArrayList(u8) = .empty;
         /// Each element matches a corresponding query_results element.
-        var scores: std.ArrayListUnmanaged(Score) = .empty;
+        var scores: ArrayList(Score) = .empty;
     };

     // First element stores the size of the list.
@@ -234,7 +236,7 @@ const ErrorIdentifier = packed struct(u64) {
         return ast.tokenTag(token_index - 1) == .doc_comment;
     }

-    fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
+    fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *ArrayList(u8)) Oom!void {
         const decl_index = ei.decl_index;
         const ast = decl_index.get().file.get_ast();
         const name = ast.tokenSlice(ei.token_index);
@@ -260,7 +262,7 @@ const ErrorIdentifier = packed struct(u64) {
     }
 };

-var string_result: std.ArrayListUnmanaged(u8) = .empty;
+var string_result: ArrayList(u8) = .empty;
 var error_set_result: std.StringArrayHashMapUnmanaged(ErrorIdentifier) = .empty;

 export fn decl_error_set(decl_index: Decl.Index) Slice(ErrorIdentifier) {
@@ -411,7 +413,7 @@ fn decl_fields_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {

 fn ast_decl_fields_fallible(ast: *Ast, ast_index: Ast.Node.Index) ![]Ast.Node.Index {
     const g = struct {
-        var result: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
+        var result: ArrayList(Ast.Node.Index) = .empty;
     };
     g.result.clearRetainingCapacity();
     var buf: [2]Ast.Node.Index = undefined;
@@ -429,7 +431,7 @@ fn ast_decl_fields_fallible(ast: *Ast, ast_index: Ast.Node.Index) ![]Ast.Node.In

 fn decl_params_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
     const g = struct {
-        var result: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
+        var result: ArrayList(Ast.Node.Index) = .empty;
     };
     g.result.clearRetainingCapacity();
     const decl = decl_index.get();
@@ -460,7 +462,7 @@ export fn decl_param_html(decl_index: Decl.Index, param_node: Ast.Node.Index) St
 }

 fn decl_field_html_fallible(
-    out: *std.ArrayListUnmanaged(u8),
+    out: *ArrayList(u8),
     decl_index: Decl.Index,
     field_node: Ast.Node.Index,
 ) !void {
@@ -480,7 +482,7 @@ fn decl_field_html_fallible(
 }

 fn decl_param_html_fallible(
-    out: *std.ArrayListUnmanaged(u8),
+    out: *ArrayList(u8),
     decl_index: Decl.Index,
     param_node: Ast.Node.Index,
 ) !void {
@@ -649,7 +651,7 @@ export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
 }

 fn collect_docs(
-    list: *std.ArrayListUnmanaged(u8),
+    list: *ArrayList(u8),
     ast: *const Ast,
     first_doc_comment: Ast.TokenIndex,
 ) Oom!void {
@@ -667,7 +669,7 @@ fn collect_docs(
 }

 fn render_docs(
-    out: *std.ArrayListUnmanaged(u8),
+    out: *ArrayList(u8),
     decl_index: Decl.Index,
     first_doc_comment: Ast.TokenIndex,
     short: bool,
@@ -691,11 +693,10 @@ fn render_docs(
     defer parsed_doc.deinit(gpa);

     const g = struct {
-        var link_buffer: std.ArrayListUnmanaged(u8) = .empty;
+        var link_buffer: ArrayList(u8) = .empty;
     };

-    const Writer = std.ArrayListUnmanaged(u8).Writer;
-    const Renderer = markdown.Renderer(Writer, Decl.Index);
+    const Renderer = markdown.Renderer(Decl.Index);
     const renderer: Renderer = .{
         .context = decl_index,
         .renderFn = struct {
@@ -703,8 +704,8 @@ fn render_docs(
                 r: Renderer,
                 doc: markdown.Document,
                 node: markdown.Document.Node.Index,
-                writer: Writer,
-            ) !void {
+                writer: *Writer,
+            ) Writer.Error!void {
                 const data = doc.nodes.items(.data)[@intFromEnum(node)];
                 switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
                     .code_span => {
@@ -712,7 +713,7 @@ fn render_docs(
                         const content = doc.string(data.text.content);
                         if (resolve_decl_path(r.context, content)) |resolved_decl_index| {
                             g.link_buffer.clearRetainingCapacity();
-                            try resolveDeclLink(resolved_decl_index, &g.link_buffer);
+                            resolveDeclLink(resolved_decl_index, &g.link_buffer) catch return error.WriteFailed;

                             try writer.writeAll("<a href=\"#");
                             _ = missing_feature_url_escape;
@@ -730,7 +731,12 @@ fn render_docs(
             }
         }.render,
     };
-    try renderer.render(parsed_doc, out.writer(gpa));
+
+    var allocating = Writer.Allocating.fromArrayList(gpa, out);
+    defer out.* = allocating.toArrayList();
+    renderer.render(parsed_doc, &allocating.writer) catch |err| switch (err) {
+        error.WriteFailed => return error.OutOfMemory,
+    };
 }

 fn resolve_decl_path(decl_index: Decl.Index, path: []const u8) ?Decl.Index {
@@ -827,7 +833,7 @@ export fn find_module_root(pkg: Walk.ModuleIndex) Decl.Index {
 }

 /// Set by `set_input_string`.
-var input_string: std.ArrayListUnmanaged(u8) = .empty;
+var input_string: ArrayList(u8) = .empty;

 export fn set_input_string(len: usize) [*]u8 {
     input_string.resize(gpa, len) catch @panic("OOM");
@@ -849,7 +855,7 @@ export fn find_decl() Decl.Index {
     if (result != .none) return result;

     const g = struct {
-        var match_fqn: std.ArrayListUnmanaged(u8) = .empty;
+        var match_fqn: ArrayList(u8) = .empty;
     };
     for (Walk.decls.items, 0..) |*decl, decl_index| {
         g.match_fqn.clearRetainingCapacity();
@@ -905,7 +911,7 @@ export fn type_fn_members(parent: Decl.Index, include_private: bool) Slice(Decl.

 export fn namespace_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
     const g = struct {
-        var members: std.ArrayListUnmanaged(Decl.Index) = .empty;
+        var members: ArrayList(Decl.Index) = .empty;
     };

     g.members.clearRetainingCapacity();
@@ -2,25 +2,26 @@ const std = @import("std");
 const Document = @import("Document.zig");
 const Node = Document.Node;
 const assert = std.debug.assert;
+const Writer = std.Io.Writer;

 /// A Markdown document renderer.
 ///
 /// Each concrete `Renderer` type has a `renderDefault` function, with the
 /// intention that custom `renderFn` implementations can call `renderDefault`
 /// for node types for which they require no special rendering.
-pub fn Renderer(comptime Writer: type, comptime Context: type) type {
+pub fn Renderer(comptime Context: type) type {
     return struct {
         renderFn: *const fn (
             r: Self,
             doc: Document,
             node: Node.Index,
-            writer: Writer,
+            writer: *Writer,
         ) Writer.Error!void = renderDefault,
         context: Context,

         const Self = @This();

-        pub fn render(r: Self, doc: Document, writer: Writer) Writer.Error!void {
+        pub fn render(r: Self, doc: Document, writer: *Writer) Writer.Error!void {
             try r.renderFn(r, doc, .root, writer);
         }

@@ -28,7 +29,7 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
             r: Self,
             doc: Document,
             node: Node.Index,
-            writer: Writer,
+            writer: *Writer,
         ) Writer.Error!void {
             const data = doc.nodes.items(.data)[@intFromEnum(node)];
             switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
@@ -188,8 +189,8 @@ pub fn Renderer(comptime Writer: type, comptime Context: type) type {
 pub fn renderInlineNodeText(
     doc: Document,
     node: Node.Index,
-    writer: anytype,
-) @TypeOf(writer).Error!void {
+    writer: *Writer,
+) Writer.Error!void {
     const data = doc.nodes.items(.data)[@intFromEnum(node)];
     switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
         .root,
@@ -234,14 +235,12 @@ pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter([]const u8, formatHtml) {
     return .{ .data = bytes };
 }

-fn formatHtml(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
-    for (bytes) |b| {
-        switch (b) {
-            '<' => try writer.writeAll("&lt;"),
-            '>' => try writer.writeAll("&gt;"),
-            '&' => try writer.writeAll("&amp;"),
-            '"' => try writer.writeAll("&quot;"),
-            else => try writer.writeByte(b),
-        }
-    }
+fn formatHtml(bytes: []const u8, w: *Writer) Writer.Error!void {
+    for (bytes) |b| switch (b) {
+        '<' => try w.writeAll("&lt;"),
+        '>' => try w.writeAll("&gt;"),
+        '&' => try w.writeAll("&amp;"),
+        '"' => try w.writeAll("&quot;"),
+        else => try w.writeByte(b),
+    };
 }
@@ -1,6 +1,5 @@
 const std = @import("std");
 const builtin = @import("builtin");
-const io = std.io;
 const fs = std.fs;
 const process = std.process;
 const Progress = std.Progress;
@@ -8,8 +7,10 @@ const print = std.debug.print;
 const mem = std.mem;
 const testing = std.testing;
 const Allocator = std.mem.Allocator;
+const ArrayList = std.ArrayList;
 const getExternalExecutor = std.zig.system.getExternalExecutor;
 const fatal = std.process.fatal;
+const Writer = std.Io.Writer;

 const max_doc_file_size = 10 * 1024 * 1024;

@@ -344,10 +345,10 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
     var last_action: Action = .open;
     var last_columns: ?u8 = null;

-    var toc_buf = std.array_list.Managed(u8).init(allocator);
+    var toc_buf: Writer.Allocating = .init(allocator);
     defer toc_buf.deinit();

-    var toc = toc_buf.writer();
+    const toc = &toc_buf.writer;

     var nodes = std.array_list.Managed(Node).init(allocator);
     defer nodes.deinit();
@@ -422,7 +423,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
         }
         if (last_action == .open) {
             try toc.writeByte('\n');
-            try toc.writeByteNTimes(' ', header_stack_size * 4);
+            try toc.splatByteAll(' ', header_stack_size * 4);
             if (last_columns) |n| {
                 try toc.print("<ul style=\"columns: {d}\">\n", .{n});
             } else {
@@ -432,7 +433,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
             last_action = .open;
         }
         last_columns = columns;
-        try toc.writeByteNTimes(' ', 4 + header_stack_size * 4);
+        try toc.splatByteAll(' ', 4 + header_stack_size * 4);
         try toc.print("<li><a id=\"toc-{s}\" href=\"#{s}\">{s}</a>", .{ urlized, urlized, content });
     } else if (mem.eql(u8, tag_name, "header_close")) {
         if (header_stack_size == 0) {
@@ -442,7 +443,7 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
         _ = try eatToken(tokenizer, .bracket_close);

         if (last_action == .close) {
-            try toc.writeByteNTimes(' ', 8 + header_stack_size * 4);
+            try toc.splatByteAll(' ', 8 + header_stack_size * 4);
             try toc.writeAll("</ul></li>\n");
         } else {
             try toc.writeAll("</li>\n");
@@ -591,30 +592,29 @@ fn genToc(allocator: Allocator, tokenizer: *Tokenizer) !Toc {
         }
     }

-    return Toc{
+    return .{
         .nodes = try nodes.toOwnedSlice(),
         .toc = try toc_buf.toOwnedSlice(),
         .urls = urls,
     };
 }

-fn urlize(allocator: Allocator, input: []const u8) ![]u8 {
-    var buf = std.array_list.Managed(u8).init(allocator);
-    defer buf.deinit();
+fn urlize(gpa: Allocator, input: []const u8) ![]u8 {
+    var buf: ArrayList(u8) = .empty;
+    defer buf.deinit(gpa);

-    const out = buf.writer();
     for (input) |c| {
         switch (c) {
             'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {
-                try out.writeByte(c);
+                try buf.append(gpa, c);
             },
             ' ' => {
-                try out.writeByte('-');
+                try buf.append(gpa, '-');
             },
             else => {},
         }
     }
-    return try buf.toOwnedSlice();
+    return try buf.toOwnedSlice(gpa);
 }

 fn escapeHtml(allocator: Allocator, input: []const u8) ![]u8 {
@@ -626,7 +626,7 @@ fn escapeHtml(allocator: Allocator, input: []const u8) ![]u8 {
     return try buf.toOwnedSlice();
 }

-fn writeEscaped(out: anytype, input: []const u8) !void {
+fn writeEscaped(out: *Writer, input: []const u8) !void {
     for (input) |c| {
         try switch (c) {
             '&' => out.writeAll("&amp;"),
@@ -662,14 +662,14 @@ fn isType(name: []const u8) bool {
     return false;
 }

-fn writeEscapedLines(out: anytype, text: []const u8) !void {
+fn writeEscapedLines(out: *Writer, text: []const u8) !void {
     return writeEscaped(out, text);
 }

 fn tokenizeAndPrintRaw(
     allocator: Allocator,
     docgen_tokenizer: *Tokenizer,
-    out: anytype,
+    out: *Writer,
     source_token: Token,
     raw_src: []const u8,
 ) !void {
@@ -907,14 +907,14 @@ fn tokenizeAndPrintRaw(
 fn tokenizeAndPrint(
     allocator: Allocator,
     docgen_tokenizer: *Tokenizer,
-    out: anytype,
+    out: *Writer,
     source_token: Token,
 ) !void {
     const raw_src = docgen_tokenizer.buffer[source_token.start..source_token.end];
     return tokenizeAndPrintRaw(allocator, docgen_tokenizer, out, source_token, raw_src);
 }

-fn printSourceBlock(allocator: Allocator, docgen_tokenizer: *Tokenizer, out: anytype, syntax_block: SyntaxBlock) !void {
+fn printSourceBlock(allocator: Allocator, docgen_tokenizer: *Tokenizer, out: *Writer, syntax_block: SyntaxBlock) !void {
     const source_type = @tagName(syntax_block.source_type);

     try out.print("<figure><figcaption class=\"{s}-cap\"><cite class=\"file\">{s}</cite></figcaption><pre>", .{ source_type, syntax_block.name });
@@ -932,7 +932,7 @@ fn printSourceBlock(allocator: Allocator, docgen_tokenizer: *Tokenizer, out: any
     try out.writeAll("</pre></figure>");
 }

-fn printShell(out: anytype, shell_content: []const u8, escape: bool) !void {
+fn printShell(out: *Writer, shell_content: []const u8, escape: bool) !void {
     const trimmed_shell_content = mem.trim(u8, shell_content, " \r\n");
     try out.writeAll("<figure><figcaption class=\"shell-cap\">Shell</figcaption><pre><samp>");
     var cmd_cont: bool = false;
@@ -984,7 +984,7 @@ fn genHtml(
     tokenizer: *Tokenizer,
     toc: *Toc,
     code_dir: std.fs.Dir,
-    out: anytype,
+    out: *Writer,
 ) !void {
     for (toc.nodes) |node| {
         switch (node) {
@@ -127,9 +127,9 @@ fn printOutput(
     const obj_ext = builtin.object_format.fileExt(builtin.cpu.arch);
     const print = std.debug.print;

-    var shell_buffer = std.array_list.Managed(u8).init(arena);
+    var shell_buffer: std.Io.Writer.Allocating = .init(arena);
     defer shell_buffer.deinit();
-    var shell_out = shell_buffer.writer();
+    const shell_out = &shell_buffer.writer;

     const code_name = std.fs.path.stem(input_path);

@@ -600,7 +600,7 @@ fn printOutput(
     }

     if (!code.just_check_syntax) {
-        try printShell(out, shell_buffer.items, false);
+        try printShell(out, shell_buffer.written(), false);
     }
 }

@@ -975,26 +975,22 @@ fn skipPrefix(line: []const u8) []const u8 {
     return line[3..];
 }

-fn escapeHtml(allocator: Allocator, input: []const u8) ![]u8 {
-    var buf = std.array_list.Managed(u8).init(allocator);
-    defer buf.deinit();
-
-    const out = buf.writer();
-    try writeEscaped(out, input);
-    return try buf.toOwnedSlice();
+fn escapeHtml(gpa: Allocator, input: []const u8) ![]u8 {
+    var allocating: Writer.Allocating = .init(gpa);
+    defer allocating.deinit();
+    try writeEscaped(&allocating.writer, input);
+    return allocating.toOwnedSlice();
 }

-fn writeEscaped(out: *Writer, input: []const u8) !void {
-    for (input) |c| {
-        try switch (c) {
-            '&' => out.writeAll("&amp;"),
-            '<' => out.writeAll("&lt;"),
-            '>' => out.writeAll("&gt;"),
-            '"' => out.writeAll("&quot;"),
-            else => out.writeByte(c),
+fn writeEscaped(w: *Writer, input: []const u8) !void {
+    for (input) |c| try switch (c) {
+        '&' => w.writeAll("&amp;"),
+        '<' => w.writeAll("&lt;"),
+        '>' => w.writeAll("&gt;"),
+        '"' => w.writeAll("&quot;"),
+        else => w.writeByte(c),
     };
-    }
 }

 fn termColor(allocator: Allocator, input: []const u8) ![]u8 {
     // The SRG sequences generates by the Zig compiler are in the format: