const std = @import("std");
const log = std.log;
const assert = std.debug.assert;
const Ast = std.zig.Ast;
const Walk = @import("Walk");
const markdown = @import("markdown.zig");
const Decl = Walk.Decl;
const fileSourceHtml = @import("html_render.zig").fileSourceHtml;
const appendEscaped = @import("html_render.zig").appendEscaped;
const resolveDeclLink = @import("html_render.zig").resolveDeclLink;
const missing_feature_url_escape = @import("html_render.zig").missing_feature_url_escape;

/// All allocation in this wasm module goes through the wasm page allocator.
const gpa = std.heap.wasm_allocator;

/// Functions provided by the JavaScript host environment.
const js = struct {
    /// Keep in sync with the `LOG_` constants in `main.js`.
    const LogLevel = enum(u8) {
        err,
        warn,
        info,
        debug,
    };
    extern "js" fn log(level: LogLevel, ptr: [*]const u8, len: usize) void;
};

pub const std_options: std.Options = .{
    .logFn = logFn,
    //.log_level = .debug,
};

/// Panic handler: forward the message to the JS console, then trap.
pub fn panic(msg: []const u8, st: ?*std.builtin.StackTrace, addr: ?usize) noreturn {
    _ = st;
    _ = addr;
    log.err("panic: {s}", .{msg});
    @trap();
}

/// std.log backend that formats into a fixed stack buffer and hands the
/// result to the JS host. Messages longer than the buffer are truncated
/// with a trailing "..." marker.
fn logFn(
    comptime message_level: log.Level,
    comptime scope: @TypeOf(.enum_literal),
    comptime format: []const u8,
    args: anytype,
) void {
    const prefix = if (scope == .default) "" else @tagName(scope) ++ ": ";
    var buf: [500]u8 = undefined;
    const line = std.fmt.bufPrint(&buf, prefix ++ format, args) catch l: {
        // Formatting overflowed the buffer: keep what fits, mark truncation.
        buf[buf.len - 3 ..][0..3].* = "...".*;
        break :l &buf;
    };
    js.log(@field(js.LogLevel, @tagName(message_level)), line.ptr, line.len);
}

/// Exported so the JS host can allocate scratch buffers inside wasm memory.
export fn alloc(n: usize) [*]u8 {
    const mem = gpa.alloc(u8, n) catch @panic("OOM");
    return mem.ptr;
}

/// Entry point: the host copies a tar archive of source files into wasm
/// memory and calls this to index it.
export fn unpack(tar_ptr: [*]u8, tar_len: usize) void {
    const tar_bytes = tar_ptr[0..tar_len];
    //log.debug("received {d} bytes of tar file", .{tar_bytes.len});
    unpackInner(tar_bytes) catch |err| {
        std.debug.panic("unable to unpack tar: {s}", .{@errorName(err)});
    };
}

// Reused buffers backing the search API below.
var query_string: std.ArrayListUnmanaged(u8) = .empty;
var query_results: std.ArrayListUnmanaged(Decl.Index) = .empty;

/// Resizes the query string to be the correct length; returns the pointer to
/// the query string.
export fn query_begin(query_string_len: usize) [*]u8 {
    query_string.resize(gpa, query_string_len) catch @panic("OOM");
    return query_string.items.ptr;
}

/// Executes the query. Returns the pointer to the query results which is an
/// array of u32.
/// The first element is the length of the array.
/// Subsequent elements are Decl.Index values which are all public
/// declarations.
export fn query_exec(ignore_case: bool) [*]Decl.Index {
    const query = query_string.items;
    log.debug("querying '{s}'", .{query});
    query_exec_fallible(query, ignore_case) catch |err| switch (err) {
        error.OutOfMemory => @panic("OOM"),
    };
    // Slot 0 carries the result count (excluding itself).
    query_results.items[0] = @enumFromInt(query_results.items.len - 1);
    return query_results.items.ptr;
}

const max_matched_items = 1000;

/// Scores every public declaration against the space-separated query terms
/// and leaves the sorted matches in `query_results` / `g.scores`.
fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
    const Score = packed struct(u32) {
        points: u16,
        segments: u16,
    };
    const g = struct {
        var full_path_search_text: std.ArrayListUnmanaged(u8) = .empty;
        var full_path_search_text_lower: std.ArrayListUnmanaged(u8) = .empty;
        var doc_search_text: std.ArrayListUnmanaged(u8) = .empty;
        /// Each element matches a corresponding query_results element.
        var scores: std.ArrayListUnmanaged(Score) = .empty;
    };

    // First element stores the size of the list.
    try query_results.resize(gpa, 1);
    // Corresponding point value is meaningless and therefore undefined.
    try g.scores.resize(gpa, 1);

    decl_loop: for (Walk.decls.items, 0..) |*decl, decl_index| {
        const info = decl.extra_info();
        if (!info.is_pub) continue;

        // Build "ns.sub.name" search text for this declaration.
        try decl.reset_with_path(&g.full_path_search_text);
        if (decl.parent != .none)
            try Decl.append_parent_ns(&g.full_path_search_text, decl.parent);
        try g.full_path_search_text.appendSlice(gpa, info.name);

        try g.full_path_search_text_lower.resize(gpa, g.full_path_search_text.items.len);
        @memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);

        const ast = decl.file.get_ast();
        if (info.first_doc_comment.unwrap()) |first_doc_comment| {
            try collect_docs(&g.doc_search_text, ast, first_doc_comment);
        }

        if (ignore_case) {
            ascii_lower(g.full_path_search_text_lower.items);
            ascii_lower(g.doc_search_text.items);
        }

        // Every term must match somewhere; stronger matches score higher.
        var terms = std.mem.tokenizeScalar(u8, query, ' ');
        var points: u16 = 0;
        var bypass_limit = false;
        while (terms.next()) |term| {
            // exact, case sensitive match of full decl path
            if (std.mem.eql(u8, g.full_path_search_text.items, term)) {
                points += 4;
                bypass_limit = true;
                continue;
            }
            // exact, case sensitive match of just decl name
            if (std.mem.eql(u8, info.name, term)) {
                points += 3;
                bypass_limit = true;
                continue;
            }
            // substring, case insensitive match of full decl path
            if (std.mem.indexOf(u8, g.full_path_search_text_lower.items, term) != null) {
                points += 2;
                continue;
            }
            if (std.mem.indexOf(u8, g.doc_search_text.items, term) != null) {
                points += 1;
                continue;
            }
            continue :decl_loop;
        }

        if (query_results.items.len < max_matched_items or bypass_limit) {
            try query_results.append(gpa, @enumFromInt(decl_index));
            try g.scores.append(gpa, .{
                .points = points,
                .segments = @intCast(count_scalar(g.full_path_search_text.items, '.')),
            });
        }
    }

    // Sort results: points desc, then namespace depth asc, then file path.
    const sort_context: struct {
        pub fn swap(sc: @This(), a_index: usize, b_index: usize) void {
            _ = sc;
            std.mem.swap(Score, &g.scores.items[a_index], &g.scores.items[b_index]);
            std.mem.swap(Decl.Index, &query_results.items[a_index], &query_results.items[b_index]);
        }
        pub fn lessThan(sc: @This(), a_index: usize, b_index: usize) bool {
            _ = sc;
            const a_score = g.scores.items[a_index];
            const b_score = g.scores.items[b_index];
            if (b_score.points < a_score.points) {
                return true;
            } else if (b_score.points > a_score.points) {
                return false;
            } else if (a_score.segments < b_score.segments) {
                return true;
            } else if (a_score.segments > b_score.segments) {
                return false;
            } else {
                const a_decl = query_results.items[a_index];
                const b_decl = query_results.items[b_index];
                const a_file_path = a_decl.get().file.path();
                const b_file_path = b_decl.get().file.path();
                // This neglects to check the local namespace inside the file.
                return std.mem.lessThan(u8, b_file_path, a_file_path);
            }
        }
    } = .{};
    // Start at 1 to keep the length slot in place.
    std.mem.sortUnstableContext(1, query_results.items.len, sort_context);

    if (query_results.items.len > max_matched_items)
        query_results.shrinkRetainingCapacity(max_matched_items);
}

/// A fat pointer passed across the wasm boundary as a single u64.
const String = Slice(u8);

fn Slice(T: type) type {
    return packed struct(u64) {
        ptr: u32,
        len: u32,

        fn init(s: []const T) @This() {
            return .{
                .ptr = @intFromPtr(s.ptr),
                .len = s.len,
            };
        }
    };
}

const ErrorIdentifier = packed struct(u64) {
    token_index: Ast.TokenIndex,
    decl_index: Decl.Index,

    fn hasDocs(ei: ErrorIdentifier) bool {
        const decl_index = ei.decl_index;
        const ast = decl_index.get().file.get_ast();
        const token_index = ei.token_index;
        if (token_index == 0) return false;
        // A doc comment token directly precedes a documented error identifier.
        return ast.tokenTag(token_index - 1) == .doc_comment;
    }

    fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
        const decl_index = ei.decl_index;
        const ast = decl_index.get().file.get_ast();
        const name = ast.tokenSlice(ei.token_index);
        const has_link = base_decl != decl_index;
        try out.appendSlice(gpa, "
");
try fileSourceHtml(decl.file, out, field_node, .{});
try out.appendSlice(gpa, "");
const field = ast.fullContainerField(field_node).?;
if (Decl.findFirstDocComment(ast, field.firstToken()).unwrap()) |first_doc_comment| {
try out.appendSlice(gpa, "");
try appendEscaped(out, name);
try out.appendSlice(gpa, ": ");
try fileSourceHtml(decl.file, out, param_node, .{});
try out.appendSlice(gpa, "");
if (ast.tokenTag(first_doc_comment) == .doc_comment) {
try out.appendSlice(gpa, "");
const content = doc.string(data.text.content);
if (resolve_decl_path(r.context, content)) |resolved_decl_index| {
g.link_buffer.clearRetainingCapacity();
try resolveDeclLink(resolved_decl_index, &g.link_buffer);
// Fix: `writeAll` takes a single byte slice, not format arguments; formatted
// output requires `print` (matching the `else` branch below).
try writer.print("{}", .{markdown.fmtHtml(content)});
} else {
try writer.print("{}", .{markdown.fmtHtml(content)});
}
try writer.writeAll("");
},
else => try Renderer.renderDefault(r, doc, node, writer),
}
}
}.render,
};
try renderer.render(parsed_doc, out.writer(gpa));
}
/// Resolves a dotted declaration path (e.g. "foo.bar.baz") starting from the
/// namespace of `decl_index`. Before descending into each subsequent
/// component, one level of aliasing is followed. Returns null when any
/// component fails to resolve.
fn resolve_decl_path(decl_index: Decl.Index, path: []const u8) ?Decl.Index {
    var components = std.mem.splitScalar(u8, path, '.');
    var cur = decl_index.get().lookup(components.first()) orelse return null;
    while (components.next()) |component| {
        // If the current declaration is an alias, resolve through it first.
        switch (cur.get().categorize()) {
            .alias => |aliasee| cur = aliasee,
            else => {},
        }
        cur = cur.get().get_child(component) orelse return null;
    }
    return cur;
}
/// Renders the explicit type annotation of a variable declaration as HTML
/// into the shared `string_result` buffer. Yields an empty string when the
/// declaration is not a var decl or has no explicit type node.
/// The returned String points into `string_result` and is valid until the
/// next call that reuses that buffer.
export fn decl_type_html(decl_index: Decl.Index) String {
    const decl = decl_index.get();
    const ast = decl.file.get_ast();
    string_result.clearRetainingCapacity();
    // If there is an explicit type, use it.
    if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
        if (var_decl.ast.type_node.unwrap()) |type_node| {
            string_result.appendSlice(gpa, "") catch @panic("OOM");
            fileSourceHtml(decl.file, &string_result, type_node, .{
                .skip_comments = true,
                .collapse_whitespace = true,
            }) catch |e| {
                std.debug.panic("unable to render html: {s}", .{@errorName(e)});
            };
            string_result.appendSlice(gpa, "") catch @panic("OOM");
        }
    }
    return String.init(string_result.items);
}
/// Allocation failure; the only error the HTML rendering helpers can return.
const Oom = error{OutOfMemory};

/// Walks a tar archive in memory and registers every `.zig` file with `Walk`.
/// For each file under a `pkg/` prefix, also records which file is that
/// module's root. Non-`.zig` regular files are warned about and skipped;
/// non-file entries (directories, links, ...) are ignored.
fn unpackInner(tar_bytes: []u8) !void {
    var fbs = std.io.fixedBufferStream(tar_bytes);
    var file_name_buffer: [1024]u8 = undefined;
    var link_name_buffer: [1024]u8 = undefined;
    var it = std.tar.iterator(fbs.reader(), .{
        .file_name_buffer = &file_name_buffer,
        .link_name_buffer = &link_name_buffer,
    });
    while (try it.next()) |tar_file| {
        switch (tar_file.kind) {
            .file => {
                // Empty name + zero size: treat as end-of-archive padding.
                if (tar_file.size == 0 and tar_file.name.len == 0) break;
                if (std.mem.endsWith(u8, tar_file.name, ".zig")) {
                    log.debug("found file: '{s}'", .{tar_file.name});
                    // The iterator's name buffer is reused per entry, so the
                    // name must be duplicated to outlive this iteration.
                    const file_name = try gpa.dupe(u8, tar_file.name);
                    if (std.mem.indexOfScalar(u8, file_name, '/')) |pkg_name_end| {
                        // First path component is the module (package) name.
                        const pkg_name = file_name[0..pkg_name_end];
                        const gop = try Walk.modules.getOrPut(gpa, pkg_name);
                        const file: Walk.File.Index = @enumFromInt(Walk.files.entries.len);
                        // Record this file as the module root when it is the
                        // first file seen for the module, is named root.zig,
                        // or its basename matches the module name.
                        if (!gop.found_existing or
                            std.mem.eql(u8, file_name[pkg_name_end..], "/root.zig") or
                            std.mem.eql(u8, file_name[pkg_name_end + 1 .. file_name.len - ".zig".len], pkg_name))
                        {
                            gop.value_ptr.* = file;
                        }
                        // NOTE(review): assumes fbs.pos sits at the start of
                        // this entry's data after it.next() — confirm against
                        // std.tar.iterator behavior.
                        const file_bytes = tar_bytes[fbs.pos..][0..@intCast(tar_file.size)];
                        // assert's argument is always evaluated, so add_file
                        // still runs in release builds.
                        assert(file == try Walk.add_file(file_name, file_bytes));
                    }
                } else {
                    log.warn("skipping: '{s}' - the tar creation should have done that", .{
                        tar_file.name,
                    });
                }
            },
            else => continue,
        }
    }
}
/// Lowercases ASCII letters in `bytes` in place; other bytes are unchanged.
fn ascii_lower(bytes: []u8) void {
    var i: usize = 0;
    while (i < bytes.len) : (i += 1) {
        bytes[i] = std.ascii.toLower(bytes[i]);
    }
}
/// Returns the name of the registered module at `index`, or an empty string
/// when the index is out of range.
export fn module_name(index: u32) String {
    const names = Walk.modules.keys();
    if (index >= names.len) return String.init("");
    return String.init(names[index]);
}
/// Returns the root declaration of the given module's root file.
/// Asserts the root file actually has a root declaration.
export fn find_module_root(pkg: Walk.ModuleIndex) Decl.Index {
    const root_file = Walk.modules.values()[@intFromEnum(pkg)];
    const root_decl = root_file.findRootDecl();
    assert(root_decl != .none);
    return root_decl;
}
/// Set by `set_input_string`.
var input_string: std.ArrayListUnmanaged(u8) = .empty;

/// Resizes `input_string` to `len` bytes and returns a pointer to its
/// storage so the JS host can copy a string in before calling an API that
/// reads `input_string`.
export fn set_input_string(len: usize) [*]u8 {
    input_string.resize(gpa, len) catch @panic("OOM");
    return input_string.items.ptr;
}
/// Looks up the root struct decl corresponding to a file by path.
/// Uses `input_string` as the path; returns `.none` when no such file exists.
export fn find_file_root() Decl.Index {
    const index = Walk.files.getIndex(input_string.items) orelse return .none;
    const file: Walk.File.Index = @enumFromInt(index);
    return file.findRootDecl();
}
/// Uses `input_string`.
/// Tries to look up the Decl component-wise but then falls back to a file path
/// based scan.
export fn find_decl() Decl.Index {
    // Fast path: direct component-wise lookup.
    const result = Decl.find(input_string.items);
    if (result != .none) return result;
    // Slow path: compare every declaration's fully-qualified name.
    const g = struct {
        // Reused buffer; persists across calls to avoid reallocation.
        var match_fqn: std.ArrayListUnmanaged(u8) = .empty;
    };
    for (Walk.decls.items, 0..) |*decl, decl_index| {
        g.match_fqn.clearRetainingCapacity();
        decl.fqn(&g.match_fqn) catch @panic("OOM");
        if (std.mem.eql(u8, g.match_fqn.items, input_string.items)) {
            //const path = @as(Decl.Index, @enumFromInt(decl_index)).get().file.path();
            //log.debug("find_decl '{s}' found in {s}", .{ input_string.items, path });
            return @enumFromInt(decl_index);
        }
    }
    return .none;
}
/// Set only by `categorize_decl`; read only by `get_aliasee`, valid only
/// when `categorize_decl` returns `.alias`.
var global_aliasee: Decl.Index = .none;

/// Returns the alias target recorded by the most recent `categorize_decl`
/// call; meaningful only when that call returned `.alias`.
export fn get_aliasee() Decl.Index {
    return global_aliasee;
}
/// Categorizes `decl_index`, following at most `resolve_alias_count` alias
/// hops. Each alias hop records its target in `global_aliasee` (retrievable
/// via `get_aliasee`); when the budget runs out, `.alias` itself is returned.
export fn categorize_decl(decl_index: Decl.Index, resolve_alias_count: usize) Walk.Category.Tag {
    global_aliasee = .none;
    var chase_alias_n = resolve_alias_count;
    var decl = decl_index.get();
    while (true) {
        const result = decl.categorize();
        switch (result) {
            .alias => |new_index| {
                assert(new_index != .none);
                global_aliasee = new_index;
                // Follow the alias only while the hop budget lasts.
                if (chase_alias_n > 0) {
                    chase_alias_n -= 1;
                    decl = new_index.get();
                    continue;
                }
            },
            else => {},
        }
        // Tagged union coerces to its tag type here.
        return result;
    }
}
/// Returns the members of a type function's namespace. If the type function
/// returns another type function, the members of that returned function are
/// listed instead.
export fn type_fn_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
    const target = parent.get().get_type_fn_return_type_fn() orelse
        return namespace_members(parent, include_private);
    return namespace_members(target, include_private);
}
/// Collects all declarations whose parent is `parent`; private declarations
/// are included only when `include_private` is set. The returned slice points
/// into a reused static buffer and is valid until the next call.
export fn namespace_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
    const g = struct {
        var members: std.ArrayListUnmanaged(Decl.Index) = .empty;
    };
    g.members.clearRetainingCapacity();
    for (Walk.decls.items, 0..) |*candidate, index| {
        if (candidate.parent != parent) continue;
        if (!include_private and !candidate.is_pub()) continue;
        g.members.append(gpa, @enumFromInt(index)) catch @panic("OOM");
    }
    return Slice(Decl.Index).init(g.members.items);
}
/// Counts how many times the byte `needle` occurs in `haystack`.
fn count_scalar(haystack: []const u8, needle: u8) usize {
    var n: usize = 0;
    for (haystack) |byte| n += @intFromBool(byte == needle);
    return n;
}