delete the stage1 implementation of autodoc
parent 8df1b91d17
commit 7d636f0f9d
@@ -340,7 +340,6 @@ set(STAGE1_SOURCES
     "${CMAKE_SOURCE_DIR}/src/stage1/bigint.cpp"
     "${CMAKE_SOURCE_DIR}/src/stage1/buffer.cpp"
     "${CMAKE_SOURCE_DIR}/src/stage1/codegen.cpp"
-    "${CMAKE_SOURCE_DIR}/src/stage1/dump_analysis.cpp"
     "${CMAKE_SOURCE_DIR}/src/stage1/errmsg.cpp"
     "${CMAKE_SOURCE_DIR}/src/stage1/error.cpp"
     "${CMAKE_SOURCE_DIR}/src/stage1/heap.cpp"
@@ -5169,8 +5169,6 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
     const emit_asm_path = try stage1LocPath(arena, comp.emit_asm, directory);
     const emit_llvm_ir_path = try stage1LocPath(arena, comp.emit_llvm_ir, directory);
     const emit_llvm_bc_path = try stage1LocPath(arena, comp.emit_llvm_bc, directory);
-    const emit_analysis_path = try stage1LocPath(arena, comp.emit_analysis, directory);
-    const emit_docs_path = try stage1LocPath(arena, comp.emit_docs, directory);
     const stage1_pkg = try createStage1Pkg(arena, "root", mod.main_pkg, null);
     const test_filter = comp.test_filter orelse ""[0..0];
     const test_name_prefix = comp.test_name_prefix orelse ""[0..0];
@@ -5191,10 +5189,6 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
         .emit_llvm_ir_len = emit_llvm_ir_path.len,
         .emit_bitcode_ptr = emit_llvm_bc_path.ptr,
         .emit_bitcode_len = emit_llvm_bc_path.len,
-        .emit_analysis_json_ptr = emit_analysis_path.ptr,
-        .emit_analysis_json_len = emit_analysis_path.len,
-        .emit_docs_ptr = emit_docs_path.ptr,
-        .emit_docs_len = emit_docs_path.len,
         .builtin_zig_path_ptr = builtin_zig_path.ptr,
         .builtin_zig_path_len = builtin_zig_path.len,
         .test_filter_ptr = test_filter.ptr,
@@ -96,10 +96,6 @@ pub const Module = extern struct {
    emit_llvm_ir_len: usize,
    emit_bitcode_ptr: [*]const u8,
    emit_bitcode_len: usize,
-    emit_analysis_json_ptr: [*]const u8,
-    emit_analysis_json_len: usize,
-    emit_docs_ptr: [*]const u8,
-    emit_docs_len: usize,
    builtin_zig_path_ptr: [*]const u8,
    builtin_zig_path_len: usize,
    test_filter_ptr: [*]const u8,
@@ -2141,8 +2141,6 @@ struct CodeGen {
     Buf asm_file_output_path;
     Buf llvm_ir_file_output_path;
     Buf bitcode_file_output_path;
-    Buf analysis_json_output_path;
-    Buf docs_output_path;
 
     Buf *builtin_zig_path;
 
@@ -16,7 +16,6 @@
 #include "util.hpp"
 #include "zig_llvm.h"
 #include "stage2.h"
-#include "dump_analysis.hpp"
 #include "softfloat.hpp"
 #include "zigendian.h"
 
@@ -10546,57 +10545,6 @@ void codegen_build_object(CodeGen *g) {
 
     gen_root_source(g);
 
-    if (buf_len(&g->analysis_json_output_path) != 0) {
-        const char *analysis_json_filename = buf_ptr(&g->analysis_json_output_path);
-        FILE *f = fopen(analysis_json_filename, "wb");
-        if (f == nullptr) {
-            fprintf(stderr, "Unable to open '%s': %s\n", analysis_json_filename, strerror(errno));
-            exit(1);
-        }
-        zig_print_analysis_dump(g, f, " ", "\n");
-        if (fclose(f) != 0) {
-            fprintf(stderr, "Unable to write '%s': %s\n", analysis_json_filename, strerror(errno));
-            exit(1);
-        }
-    }
-    if (buf_len(&g->docs_output_path) != 0) {
-        Error err;
-        Buf *doc_dir_path = &g->docs_output_path;
-        if ((err = os_make_path(doc_dir_path))) {
-            fprintf(stderr, "Unable to create directory %s: %s\n", buf_ptr(doc_dir_path), err_str(err));
-            exit(1);
-        }
-        Buf *index_html_src_path = buf_sprintf("%s" OS_SEP "docs" OS_SEP "index.html",
-                buf_ptr(g->zig_lib_dir));
-        Buf *index_html_dest_path = buf_sprintf("%s" OS_SEP "index.html", buf_ptr(doc_dir_path));
-        Buf *main_js_src_path = buf_sprintf("%s" OS_SEP "docs" OS_SEP "main.js",
-                buf_ptr(g->zig_lib_dir));
-        Buf *main_js_dest_path = buf_sprintf("%s" OS_SEP "main.js", buf_ptr(doc_dir_path));
-
-        if ((err = os_copy_file(index_html_src_path, index_html_dest_path))) {
-            fprintf(stderr, "Unable to copy %s to %s: %s\n", buf_ptr(index_html_src_path),
-                    buf_ptr(index_html_dest_path), err_str(err));
-            exit(1);
-        }
-        if ((err = os_copy_file(main_js_src_path, main_js_dest_path))) {
-            fprintf(stderr, "Unable to copy %s to %s: %s\n", buf_ptr(main_js_src_path),
-                    buf_ptr(main_js_dest_path), err_str(err));
-            exit(1);
-        }
-        const char *data_js_filename = buf_ptr(buf_sprintf("%s" OS_SEP "data.js", buf_ptr(doc_dir_path)));
-        FILE *f = fopen(data_js_filename, "wb");
-        if (f == nullptr) {
-            fprintf(stderr, "Unable to open '%s': %s\n", data_js_filename, strerror(errno));
-            exit(1);
-        }
-        fprintf(f, "zigAnalysis=");
-        zig_print_analysis_dump(g, f, "", "");
-        fprintf(f, ";");
-        if (fclose(f) != 0) {
-            fprintf(stderr, "Unable to write '%s': %s\n", data_js_filename, strerror(errno));
-            exit(1);
-        }
-    }
 
     codegen_add_time_event(g, "Code Generation");
     {
File diff suppressed because it is too large
@@ -1,17 +0,0 @@
-/*
- * Copyright (c) 2019 Andrew Kelley
- *
- * This file is part of zig, which is MIT licensed.
- * See http://opensource.org/licenses/MIT
- */
-
-#ifndef ZIG_DUMP_ANALYSIS_HPP
-#define ZIG_DUMP_ANALYSIS_HPP
-
-#include "all_types.hpp"
-#include <stdio.h>
-
-void zig_print_stack_report(CodeGen *g, FILE *f);
-void zig_print_analysis_dump(CodeGen *g, FILE *f, const char *one_indent, const char *nl);
-
-#endif
@@ -74,8 +74,6 @@ void zig_stage1_build_object(struct ZigStage1 *stage1) {
     buf_init_from_mem(&g->asm_file_output_path, stage1->emit_asm_ptr, stage1->emit_asm_len);
     buf_init_from_mem(&g->llvm_ir_file_output_path, stage1->emit_llvm_ir_ptr, stage1->emit_llvm_ir_len);
     buf_init_from_mem(&g->bitcode_file_output_path, stage1->emit_bitcode_ptr, stage1->emit_bitcode_len);
-    buf_init_from_mem(&g->analysis_json_output_path, stage1->emit_analysis_json_ptr, stage1->emit_analysis_json_len);
-    buf_init_from_mem(&g->docs_output_path, stage1->emit_docs_ptr, stage1->emit_docs_len);
 
     if (stage1->builtin_zig_path_len != 0) {
         g->builtin_zig_path = buf_create_from_mem(stage1->builtin_zig_path_ptr, stage1->builtin_zig_path_len);
@@ -161,12 +161,6 @@ struct ZigStage1 {
     const char *emit_bitcode_ptr;
     size_t emit_bitcode_len;
 
-    const char *emit_analysis_json_ptr;
-    size_t emit_analysis_json_len;
-
-    const char *emit_docs_ptr;
-    size_t emit_docs_len;
-
     const char *builtin_zig_path_ptr;
     size_t builtin_zig_path_len;
 
@@ -1,454 +0,0 @@
-const builtin = @import("builtin");
-const std = @import("std");
-const json = std.json;
-const mem = std.mem;
-const fieldIndex = std.meta.fieldIndex;
-const TypeId = builtin.TypeId;
-
-pub fn main() anyerror!void {
-    var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
-    defer arena.deinit();
-
-    const allocator = arena.allocator();
-
-    const args = try std.process.argsAlloc(allocator);
-
-    var parser: json.Parser = undefined;
-    var dump = Dump.init(allocator);
-    for (args[1..]) |arg| {
-        parser = json.Parser.init(allocator, false);
-        const json_text = try std.fs.cwd().readFileAlloc(allocator, arg, std.math.maxInt(usize));
-        const tree = try parser.parse(json_text);
-        try dump.mergeJson(tree.root);
-    }
-
-    const stdout = try std.io.getStdOut();
-    try dump.render(stdout.writer());
-}
-
-/// AST source node
-const Node = struct {
-    file: usize,
-    line: usize,
-    col: usize,
-    fields: []usize,
-
-    fn hash(n: Node) u64 {
-        var hasher = std.hash.Wyhash.init(0);
-        std.hash.autoHash(&hasher, n.file);
-        std.hash.autoHash(&hasher, n.line);
-        std.hash.autoHash(&hasher, n.col);
-        return hasher.final();
-    }
-
-    fn eql(a: Node, b: Node) bool {
-        return a.file == b.file and
-            a.line == b.line and
-            a.col == b.col;
-    }
-};
-
-const Error = struct {
-    src: usize,
-    name: []const u8,
-
-    fn hash(n: Error) u64 {
-        var hasher = std.hash.Wyhash.init(0);
-        std.hash.autoHash(&hasher, n.src);
-        return hasher.final();
-    }
-
-    fn eql(a: Error, b: Error) bool {
-        return a.src == b.src;
-    }
-};
-
-const simple_types = [_][]const u8{
-    "Type",
-    "Void",
-    "Bool",
-    "NoReturn",
-    "ComptimeFloat",
-    "ComptimeInt",
-    "Undefined",
-    "Null",
-    "AnyFrame",
-    "EnumLiteral",
-};
-
-const Type = union(builtin.TypeId) {
-    Type,
-    Void,
-    Bool,
-    NoReturn,
-    ComptimeFloat,
-    ComptimeInt,
-    Undefined,
-    Null,
-    AnyFrame,
-    EnumLiteral,
-
-    Int: Int,
-    Float: usize, // bits
-
-    Vector: Array,
-    Optional: usize, // payload type index
-    Pointer: Pointer,
-    Array: Array,
-
-    Struct, // TODO
-    ErrorUnion, // TODO
-    ErrorSet, // TODO
-    Enum, // TODO
-    Union, // TODO
-    Fn, // TODO
-    BoundFn, // TODO
-    Opaque, // TODO
-    Frame, // TODO
-
-    const Int = struct {
-        bits: usize,
-        signed: bool,
-    };
-
-    const Pointer = struct {
-        elem: usize,
-        alignment: usize,
-        is_const: bool,
-        is_volatile: bool,
-        allow_zero: bool,
-        host_int_bytes: usize,
-        bit_offset_in_host: usize,
-    };
-
-    const Array = struct {
-        elem: usize,
-        len: usize,
-    };
-
-    fn hash(t: Type) u64 {
-        var hasher = std.hash.Wyhash.init(0);
-        std.hash.autoHash(&hasher, t);
-        return hasher.final();
-    }
-
-    fn eql(a: Type, b: Type) bool {
-        return std.meta.eql(a, b);
-    }
-};
-
-const Dump = struct {
-    zig_id: ?[]const u8 = null,
-    zig_version: ?[]const u8 = null,
-    root_name: ?[]const u8 = null,
-    targets: std.ArrayList([]const u8),
-
-    file_list: std.ArrayList([]const u8),
-    file_map: FileMap,
-
-    node_list: std.ArrayList(Node),
-    node_map: NodeMap,
-
-    error_list: std.ArrayList(Error),
-    error_map: ErrorMap,
-
-    type_list: std.ArrayList(Type),
-    type_map: TypeMap,
-
-    const FileMap = std.StringHashMap(usize);
-    const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql, 80);
-    const ErrorMap = std.HashMap(Error, usize, Error.hash, Error.eql, 80);
-    const TypeMap = std.HashMap(Type, usize, Type.hash, Type.eql, 80);
-
-    fn init(allocator: mem.Allocator) Dump {
-        return Dump{
-            .targets = std.ArrayList([]const u8).init(allocator),
-            .file_list = std.ArrayList([]const u8).init(allocator),
-            .file_map = FileMap.init(allocator),
-            .node_list = std.ArrayList(Node).init(allocator),
-            .node_map = NodeMap.init(allocator),
-            .error_list = std.ArrayList(Error).init(allocator),
-            .error_map = ErrorMap.init(allocator),
-            .type_list = std.ArrayList(Type).init(allocator),
-            .type_map = TypeMap.init(allocator),
-        };
-    }
-
-    fn mergeJson(self: *Dump, root: json.Value) !void {
-        const params = &root.Object.get("params").?.value.Object;
-        const zig_id = params.get("zigId").?.value.String;
-        const zig_version = params.get("zigVersion").?.value.String;
-        const root_name = params.get("rootName").?.value.String;
-        try mergeSameStrings(&self.zig_id, zig_id);
-        try mergeSameStrings(&self.zig_version, zig_version);
-        try mergeSameStrings(&self.root_name, root_name);
-
-        for (params.get("builds").?.value.Array.items) |json_build| {
-            const target = json_build.Object.get("target").?.value.String;
-            try self.targets.append(target);
-        }
-
-        // Merge files. If the string matches, it's the same file.
-        const other_files = root.Object.get("files").?.value.Array.items;
-        var other_file_to_mine = std.AutoHashMap(usize, usize).init(self.a());
-        for (other_files) |other_file, i| {
-            const gop = try self.file_map.getOrPut(other_file.String);
-            if (!gop.found_existing) {
-                gop.kv.value = self.file_list.items.len;
-                try self.file_list.append(other_file.String);
-            }
-            try other_file_to_mine.putNoClobber(i, gop.kv.value);
-        }
-
-        // Merge AST nodes. If the file id, line, and column all match, it's the same AST node.
-        const other_ast_nodes = root.Object.get("astNodes").?.value.Array.items;
-        var other_ast_node_to_mine = std.AutoHashMap(usize, usize).init(self.a());
-        for (other_ast_nodes) |other_ast_node_json, i| {
-            const other_file_id = jsonObjInt(other_ast_node_json, "file");
-            const other_node = Node{
-                .line = jsonObjInt(other_ast_node_json, "line"),
-                .col = jsonObjInt(other_ast_node_json, "col"),
-                .file = other_file_to_mine.getValue(other_file_id).?,
-                .fields = ([*]usize)(undefined)[0..0],
-            };
-            const gop = try self.node_map.getOrPut(other_node);
-            if (!gop.found_existing) {
-                gop.kv.value = self.node_list.items.len;
-                try self.node_list.append(other_node);
-            }
-            try other_ast_node_to_mine.putNoClobber(i, gop.kv.value);
-        }
-        // convert fields lists
-        for (other_ast_nodes) |other_ast_node_json, i| {
-            const my_node_index = other_ast_node_to_mine.get(i).?.value;
-            const my_node = &self.node_list.items[my_node_index];
-            if (other_ast_node_json.Object.get("fields")) |fields_json_kv| {
-                const other_fields = fields_json_kv.value.Array.items;
-                my_node.fields = try self.a().alloc(usize, other_fields.len);
-                for (other_fields) |other_field_index, field_i| {
-                    const other_index = @intCast(usize, other_field_index.Integer);
-                    my_node.fields[field_i] = other_ast_node_to_mine.get(other_index).?.value;
-                }
-            }
-        }
-
-        // Merge errors. If the AST Node matches, it's the same error value.
-        const other_errors = root.Object.get("errors").?.value.Array.items;
-        var other_error_to_mine = std.AutoHashMap(usize, usize).init(self.a());
-        for (other_errors) |other_error_json, i| {
-            const other_src_id = jsonObjInt(other_error_json, "src");
-            const other_error = Error{
-                .src = other_ast_node_to_mine.getValue(other_src_id).?,
-                .name = other_error_json.Object.get("name").?.value.String,
-            };
-            const gop = try self.error_map.getOrPut(other_error);
-            if (!gop.found_existing) {
-                gop.kv.value = self.error_list.items.len;
-                try self.error_list.append(other_error);
-            }
-            try other_error_to_mine.putNoClobber(i, gop.kv.value);
-        }
-
-        // Merge types. Now it starts to get advanced.
-        // First we identify all the simple types and merge those.
-        // Example: void, type, noreturn
-        // We can also do integers and floats.
-        const other_types = root.Object.get("types").?.value.Array.items;
-        var other_types_to_mine = std.AutoHashMap(usize, usize).init(self.a());
-        for (other_types) |other_type_json, i| {
-            const type_kind = jsonObjInt(other_type_json, "kind");
-            switch (type_kind) {
-                fieldIndex(TypeId, "Int").? => {
-                    var signed: bool = undefined;
-                    var bits: usize = undefined;
-                    if (other_type_json.Object.get("i")) |kv| {
-                        signed = true;
-                        bits = @intCast(usize, kv.value.Integer);
-                    } else if (other_type_json.Object.get("u")) |kv| {
-                        signed = false;
-                        bits = @intCast(usize, kv.value.Integer);
-                    } else {
-                        unreachable;
-                    }
-                    const other_type = Type{
-                        .Int = Type.Int{
-                            .bits = bits,
-                            .signed = signed,
-                        },
-                    };
-                    try self.mergeOtherType(other_type, i, &other_types_to_mine);
-                },
-                fieldIndex(TypeId, "Float").? => {
-                    const other_type = Type{
-                        .Float = jsonObjInt(other_type_json, "bits"),
-                    };
-                    try self.mergeOtherType(other_type, i, &other_types_to_mine);
-                },
-                else => {},
-            }
-
-            inline for (simple_types) |simple_type_name| {
-                if (type_kind == std.meta.fieldIndex(builtin.TypeId, simple_type_name).?) {
-                    const other_type = @unionInit(Type, simple_type_name, {});
-                    try self.mergeOtherType(other_type, i, &other_types_to_mine);
-                }
-            }
-        }
-    }
-
-    fn mergeOtherType(
-        self: *Dump,
-        other_type: Type,
-        other_type_index: usize,
-        other_types_to_mine: *std.AutoHashMap(usize, usize),
-    ) !void {
-        const gop = try self.type_map.getOrPut(other_type);
-        if (!gop.found_existing) {
-            gop.kv.value = self.type_list.items.len;
-            try self.type_list.append(other_type);
-        }
-        try other_types_to_mine.putNoClobber(other_type_index, gop.kv.value);
-    }
-
-    fn render(self: *Dump, stream: anytype) !void {
-        var jw = json.WriteStream(@TypeOf(stream).Child, 10).init(stream);
-        try jw.beginObject();
-
-        try jw.objectField("typeKinds");
-        try jw.beginArray();
-        inline for (@typeInfo(builtin.TypeId).Enum.fields) |field| {
-            try jw.arrayElem();
-            try jw.emitString(field.name);
-        }
-        try jw.endArray();
-
-        try jw.objectField("params");
-        try jw.beginObject();
-
-        try jw.objectField("zigId");
-        try jw.emitString(self.zig_id.?);
-
-        try jw.objectField("zigVersion");
-        try jw.emitString(self.zig_version.?);
-
-        try jw.objectField("rootName");
-        try jw.emitString(self.root_name.?);
-
-        try jw.objectField("builds");
-        try jw.beginArray();
-        for (self.targets.items) |target| {
-            try jw.arrayElem();
-            try jw.beginObject();
-            try jw.objectField("target");
-            try jw.emitString(target);
-            try jw.endObject();
-        }
-        try jw.endArray();
-
-        try jw.endObject();
-
-        try jw.objectField("types");
-        try jw.beginArray();
-        for (self.type_list.items) |t| {
-            try jw.arrayElem();
-            try jw.beginObject();
-
-            try jw.objectField("kind");
-            try jw.emitNumber(@enumToInt(builtin.TypeId(t)));
-
-            switch (t) {
-                .Int => |int| {
-                    if (int.signed) {
-                        try jw.objectField("i");
-                    } else {
-                        try jw.objectField("u");
-                    }
-                    try jw.emitNumber(int.bits);
-                },
-                .Float => |bits| {
-                    try jw.objectField("bits");
-                    try jw.emitNumber(bits);
-                },
-
-                else => {},
-            }
-
-            try jw.endObject();
-        }
-        try jw.endArray();
-
-        try jw.objectField("errors");
-        try jw.beginArray();
-        for (self.error_list.items) |zig_error| {
-            try jw.arrayElem();
-            try jw.beginObject();
-
-            try jw.objectField("src");
-            try jw.emitNumber(zig_error.src);
-
-            try jw.objectField("name");
-            try jw.emitString(zig_error.name);
-
-            try jw.endObject();
-        }
-        try jw.endArray();
-
-        try jw.objectField("astNodes");
-        try jw.beginArray();
-        for (self.node_list.items) |node| {
-            try jw.arrayElem();
-            try jw.beginObject();
-
-            try jw.objectField("file");
-            try jw.emitNumber(node.file);
-
-            try jw.objectField("line");
-            try jw.emitNumber(node.line);
-
-            try jw.objectField("col");
-            try jw.emitNumber(node.col);
-
-            if (node.fields.len != 0) {
-                try jw.objectField("fields");
-                try jw.beginArray();
-
-                for (node.fields) |field_node_index| {
-                    try jw.arrayElem();
-                    try jw.emitNumber(field_node_index);
-                }
-                try jw.endArray();
-            }
-
-            try jw.endObject();
-        }
-        try jw.endArray();
-
-        try jw.objectField("files");
-        try jw.beginArray();
-        for (self.file_list.items) |file| {
-            try jw.arrayElem();
-            try jw.emitString(file);
-        }
-        try jw.endArray();
-
-        try jw.endObject();
-    }
-
-    fn a(self: Dump) mem.Allocator {
-        return self.targets.allocator;
-    }
-
-    fn mergeSameStrings(opt_dest: *?[]const u8, src: []const u8) !void {
-        if (opt_dest.*) |dest| {
-            if (!mem.eql(u8, dest, src))
-                return error.MismatchedDumps;
-        } else {
-            opt_dest.* = src;
-        }
-    }
-};
-
-fn jsonObjInt(json_val: json.Value, field: []const u8) usize {
-    const uncasted = json_val.Object.get(field).?.value.Integer;
-    return @intCast(usize, uncasted);
-}