merge dumps tool: merging ast nodes

-fgenerate-docs is replaced with -femit-docs
-fno-emit-bin is added to prevent outputting a binary
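
For example (a hypothetical invocation, not taken from this commit): zig build-exe main.zig -femit-docs -fno-emit-bin runs semantic analysis and writes the docs/ directory with HTML documentation, while skipping machine-code emission and linking.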
Andrew Kelley 2019-10-11 17:08:08 -04:00
parent 8aa20227ed
commit 30a555eed4
8 changed files with 154 additions and 27 deletions

View File

@@ -3,6 +3,8 @@ pub const Adler32 = adler.Adler32;
const auto_hash = @import("hash/auto_hash.zig");
pub const autoHash = auto_hash.autoHash;
pub const autoHashStrat = auto_hash.hash;
pub const Strategy = auto_hash.HashStrategy;
// pub for polynomials + generic crc32 construction
pub const crc = @import("hash/crc.zig");

View File

@@ -550,3 +550,13 @@ pub fn getAutoEqlFn(comptime K: type) (fn (K, K) bool) {
        }
    }.eql;
}

pub fn getAutoHashStratFn(comptime K: type, comptime strategy: std.hash.Strategy) (fn (K) u32) {
    return struct {
        fn hash(key: K) u32 {
            var hasher = Wyhash.init(0);
            std.hash.autoHashStrat(&hasher, key, strategy);
            return @truncate(u32, hasher.final());
        }
    }.hash;
}
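
A minimal usage sketch (the Key type, the test block, and the choice of the Deep strategy variant are illustrative assumptions, not part of this commit): getAutoHashStratFn builds a fn (K) u32 suitable for std.HashMap by hashing with std.hash.autoHashStrat under the chosen std.hash.Strategy, which allows keys containing slices to be hashed by content rather than by pointer.

const std = @import("std");

// Illustrative key type; the Deep strategy hashes the contents of `name`,
// not its pointer.
const Key = struct {
    id: u64,
    name: []const u8,
};

// Build a `fn (Key) u32` for use with std.HashMap.
const hashKeyDeep = std.hash_map.getAutoHashStratFn(Key, std.hash.Strategy.Deep);

test "deep-hash a key that contains a slice" {
    const a = hashKeyDeep(Key{ .id = 1, .name = "main.zig" });
    const b = hashKeyDeep(Key{ .id = 1, .name = "main.zig" });
    std.testing.expect(a == b);
}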

View File

@@ -152,10 +152,17 @@ pub fn WriteStream(comptime OutStream: type, comptime max_depth: usize) type {
        ) !void {
            assert(self.state[self.state_index] == State.Value);
            switch (@typeInfo(@typeOf(value))) {
                .Int => |info| if (info.bits < 53 or (value < 4503599627370496 and value > -4503599627370496)) {
                    try self.stream.print("{}", value);
                    self.popState();
                    return;
                .Int => |info| {
                    if (info.bits < 53) {
                        try self.stream.print("{}", value);
                        self.popState();
                        return;
                    }
                    if (value < 4503599627370496 and (!info.is_signed or value > -4503599627370496)) {
                        try self.stream.print("{}", value);
                        self.popState();
                        return;
                    }
                },
                .Float => if (@floatCast(f64, value) == value) {
                    try self.stream.print("{}", value);
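
In the rewritten Int branch, an integer is emitted directly in two cases: when its type has fewer than 53 bits, so every possible value is exactly representable as an IEEE-754 double, or when the runtime value lies within ±4503599627370496 (2^52). The negative bound is only compared when info.is_signed, presumably so that unsigned values are never compared against a negative literal.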

View File

@@ -2091,6 +2091,7 @@ struct CodeGen {
    bool function_sections;
    bool enable_dump_analysis;
    bool enable_doc_generation;
    bool disable_bin_generation;
    Buf *mmacosx_version_min;
    Buf *mios_version_min;

View File

@@ -7586,6 +7586,8 @@ static void zig_llvm_emit_output(CodeGen *g) {
char *err_msg = nullptr;
switch (g->emit_file_type) {
case EmitFileTypeBinary:
if (g->disable_bin_generation)
return;
if (ZigLLVMTargetMachineEmitToFile(g->target_machine, g->module, buf_ptr(output_path),
ZigLLVM_EmitBinary, &err_msg, g->build_mode == BuildModeDebug, is_small,
g->enable_time_report))
@@ -10158,6 +10160,7 @@ static Error check_cache(CodeGen *g, Buf *manifest_dir, Buf *digest) {
cache_bool(ch, g->function_sections);
cache_bool(ch, g->enable_dump_analysis);
cache_bool(ch, g->enable_doc_generation);
cache_bool(ch, g->disable_bin_generation);
cache_buf_opt(ch, g->mmacosx_version_min);
cache_buf_opt(ch, g->mios_version_min);
cache_usize(ch, g->version_major);
@@ -10396,7 +10399,8 @@ void codegen_build_and_link(CodeGen *g) {
// If there is more than one object, we have to link them (with -r).
// Finally, if we didn't make an object from zig source, and we don't have caching enabled,
// then we have an object from C source that we must copy to the output dir which we do with a -r link.
if (g->emit_file_type == EmitFileTypeBinary && (g->out_type != OutTypeObj || g->link_objects.length > 1 ||
if (!g->disable_bin_generation && g->emit_file_type == EmitFileTypeBinary &&
(g->out_type != OutTypeObj || g->link_objects.length > 1 ||
(!need_llvm_module(g) && !g->enable_cache)))
{
codegen_link(g);
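
The flag is threaded through three places here: zig_llvm_emit_output returns early instead of emitting a binary, check_cache mixes disable_bin_generation into the cache manifest so that a -fno-emit-bin build and a regular build of the same source get distinct cache digests, and codegen_build_and_link skips the link step entirely when the flag is set.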

View File

@@ -1309,15 +1309,6 @@ void zig_print_analysis_dump(CodeGen *g, FILE *f, const char *one_indent, const
}
jw_end_array(jw);
jw_object_field(jw, "files");
jw_begin_array(jw);
for (uint32_t i = 0; i < ctx.file_list.length; i += 1) {
Buf *file = ctx.file_list.at(i);
jw_array_elem(jw);
anal_dump_file(&ctx, file);
}
jw_end_array(jw);
jw_object_field(jw, "errors");
jw_begin_array(jw);
for (uint32_t i = 0; i < ctx.err_list.length; i += 1) {
@@ -1336,5 +1327,14 @@ void zig_print_analysis_dump(CodeGen *g, FILE *f, const char *one_indent, const
}
jw_end_array(jw);
jw_object_field(jw, "files");
jw_begin_array(jw);
for (uint32_t i = 0; i < ctx.file_list.length; i += 1) {
Buf *file = ctx.file_list.at(i);
jw_array_elem(jw);
anal_dump_file(&ctx, file);
}
jw_end_array(jw);
jw_end_object(jw);
}

View File

@@ -65,7 +65,8 @@ static int print_full_usage(const char *arg0, FILE *file, int return_code) {
" -ftime-report print timing diagnostics\n"
" -fstack-report print stack size diagnostics\n"
" -fdump-analysis write analysis.json file with type information\n"
" -fgenerate-docs create a docs/ dir with html documentation\n"
" -femit-docs create a docs/ dir with html documentation\n"
" -fno-emit-bin skip emitting machine code\n"
" --libc [file] Provide a file which specifies libc paths\n"
" --name [name] override output name\n"
" --output-dir [dir] override output directory (defaults to cwd)\n"
@@ -483,6 +484,7 @@ int main(int argc, char **argv) {
bool stack_report = false;
bool enable_dump_analysis = false;
bool enable_doc_generation = false;
bool disable_bin_generation = false;
const char *cache_dir = nullptr;
CliPkg *cur_pkg = allocate<CliPkg>(1);
BuildMode build_mode = BuildModeDebug;
@@ -668,8 +670,10 @@ int main(int argc, char **argv) {
stack_report = true;
} else if (strcmp(arg, "-fdump-analysis") == 0) {
enable_dump_analysis = true;
} else if (strcmp(arg, "-fgenerate-docs") == 0) {
} else if (strcmp(arg, "-femit-docs") == 0) {
enable_doc_generation = true;
} else if (strcmp(arg, "-fno-emit-bin") == 0) {
disable_bin_generation = true;
} else if (strcmp(arg, "--enable-valgrind") == 0) {
valgrind_support = ValgrindSupportEnabled;
} else if (strcmp(arg, "--disable-valgrind") == 0) {
@@ -1148,6 +1152,7 @@ int main(int argc, char **argv) {
g->enable_stack_report = stack_report;
g->enable_dump_analysis = enable_dump_analysis;
g->enable_doc_generation = enable_doc_generation;
g->disable_bin_generation = disable_bin_generation;
codegen_set_out_name(g, buf_out_name);
codegen_set_lib_version(g, ver_major, ver_minor, ver_patch);
g->want_single_threaded = want_single_threaded;
@@ -1290,6 +1295,11 @@ int main(int argc, char **argv) {
zig_print_stack_report(g, stdout);
}
if (g->disable_bin_generation) {
fprintf(stderr, "Semantic analysis complete. No binary produced due to -fno-emit-bin.\n");
return 0;
}
Buf *test_exe_path_unresolved = &g->output_file_path;
Buf *test_exe_path = buf_alloc();
*test_exe_path = os_path_resolve(&test_exe_path_unresolved, 1);
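
Note that the new early return sits just before the surrounding code resolves test_exe_path, so with -fno-emit-bin a test build stops after analysis and never produces or runs a test executable.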

View File

@@ -24,19 +24,49 @@ pub fn main() anyerror!void {
try dump.render(&stdout.outStream().stream);
}
/// AST source node
const Node = struct {
file: usize,
line: usize,
col: usize,
fields: []usize,
fn hash(n: Node) u32 {
var hasher = std.hash.Wyhash.init(0);
std.hash.autoHash(&hasher, n.file);
std.hash.autoHash(&hasher, n.line);
std.hash.autoHash(&hasher, n.col);
return @truncate(u32, hasher.final());
}
fn eql(a: Node, b: Node) bool {
return a.file == b.file and
a.line == b.line and
a.col == b.col;
}
};
const Dump = struct {
zig_id: ?[]const u8 = null,
zig_version: ?[]const u8 = null,
root_name: ?[]const u8 = null,
targets: std.ArrayList([]const u8),
files_list: std.ArrayList([]const u8),
files_map: std.StringHashMap(usize),
const FileMap = std.StringHashMap(usize);
file_list: std.ArrayList([]const u8),
file_map: FileMap,
const NodeMap = std.HashMap(Node, usize, Node.hash, Node.eql);
node_list: std.ArrayList(Node),
node_map: NodeMap,
fn init(allocator: *mem.Allocator) Dump {
return Dump{
.targets = std.ArrayList([]const u8).init(allocator),
.files_list = std.ArrayList([]const u8).init(allocator),
.files_map = std.StringHashMap(usize).init(allocator),
.file_list = std.ArrayList([]const u8).init(allocator),
.file_map = FileMap.init(allocator),
.node_list = std.ArrayList(Node).init(allocator),
.node_map = NodeMap.init(allocator),
};
}
@@ -56,17 +86,45 @@ const Dump = struct {
const other_files = root.Object.get("files").?.value.Array.toSliceConst();
var other_file_to_mine = std.AutoHashMap(usize, usize).init(self.a());
for (other_files) |other_file, i| {
const gop = try self.files_map.getOrPut(other_file.String);
if (gop.found_existing) {
try other_file_to_mine.putNoClobber(i, gop.kv.value);
} else {
gop.kv.value = self.files_list.len;
try self.files_list.append(other_file.String);
const gop = try self.file_map.getOrPut(other_file.String);
if (!gop.found_existing) {
gop.kv.value = self.file_list.len;
try self.file_list.append(other_file.String);
}
try other_file_to_mine.putNoClobber(i, gop.kv.value);
}
// Merge ast nodes
const other_ast_nodes = root.Object.get("astNodes").?.value.Array.toSliceConst();
var other_ast_node_to_mine = std.AutoHashMap(usize, usize).init(self.a());
for (other_ast_nodes) |other_ast_node_json, i| {
const other_file_id = jsonObjInt(other_ast_node_json, "file");
const other_node = Node{
.line = jsonObjInt(other_ast_node_json, "line"),
.col = jsonObjInt(other_ast_node_json, "col"),
.file = other_file_to_mine.getValue(other_file_id).?,
.fields = ([*]usize)(undefined)[0..0],
};
const gop = try self.node_map.getOrPut(other_node);
if (!gop.found_existing) {
gop.kv.value = self.node_list.len;
try self.node_list.append(other_node);
}
try other_ast_node_to_mine.putNoClobber(i, gop.kv.value);
}
// convert fields lists
for (other_ast_nodes) |other_ast_node_json, i| {
const my_node_index = other_ast_node_to_mine.get(i).?.value;
const my_node = &self.node_list.toSlice()[my_node_index];
if (other_ast_node_json.Object.get("fields")) |fields_json_kv| {
const other_fields = fields_json_kv.value.Array.toSliceConst();
my_node.fields = try self.a().alloc(usize, other_fields.len);
for (other_fields) |other_field_index, field_i| {
const other_index = @intCast(usize, other_field_index.Integer);
my_node.fields[field_i] = other_ast_node_to_mine.get(other_index).?.value;
}
}
}
}
fn render(self: *Dump, stream: var) !void {
@@ -81,9 +139,39 @@
}
try jw.endArray();
try jw.objectField("astNodes");
try jw.beginArray();
for (self.node_list.toSliceConst()) |node| {
try jw.arrayElem();
try jw.beginObject();
try jw.objectField("file");
try jw.emitNumber(node.file);
try jw.objectField("line");
try jw.emitNumber(node.line);
try jw.objectField("col");
try jw.emitNumber(node.col);
if (node.fields.len != 0) {
try jw.objectField("fields");
try jw.beginArray();
for (node.fields) |field_node_index| {
try jw.arrayElem();
try jw.emitNumber(field_node_index);
}
try jw.endArray();
}
try jw.endObject();
}
try jw.endArray();
try jw.objectField("files");
try jw.beginArray();
for (self.files_list.toSliceConst()) |file| {
for (self.file_list.toSliceConst()) |file| {
try jw.arrayElem();
try jw.emitString(file);
}
@@ -105,3 +193,8 @@ const Dump = struct {
}
}
};
fn jsonObjInt(json_val: json.Value, field: []const u8) usize {
const uncasted = json_val.Object.get(field).?.value.Integer;
return @intCast(usize, uncasted);
}
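
To make the merge concrete, here is a hypothetical illustration (the values are invented, not taken from a real analysis dump). Suppose dump A contributes astNodes [ {file 0, line 1, col 1}, {file 0, line 4, col 5} ] and dump B contributes astNodes [ {file 0, line 4, col 5}, {file 0, line 9, col 2, fields: [0]} ], where B's file index has already been remapped through other_file_to_mine to the same merged index as A's. The NodeMap lookup recognizes B's node 0 as a duplicate of A's node 1 (same file, line, col), so other_ast_node_to_mine records 0 -> 1 and 1 -> 2, and the merged list becomes [ {file 0, line 1, col 1}, {file 0, line 4, col 5}, {file 0, line 9, col 2, fields: [1]} ]: the second pass over other_ast_nodes rewrites the fields entry from B-local index 0 to merged index 1.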