zig reduce: add transformation for inlining file-based @import
One thing is still missing for this to be useful, however: dealing with the ambiguous reference errors that the inlining process introduces.
parent 98dc28bbe2
commit 8bd01d2d9b
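In effect, the new transformation replaces a file-based `@import` call site with the imported file's contents wrapped in a struct. A before/after sketch (the file names here are invented for illustration):

    // before, in main.zig:
    const helper = @import("helper.zig");

    // after the transformation is applied:
    const helper = struct {
        // ... rendered contents of helper.zig, with any nested file-based
        // @import paths rebased relative to helper.zig's directory ...
    };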
@@ -24,8 +24,11 @@ pub const Fixups = struct {
     gut_functions: std.AutoHashMapUnmanaged(Ast.Node.Index, void) = .{},
     /// These global declarations will be omitted.
     omit_nodes: std.AutoHashMapUnmanaged(Ast.Node.Index, void) = .{},
-    /// These expressions will be replaced with `undefined`.
-    replace_nodes: std.AutoHashMapUnmanaged(Ast.Node.Index, void) = .{},
+    /// These expressions will be replaced with the string value.
+    replace_nodes: std.AutoHashMapUnmanaged(Ast.Node.Index, []const u8) = .{},
+    /// All `@import` builtin calls which refer to a file path will be prefixed
+    /// with this path.
+    rebase_imported_paths: ?[]const u8 = null,

     pub fn count(f: Fixups) usize {
         return f.unused_var_decls.count() +
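With this change, `replace_nodes` maps each node to arbitrary replacement source text instead of always rendering `undefined`. A minimal usage sketch, assuming `gpa` is an allocator and `some_node` is a node index obtained elsewhere:

    var fixups: Ast.Fixups = .{};
    defer fixups.deinit(gpa);
    // the old behavior is recovered by storing "undefined" as the value:
    try fixups.replace_nodes.put(gpa, some_node, "undefined");
    // but any expression text can be substituted now:
    try fixups.replace_nodes.put(gpa, some_node, "struct {}");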
@@ -277,8 +280,8 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
     const main_tokens = tree.nodes.items(.main_token);
     const node_tags = tree.nodes.items(.tag);
     const datas = tree.nodes.items(.data);
-    if (r.fixups.replace_nodes.contains(node)) {
-        try ais.writer().writeAll("undefined");
+    if (r.fixups.replace_nodes.get(node)) |replacement| {
+        try ais.writer().writeAll(replacement);
         try renderOnlySpace(r, space);
         return;
     }
@@ -1515,6 +1518,7 @@ fn renderBuiltinCall(
     const tree = r.tree;
     const ais = r.ais;
     const token_tags = tree.tokens.items(.tag);
+    const main_tokens = tree.nodes.items(.main_token);

     // TODO remove before release of 0.12.0
     const slice = tree.tokenSlice(builtin_token);
@@ -1609,6 +1613,26 @@ fn renderBuiltinCall(
         return renderToken(r, builtin_token + 2, space); // )
     }

+    if (r.fixups.rebase_imported_paths) |prefix| {
+        if (mem.eql(u8, slice, "@import")) f: {
+            const param = params[0];
+            const str_lit_token = main_tokens[param];
+            assert(token_tags[str_lit_token] == .string_literal);
+            const token_bytes = tree.tokenSlice(str_lit_token);
+            const imported_string = std.zig.string_literal.parseAlloc(r.gpa, token_bytes) catch |err| switch (err) {
+                error.OutOfMemory => return error.OutOfMemory,
+                error.InvalidLiteral => break :f,
+            };
+            defer r.gpa.free(imported_string);
+            const new_string = try std.fs.path.resolvePosix(r.gpa, &.{ prefix, imported_string });
+            defer r.gpa.free(new_string);
+
+            try renderToken(r, builtin_token + 1, .none); // (
+            try ais.writer().print("\"{}\"", .{std.zig.fmtEscapes(new_string)});
+            return renderToken(r, str_lit_token + 1, space); // )
+        }
+    }
+
     const last_param = params[params.len - 1];
     const after_last_param_token = tree.lastToken(last_param) + 1;

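To illustrate the rebasing above, suppose the renderer runs with a hypothetical prefix of "helper_dir". Then this call site:

    const util = @import("util.zig");

renders as:

    const util = @import("helper_dir/util.zig");

since `std.fs.path.resolvePosix` joins the prefix with the parsed string literal (and also normalizes any "." and ".." components).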
@@ -109,8 +109,11 @@ pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
     var rendered = std.ArrayList(u8).init(gpa);
     defer rendered.deinit();

-    var tree = try parse(gpa, arena, root_source_file_path);
-    defer tree.deinit(gpa);
+    var tree = try parse(gpa, root_source_file_path);
+    defer {
+        gpa.free(tree.source);
+        tree.deinit(gpa);
+    }

     if (!skip_smoke_test) {
         std.debug.print("smoke testing the interestingness check...\n", .{});
@@ -159,7 +162,7 @@ pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
         subset_size = @max(1, subset_size / 2);

         const this_set = transformations.items[start_index..][0..subset_size];
-        try transformationsToFixups(gpa, this_set, &fixups);
+        try transformationsToFixups(gpa, arena, root_source_file_path, this_set, &fixups);

         rendered.clearRetainingCapacity();
         try tree.renderToArrayList(&rendered, fixups);
@@ -171,7 +174,8 @@ pub fn main(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
         });
         switch (interestingness) {
             .interesting => {
-                const new_tree = try parse(gpa, arena, root_source_file_path);
+                const new_tree = try parse(gpa, root_source_file_path);
+                gpa.free(tree.source);
                 tree.deinit(gpa);
                 tree = new_tree;

@@ -241,6 +245,8 @@ fn runCheck(arena: std.mem.Allocator, argv: []const []const u8) !Interestingness

 fn transformationsToFixups(
     gpa: Allocator,
+    arena: Allocator,
+    root_source_file_path: []const u8,
     transforms: []const Walk.Transformation,
     fixups: *Ast.Fixups,
 ) !void {
@@ -254,20 +260,51 @@ fn transformationsToFixups(
             try fixups.omit_nodes.put(gpa, decl_node, {});
         },
         .replace_with_undef => |node| {
-            try fixups.replace_nodes.put(gpa, node, {});
+            try fixups.replace_nodes.put(gpa, node, "undefined");
+        },
+        .inline_imported_file => |inline_imported_file| {
+            defer gpa.free(inline_imported_file.imported_string);
+            const full_imported_path = try std.fs.path.join(gpa, &.{
+                std.fs.path.dirname(root_source_file_path) orelse ".",
+                inline_imported_file.imported_string,
+            });
+            defer gpa.free(full_imported_path);
+            var other_file_ast = try parse(gpa, full_imported_path);
+            defer {
+                gpa.free(other_file_ast.source);
+                other_file_ast.deinit(gpa);
+            }
+            var other_source = std.ArrayList(u8).init(gpa);
+            defer other_source.deinit();
+            var inlined_fixups: Ast.Fixups = .{};
+            defer inlined_fixups.deinit(gpa);
+            try other_source.appendSlice("struct {\n");
+            try other_file_ast.renderToArrayList(&other_source, .{
+                .rebase_imported_paths = std.fs.path.dirname(inline_imported_file.imported_string),
+            });
+            try other_source.appendSlice("}");
+
+            try fixups.replace_nodes.put(
+                gpa,
+                inline_imported_file.builtin_call_node,
+                try arena.dupe(u8, other_source.items),
+            );
         },
     };
 }

-fn parse(gpa: Allocator, arena: Allocator, root_source_file_path: []const u8) !Ast {
-    const source_code = try std.fs.cwd().readFileAllocOptions(
-        arena,
-        root_source_file_path,
+fn parse(gpa: Allocator, file_path: []const u8) !Ast {
+    const source_code = std.fs.cwd().readFileAllocOptions(
+        gpa,
+        file_path,
         std.math.maxInt(u32),
         null,
         1,
         0,
-    );
+    ) catch |err| {
+        fatal("unable to open '{s}': {s}", .{ file_path, @errorName(err) });
+    };
+    errdefer gpa.free(source_code);

     var tree = try Ast.parse(gpa, source_code, .zig);
     errdefer tree.deinit(gpa);
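`parse` now allocates the source buffer with `gpa` instead of the arena, so the caller owns both the buffer and the tree. The call sites above all follow the same pattern; a sketch with a hypothetical `path`:

    var tree = try parse(gpa, path);
    defer {
        gpa.free(tree.source); // the source buffer is gpa-allocated now
        tree.deinit(gpa);
    }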
@@ -2,6 +2,7 @@ const std = @import("std");
 const Ast = std.zig.Ast;
 const Walk = @This();
 const assert = std.debug.assert;
+const BuiltinFn = @import("../BuiltinFn.zig");

 ast: *const Ast,
 transformations: *std.ArrayList(Transformation),
@@ -16,6 +17,11 @@ pub const Transformation = union(enum) {
     delete_node: Ast.Node.Index,
     /// Replace an expression with `undefined`.
     replace_with_undef: Ast.Node.Index,
+    /// Replace an `@import` with the imported file contents wrapped in a struct.
+    inline_imported_file: struct {
+        builtin_call_node: Ast.Node.Index,
+        imported_string: []const u8,
+    },
 };

 pub const Error = error{OutOfMemory};
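A sketch of consuming the new variant, mirroring what `transformationsToFixups` does with it; the binding names here are illustrative:

    switch (t) {
        .inline_imported_file => |info| {
            // info.builtin_call_node is the @import call to replace.
            // info.imported_string was allocated by the walker; the
            // consumer takes ownership and eventually frees it.
            _ = info;
        },
        else => {},
    }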
@@ -437,16 +443,16 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {

         .builtin_call_two, .builtin_call_two_comma => {
             if (datas[node].lhs == 0) {
-                return walkBuiltinCall(w, main_tokens[node], &.{});
+                return walkBuiltinCall(w, node, &.{});
             } else if (datas[node].rhs == 0) {
-                return walkBuiltinCall(w, main_tokens[node], &.{datas[node].lhs});
+                return walkBuiltinCall(w, node, &.{datas[node].lhs});
             } else {
-                return walkBuiltinCall(w, main_tokens[node], &.{ datas[node].lhs, datas[node].rhs });
+                return walkBuiltinCall(w, node, &.{ datas[node].lhs, datas[node].rhs });
            }
         },
         .builtin_call, .builtin_call_comma => {
             const params = ast.extra_data[datas[node].lhs..datas[node].rhs];
-            return walkBuiltinCall(w, main_tokens[node], params);
+            return walkBuiltinCall(w, node, params);
         },

         .fn_proto_simple,
@@ -680,10 +686,31 @@ fn walkContainerDecl(

 fn walkBuiltinCall(
     w: *Walk,
-    builtin_token: Ast.TokenIndex,
+    call_node: Ast.Node.Index,
     params: []const Ast.Node.Index,
 ) Error!void {
-    _ = builtin_token;
+    const ast = w.ast;
+    const gpa = w.gpa;
+    const main_tokens = ast.nodes.items(.main_token);
+    const builtin_token = main_tokens[call_node];
+    const builtin_name = ast.tokenSlice(builtin_token);
+    const info = BuiltinFn.list.get(builtin_name).?;
+    switch (info.tag) {
+        .import => {
+            const operand_node = params[0];
+            const str_lit_token = main_tokens[operand_node];
+            const token_bytes = ast.tokenSlice(str_lit_token);
+            const imported_string = std.zig.string_literal.parseAlloc(gpa, token_bytes) catch
+                unreachable;
+            if (std.mem.endsWith(u8, imported_string, ".zig")) {
+                try w.transformations.append(.{ .inline_imported_file = .{
+                    .builtin_call_node = call_node,
+                    .imported_string = imported_string,
+                } });
+            }
+        },
+        else => {},
+    }
     for (params) |param_node| {
         try walkExpression(w, param_node);
     }
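As a result, the walker only queues imports whose operand ends in ".zig"; package imports are left untouched. For example:

    const other = @import("other.zig"); // queued as an inline_imported_file transformation
    const std = @import("std");        // ignored: not a file path ending in ".zig"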