Mirror of https://github.com/ziglang/zig.git, synced 2026-02-12 20:37:54 +00:00
astgen: fix remaining compile errors
Now it builds, and what remains in this branch is:
* fix the stage2 compiler regressions from this branch
* finish the rest of zig fmt test cases, get them passing
* merge in Vexu's translate-c AST branch & fix translate-c regressions
This commit is contained in:
parent 9010bd8aec
commit 914540ddb5
@@ -381,7 +381,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return addZIRNoOp(mod, scope, src, .unreachable_safe);
         },
         .@"return" => return ret(mod, scope, node),
-        .field_access => return field(mod, scope, rl, node),
+        .field_access => return fieldAccess(mod, scope, rl, node),
         .float_literal => return floatLiteral(mod, scope, rl, node),

         .if_simple => return ifExpr(mod, scope, rl, tree.ifSimple(node)),
@@ -1423,17 +1423,17 @@ fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.
 fn containerField(
     mod: *Module,
     scope: *Scope,
-    node: *ast.Node.ContainerField,
+    field: ast.full.ContainerField,
 ) InnerError!*zir.Inst {
     const tree = scope.tree();
     const token_starts = tree.tokens.items(.start);

-    const src = token_starts[tree.firstToken(node)];
-    const name = try mod.identifierTokenString(scope, node.name_token);
+    const src = token_starts[field.ast.name_token];
+    const name = try mod.identifierTokenString(scope, field.ast.name_token);

-    if (node.comptime_token == null and node.value_expr == null and node.align_expr == null) {
-        if (node.type_expr) |some| {
-            const ty = try typeExpr(mod, scope, some);
+    if (field.comptime_token == null and field.ast.value_expr == 0 and field.ast.align_expr == 0) {
+        if (field.ast.type_expr != 0) {
+            const ty = try typeExpr(mod, scope, field.ast.type_expr);
             return addZIRInst(mod, scope, src, zir.Inst.ContainerFieldTyped, .{
                 .bytes = name,
                 .ty = ty,
@@ -1445,9 +1445,11 @@ fn containerField(
         }
     }

-    const ty = if (node.type_expr) |some| try typeExpr(mod, scope, some) else null;
-    const alignment = if (node.align_expr) |some| try expr(mod, scope, .none, some) else null;
-    const init = if (node.value_expr) |some| try expr(mod, scope, .none, some) else null;
+    const ty = if (field.ast.type_expr != 0) try typeExpr(mod, scope, field.ast.type_expr) else null;
+    // TODO result location should be alignment type
+    const alignment = if (field.ast.align_expr != 0) try expr(mod, scope, .none, field.ast.align_expr) else null;
+    // TODO result location should be the field type
+    const init = if (field.ast.value_expr != 0) try expr(mod, scope, .none, field.ast.value_expr) else null;

     return addZIRInst(mod, scope, src, zir.Inst.ContainerField, .{
         .bytes = name,
@@ -1455,7 +1457,7 @@ fn containerField(
         .ty = ty,
         .init = init,
         .alignment = alignment,
-        .is_comptime = node.comptime_token != null,
+        .is_comptime = field.comptime_token != null,
     });
 }

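The containerField hunks above all follow the same migration: the old optional-pointer AST (`*ast.Node.ContainerField`, with nullable sub-expression pointers) is replaced by the `ast.full.ContainerField` view, where an absent sub-expression is encoded as node index 0. Below is a minimal standalone sketch of that convention; the `NodeIndex` and `ContainerField` types are simplified stand-ins invented for the example, not the real compiler types.

const std = @import("std");

// Simplified stand-ins for the real compiler types (ast.Node.Index and
// ast.full.ContainerField); in the new AST, node index 0 means "no node".
const NodeIndex = u32;

const ContainerField = struct {
    name_token: u32,
    type_expr: NodeIndex,
    align_expr: NodeIndex,
    value_expr: NodeIndex,
};

// Mirrors the check pattern in the hunks above: compare the index against 0
// instead of unwrapping an optional pointer.
fn hasExplicitType(field: ContainerField) bool {
    return field.type_expr != 0;
}

pub fn main() void {
    const typed = ContainerField{ .name_token = 3, .type_expr = 7, .align_expr = 0, .value_expr = 0 };
    const untyped = ContainerField{ .name_token = 5, .type_expr = 0, .align_expr = 0, .value_expr = 0 };
    std.debug.print("typed field has a type expr: {}\n", .{hasExplicitType(typed)});
    std.debug.print("untyped field has a type expr: {}\n", .{hasExplicitType(untyped)});
}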
@@ -1485,12 +1487,15 @@ fn containerDecl(
     defer fields.deinit();

     for (container_decl.ast.members) |member| {
-        switch (node_tags[member]) {
-            .container_field_init, .container_field_align, .container_field => {
-                try fields.append(try containerField(mod, &gen_scope.base, member));
-            },
+        // TODO just handle these cases differently since they end up with different ZIR
+        // instructions anyway. It will be simpler & have fewer branches.
+        const field = switch (node_tags[member]) {
+            .container_field_init => try containerField(mod, &gen_scope.base, tree.containerFieldInit(member)),
+            .container_field_align => try containerField(mod, &gen_scope.base, tree.containerFieldAlign(member)),
+            .container_field => try containerField(mod, &gen_scope.base, tree.containerField(member)),
             else => continue,
-        }
+        };
+        try fields.append(field);
     }

     var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
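The rewritten loop relies on switch being an expression in Zig: each field-bearing member yields a value, and `else => continue` skips everything else without an extra nesting level. A small self-contained illustration of the same control-flow shape, with a made-up `Tag` enum standing in for the real node tags:

const std = @import("std");

const Tag = enum { container_field, fn_decl, test_decl };

pub fn main() void {
    const members = [_]Tag{ .fn_decl, .container_field, .test_decl, .container_field };
    var field_count: u32 = 0;
    for (members) |member| {
        // switch used as an expression: non-field members skip the rest of the
        // loop body via `else => continue`, like the containerDecl loop above.
        const weight: u32 = switch (member) {
            .container_field => 1,
            else => continue,
        };
        field_count += weight;
    }
    std.debug.print("container fields seen: {d}\n", .{field_count});
}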
@@ -1847,7 +1852,7 @@ fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: as
     return mem.eql(u8, ident_name_1, ident_name_2);
 }

-pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
+pub fn fieldAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
     const tree = scope.tree();
     const token_starts = tree.tokens.items(.start);
     const main_tokens = tree.nodes.items(.main_token);
@@ -3269,7 +3274,7 @@ fn asRlPtr(
     rl: ResultLoc,
     src: usize,
     result_ptr: *zir.Inst,
-    operand_node: *ast.Node,
+    operand_node: ast.Node.Index,
     dest_type: *zir.Inst,
 ) InnerError!*zir.Inst {
     // Detect whether this expr() call goes into rvalue() to store the result into the
@@ -550,11 +550,11 @@ pub const File = struct {
             id_symlink_basename,
             &prev_digest_buf,
         ) catch |err| b: {
-            log.debug("archive new_digest={} readFile error: {s}", .{ digest, @errorName(err) });
+            log.debug("archive new_digest={x} readFile error: {s}", .{ digest, @errorName(err) });
             break :b prev_digest_buf[0..0];
         };
         if (mem.eql(u8, prev_digest, &digest)) {
-            log.debug("archive digest={} match - skipping invocation", .{digest});
+            log.debug("archive digest={x} match - skipping invocation", .{digest});
             base.lock = man.toOwnedLock();
             return;
         }
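The `{}` → `{x}` changes here and in the Coff/Elf/MachO/Wasm hunks below switch the cache-digest log output to lowercase hex. A small sketch of what that formatting looks like; the digest bytes are invented, and the byte-by-byte `{x:0>2}` loop is used so the example stays valid across std.fmt revisions, whereas the diff applies `{x}` to the digest array directly (accepted by the std.fmt of this branch's Zig version):

const std = @import("std");

pub fn main() void {
    // Invented stand-in for the cache manifest digest; in the linker code it
    // is a small fixed-size byte array produced by the cache hasher.
    const digest = [_]u8{ 0xde, 0xad, 0xbe, 0xef };

    // "{x}" pins the output to lowercase hex; printing byte by byte with
    // "{x:0>2}" produces the same "deadbeef"-style string.
    std.debug.print("digest=", .{});
    for (digest) |byte| {
        std.debug.print("{x:0>2}", .{byte});
    }
    std.debug.print("\n", .{});
}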
@@ -892,17 +892,17 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
             id_symlink_basename,
             &prev_digest_buf,
         ) catch |err| blk: {
-            log.debug("COFF LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+            log.debug("COFF LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
             // Handle this as a cache miss.
             break :blk prev_digest_buf[0..0];
         };
         if (mem.eql(u8, prev_digest, &digest)) {
-            log.debug("COFF LLD digest={} match - skipping invocation", .{digest});
+            log.debug("COFF LLD digest={x} match - skipping invocation", .{digest});
             // Hot diggity dog! The output binary is already there.
             self.base.lock = man.toOwnedLock();
             return;
         }
-        log.debug("COFF LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+        log.debug("COFF LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });

         // We are about to change the output file to be different, so we invalidate the build hash now.
         directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
@@ -1365,17 +1365,17 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
             id_symlink_basename,
             &prev_digest_buf,
         ) catch |err| blk: {
-            log.debug("ELF LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+            log.debug("ELF LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
             // Handle this as a cache miss.
             break :blk prev_digest_buf[0..0];
         };
         if (mem.eql(u8, prev_digest, &digest)) {
-            log.debug("ELF LLD digest={} match - skipping invocation", .{digest});
+            log.debug("ELF LLD digest={x} match - skipping invocation", .{digest});
             // Hot diggity dog! The output binary is already there.
             self.base.lock = man.toOwnedLock();
             return;
         }
-        log.debug("ELF LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+        log.debug("ELF LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });

         // We are about to change the output file to be different, so we invalidate the build hash now.
         directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
@@ -556,17 +556,17 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
             id_symlink_basename,
             &prev_digest_buf,
         ) catch |err| blk: {
-            log.debug("MachO LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+            log.debug("MachO LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
             // Handle this as a cache miss.
             break :blk prev_digest_buf[0..0];
         };
         if (mem.eql(u8, prev_digest, &digest)) {
-            log.debug("MachO LLD digest={} match - skipping invocation", .{digest});
+            log.debug("MachO LLD digest={x} match - skipping invocation", .{digest});
             // Hot diggity dog! The output binary is already there.
             self.base.lock = man.toOwnedLock();
             return;
         }
-        log.debug("MachO LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+        log.debug("MachO LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });

         // We are about to change the output file to be different, so we invalidate the build hash now.
         directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
@@ -333,17 +333,17 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
             id_symlink_basename,
             &prev_digest_buf,
         ) catch |err| blk: {
-            log.debug("WASM LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+            log.debug("WASM LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
             // Handle this as a cache miss.
             break :blk prev_digest_buf[0..0];
         };
         if (mem.eql(u8, prev_digest, &digest)) {
-            log.debug("WASM LLD digest={} match - skipping invocation", .{digest});
+            log.debug("WASM LLD digest={x} match - skipping invocation", .{digest});
             // Hot diggity dog! The output binary is already there.
             self.base.lock = man.toOwnedLock();
             return;
         }
-        log.debug("WASM LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+        log.debug("WASM LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });

         // We are about to change the output file to be different, so we invalidate the build hash now.
         directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {