stage2: fix error sets

Andrew Kelley 2021-03-28 19:38:19 -07:00
parent 0005b34637
commit 8f469c1127
7 changed files with 218 additions and 237 deletions


@ -197,7 +197,7 @@ pub fn typeExpr(gz: *GenZir, scope: *Scope, type_node: ast.Node.Index) InnerErro
}
fn lvalExpr(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_tags = tree.nodes.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
switch (node_tags[node]) {
@ -392,7 +392,7 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!zir.Ins
/// it must otherwise not be used.
pub fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
const mod = gz.astgen.mod;
const tree = scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
const node_datas = tree.nodes.items(.data);
@ -925,7 +925,7 @@ pub fn blockExpr(
const tracy = trace(@src());
defer tracy.end();
const tree = scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
@ -996,7 +996,7 @@ fn labeledBlockExpr(
assert(zir_tag == .block);
const mod = gz.astgen.mod;
const tree = parent_scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
@ -1074,7 +1074,7 @@ fn blockExprStmts(
node: ast.Node.Index,
statements: []const ast.Node.Index,
) !void {
const tree = parent_scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const node_tags = tree.nodes.items(.tag);
@ -1235,7 +1235,6 @@ fn blockExprStmts(
.merge_error_sets,
.error_union_type,
.bit_not,
.error_set,
.error_value,
.error_to_int,
.int_to_error,
@ -1305,7 +1304,7 @@ fn varDecl(
return mod.failNode(scope, var_decl.ast.align_node, "TODO implement alignment on locals", .{});
}
const astgen = gz.astgen;
const tree = scope.tree();
const tree = gz.tree();
const token_tags = tree.tokens.items(.tag);
const name_token = var_decl.ast.mut_token + 1;
@ -1365,7 +1364,7 @@ fn varDecl(
// Depending on the type of AST the initialization expression is, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
if (!nodeMayNeedMemoryLocation(scope, var_decl.ast.init_node)) {
if (!nodeMayNeedMemoryLocation(tree, var_decl.ast.init_node)) {
const result_loc: ResultLoc = if (var_decl.ast.type_node != 0) .{
.ty = try typeExpr(gz, scope, var_decl.ast.type_node),
} else .none;
@ -1502,7 +1501,7 @@ fn varDecl(
}
fn assign(gz: *GenZir, scope: *Scope, infix_node: ast.Node.Index) InnerError!void {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const node_tags = tree.nodes.items(.tag);
@ -1527,7 +1526,7 @@ fn assignOp(
infix_node: ast.Node.Index,
op_inst_tag: zir.Inst.Tag,
) InnerError!void {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs);
@ -1543,7 +1542,7 @@ fn assignOp(
}
fn boolNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const operand = try expr(gz, scope, .{ .ty = .bool_type }, node_datas[node].lhs);
@ -1552,7 +1551,7 @@ fn boolNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) Inne
}
fn bitNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const operand = try expr(gz, scope, .none, node_datas[node].lhs);
@ -1567,7 +1566,7 @@ fn negation(
node: ast.Node.Index,
tag: zir.Inst.Tag,
) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const operand = try expr(gz, scope, .none, node_datas[node].lhs);
@ -1582,7 +1581,7 @@ fn ptrType(
node: ast.Node.Index,
ptr_info: ast.full.PtrType,
) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const elem_type = try typeExpr(gz, scope, ptr_info.ast.child_type);
@ -1664,7 +1663,7 @@ fn ptrType(
}
fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
// TODO check for [_]T
@ -1676,7 +1675,7 @@ fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !z
}
fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const extra = tree.extraData(node_datas[node].rhs, ast.Node.ArrayTypeSentinel);
@ -1704,10 +1703,11 @@ fn errorSetDecl(
rl: ResultLoc,
node: ast.Node.Index,
) InnerError!zir.Inst.Ref {
if (true) @panic("TODO update for zir-memory-layout branch");
const mod = gz.astgen.mod;
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
const arena = gz.astgen.arena;
// Count how many fields there are.
const error_token = main_tokens[node];
@ -1724,7 +1724,7 @@ fn errorSetDecl(
} else unreachable; // TODO should not need else unreachable here
};
const fields = try scope.arena().alloc([]const u8, count);
const fields = try arena.alloc([]const u8, count);
{
var tok_i = error_token + 2;
var field_i: usize = 0;
@ -1740,8 +1740,21 @@ fn errorSetDecl(
}
}
}
const result = try addZIRInst(mod, scope, src, zir.Inst.ErrorSet, .{ .fields = fields }, .{});
return rvalue(gz, scope, rl, result);
const error_set = try arena.create(Module.ErrorSet);
error_set.* = .{
.owner_decl = gz.astgen.decl,
.node_offset = gz.astgen.decl.nodeIndexToRelative(node),
.names_ptr = fields.ptr,
.names_len = @intCast(u32, fields.len),
};
const error_set_ty = try Type.Tag.error_set.create(arena, error_set);
const typed_value = try arena.create(TypedValue);
typed_value.* = .{
.ty = Type.initTag(.type),
.val = try Value.Tag.ty.create(arena, error_set_ty),
};
const result = try gz.addConst(typed_value);
return rvalue(gz, scope, rl, result, node);
}
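For context, the construct this function lowers is Zig's explicit error set declaration syntax; the names collected above become the fields slice. A trivial example of the syntax (the identifiers are arbitrary):

    const FileError = error{ NotFound, AccessDenied };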
fn orelseCatchExpr(
@ -2518,13 +2531,12 @@ fn getRangeNode(
}
fn switchExpr(
gz: *GenZir,
parent_gz: *GenZir,
scope: *Scope,
rl: ResultLoc,
switch_node: ast.Node.Index,
) InnerError!zir.Inst.Ref {
if (true) @panic("TODO update for zir-memory-layout");
const parent_gz = scope.getGenZir();
const tree = parent_gz.tree();
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
@ -2541,7 +2553,7 @@ fn switchExpr(
var block_scope: GenZir = .{
.parent = scope,
.decl = scope.ownerDecl().?,
.arena = scope.arena(),
.arena = parent_gz.astgen.arena,
.force_comptime = parent_gz.force_comptime,
.instructions = .{},
};
@ -2727,7 +2739,7 @@ fn switchExpr(
cases[case_index] = .{
.item = item,
.body = .{ .instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items) },
.body = .{ .instructions = try parent_gz.astgen.arena.dupe(zir.Inst.Ref, case_scope.instructions.items) },
};
case_index += 1;
continue;
@ -2774,14 +2786,14 @@ fn switchExpr(
.else_body = undefined, // populated below
}, .{});
const cond_block = try addZIRInstBlock(mod, &else_scope.base, case_src, .block, .{
.instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items),
.instructions = try parent_gz.astgen.arena.dupe(zir.Inst.Ref, case_scope.instructions.items),
});
// reset cond_scope for then_body
case_scope.instructions.items.len = 0;
try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target);
condbr.positionals.then_body = .{
.instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items),
.instructions = try parent_gz.astgen.arena.dupe(zir.Inst.Ref, case_scope.instructions.items),
};
// reset cond_scope for else_body
@ -2790,7 +2802,7 @@ fn switchExpr(
.block = cond_block,
}, .{});
condbr.positionals.else_body = .{
.instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items),
.instructions = try parent_gz.astgen.arena.dupe(zir.Inst.Ref, case_scope.instructions.items),
};
}
@ -2816,7 +2828,7 @@ fn switchCaseExpr(
case: ast.full.SwitchCase,
target: zir.Inst.Ref,
) !void {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const token_tags = tree.tokens.items(.tag);
@ -2849,13 +2861,13 @@ fn switchCaseExpr(
}
fn ret(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
const operand_node = node_datas[node].lhs;
const operand: zir.Inst.Ref = if (operand_node != 0) operand: {
const rl: ResultLoc = if (nodeMayNeedMemoryLocation(scope, operand_node)) .{
const rl: ResultLoc = if (nodeMayNeedMemoryLocation(tree, operand_node)) .{
.ptr = try gz.addNode(.ret_ptr, node),
} else .{
.ty = try gz.addNode(.ret_type, node),
@ -2876,7 +2888,7 @@ fn identifier(
defer tracy.end();
const mod = gz.astgen.mod;
const tree = scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const ident_token = main_tokens[ident];
@ -2961,7 +2973,7 @@ fn stringLiteral(
rl: ResultLoc,
node: ast.Node.Index,
) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const string_bytes = &gz.astgen.string_bytes;
const str_index = string_bytes.items.len;
@ -3048,7 +3060,7 @@ fn integerLiteral(
rl: ResultLoc,
node: ast.Node.Index,
) InnerError!zir.Inst.Ref {
const tree = scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const int_token = main_tokens[node];
const prefixed_bytes = tree.tokenSlice(int_token);
@ -3070,8 +3082,8 @@ fn floatLiteral(
rl: ResultLoc,
node: ast.Node.Index,
) InnerError!zir.Inst.Ref {
const arena = scope.arena();
const tree = scope.tree();
const arena = gz.astgen.arena;
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const main_token = main_tokens[node];
@ -3088,10 +3100,7 @@ fn floatLiteral(
.ty = Type.initTag(.comptime_float),
.val = try Value.Tag.float_128.create(arena, float_number),
};
const result = try gz.add(.{
.tag = .@"const",
.data = .{ .@"const" = typed_value },
});
const result = try gz.addConst(typed_value);
return rvalue(gz, scope, rl, result, node);
}
@ -3103,8 +3112,8 @@ fn asmExpr(
full: ast.full.Asm,
) InnerError!zir.Inst.Ref {
const mod = gz.astgen.mod;
const arena = scope.arena();
const tree = scope.tree();
const arena = gz.astgen.arena;
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const node_datas = tree.nodes.items(.data);
@ -3289,7 +3298,7 @@ fn typeOf(
const result = try gz.addUnTok(.typeof, try expr(gz, scope, .none, params[0]), node);
return rvalue(gz, scope, rl, result, node);
}
const arena = scope.arena();
const arena = gz.astgen.arena;
var items = try arena.alloc(zir.Inst.Ref, params.len);
for (params) |param, param_i| {
items[param_i] = try expr(gz, scope, .none, param);
@ -3311,7 +3320,7 @@ fn builtinCall(
params: []const ast.Node.Index,
) InnerError!zir.Inst.Ref {
const mod = gz.astgen.mod;
const tree = scope.tree();
const tree = gz.tree();
const main_tokens = tree.nodes.items(.main_token);
const builtin_token = main_tokens[node];
@ -3608,8 +3617,7 @@ pub const simple_types = std.ComptimeStringMap(zir.Inst.Ref, .{
.{ "false", .bool_false },
});
fn nodeMayNeedMemoryLocation(scope: *Scope, start_node: ast.Node.Index) bool {
const tree = scope.tree();
fn nodeMayNeedMemoryLocation(tree: *const ast.Tree, start_node: ast.Node.Index) bool {
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
const main_tokens = tree.nodes.items(.main_token);
@ -3842,7 +3850,7 @@ fn rvalue(
},
.ref => {
// We need a pointer but we have a value.
const tree = scope.tree();
const tree = gz.tree();
const src_token = tree.firstToken(src_node);
return gz.addUnTok(.ref, result, src_token);
},


@ -78,9 +78,11 @@ next_anon_name_index: usize = 0,
deletion_set: ArrayListUnmanaged(*Decl) = .{},
/// Error tags and their values, tag names are duped with mod.gpa.
global_error_set: std.StringHashMapUnmanaged(u16) = .{},
/// Corresponds with `error_name_list`.
global_error_set: std.StringHashMapUnmanaged(ErrorInt) = .{},
/// error u16 -> []const u8 for fast lookups for @intToError at comptime
/// ErrorInt -> []const u8 for fast lookups for @intToError at comptime
/// Corresponds with `global_error_set`.
error_name_list: ArrayListUnmanaged([]const u8) = .{},
/// Keys are fully qualified paths
@ -108,6 +110,8 @@ emit_h: ?Compilation.EmitLoc,
compile_log_text: ArrayListUnmanaged(u8) = .{},
pub const ErrorInt = u32;
pub const Export = struct {
options: std.builtin.ExportOptions,
src: LazySrcLoc,
@ -341,6 +345,17 @@ pub const EmitH = struct {
fwd_decl: ArrayListUnmanaged(u8) = .{},
};
/// Represents the data that an explicit error set syntax provides.
pub const ErrorSet = struct {
owner_decl: *Decl,
/// Offset from Decl node index, points to the error set AST node.
node_offset: i32,
names_len: u32,
/// The string bytes are stored in the owner Decl arena.
/// They are in the same order they appear in the AST.
names_ptr: [*]const []const u8,
};
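Module.ErrorSet keeps the names as a raw many-item pointer plus a u32 count rather than a slice, so the payload stays small; the string bytes live in the owner Decl's arena. A minimal sketch of reading the names back out, assuming a hypothetical `error_set: *Module.ErrorSet` is already in hand:

    const names: []const []const u8 = error_set.names_ptr[0..error_set.names_len];
    for (names) |name| {
        std.debug.print("error.{s}\n", .{name});
    }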
/// Some Fn struct memory is owned by the Decl's TypedValue.Managed arena allocator.
/// Extern functions do not have this data structure; they are represented by
/// the `Decl` only, with a `Value` tag of `extern_fn`.
@ -1363,6 +1378,13 @@ pub const Scope = struct {
return new_index;
}
pub fn addConst(gz: *GenZir, typed_value: *TypedValue) !zir.Inst.Ref {
return gz.add(.{
.tag = .@"const",
.data = .{ .@"const" = typed_value },
});
}
pub fn add(gz: *GenZir, inst: zir.Inst) !zir.Inst.Ref {
return gz.astgen.indexToRef(try gz.addAsIndex(inst));
}
@ -3362,7 +3384,7 @@ fn createNewDecl(
}
/// Get error value for error tag `name`.
pub fn getErrorValue(mod: *Module, name: []const u8) !std.StringHashMapUnmanaged(u16).Entry {
pub fn getErrorValue(mod: *Module, name: []const u8) !std.StringHashMapUnmanaged(ErrorInt).Entry {
const gop = try mod.global_error_set.getOrPut(mod.gpa, name);
if (gop.found_existing)
return gop.entry.*;
@ -3370,7 +3392,7 @@ pub fn getErrorValue(mod: *Module, name: []const u8) !std.StringHashMapUnmanaged
errdefer mod.global_error_set.removeAssertDiscard(name);
try mod.error_name_list.ensureCapacity(mod.gpa, mod.error_name_list.items.len + 1);
gop.entry.key = try mod.gpa.dupe(u8, name);
gop.entry.value = @intCast(u16, mod.error_name_list.items.len);
gop.entry.value = @intCast(ErrorInt, mod.error_name_list.items.len);
mod.error_name_list.appendAssumeCapacity(gop.entry.key);
return gop.entry.*;
}
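getErrorValue interns each name exactly once: the name keys global_error_set, and the ErrorInt it maps to doubles as the index into error_name_list, which is what lets @intToError turn the integer back into a name at comptime. A rough round-trip sketch, assuming a `mod: *Module` is available:

    const entry = try mod.getErrorValue("OutOfMemory");
    const int: Module.ErrorInt = entry.value;
    // error_name_list is kept in lockstep with global_error_set.
    const name = mod.error_name_list.items[int];
    std.debug.assert(std.mem.eql(u8, name, "OutOfMemory"));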
@ -3580,9 +3602,9 @@ pub fn createAnonymousDecl(
new_decl.analysis = .complete;
new_decl.generation = mod.generation;
// TODO: This generates the Decl into the machine code file if it is of a type that is non-zero size.
// We should be able to further improve the compiler to not omit Decls which are only referenced at
// compile-time and not runtime.
// TODO: This generates the Decl into the machine code file if it is of a
// type that is non-zero size. We should be able to further improve the
// compiler to omit Decls which are only referenced at compile-time and not runtime.
if (typed_value.ty.hasCodeGenBits()) {
try mod.comp.bin_file.allocateDeclIndexes(new_decl);
try mod.comp.work_queue.writeItem(.{ .codegen_decl = new_decl });


@ -174,7 +174,6 @@ pub fn analyzeBody(
.err_union_payload_safe_ptr => try sema.zirErrUnionPayloadPtr(block, inst, true),
.err_union_payload_unsafe => try sema.zirErrUnionPayload(block, inst, false),
.err_union_payload_unsafe_ptr => try sema.zirErrUnionPayloadPtr(block, inst, false),
.error_set => try sema.zirErrorSet(block, inst),
.error_union_type => try sema.zirErrorUnionType(block, inst),
.error_value => try sema.zirErrorValue(block, inst),
.error_to_int => try sema.zirErrorToInt(block, inst),
@ -1409,41 +1408,6 @@ fn zirErrorUnionType(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) Inn
return sema.mod.constType(sema.arena, src, err_union_ty);
}
fn zirErrorSet(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
if (true) @panic("TODO update for zir-memory-layout branch");
// The owner Decl arena will store the hashmap.
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
const payload = try new_decl_arena.allocator.create(Value.Payload.ErrorSet);
payload.* = .{
.base = .{ .tag = .error_set },
.data = .{
.fields = .{},
.decl = undefined, // populated below
},
};
try payload.data.fields.ensureCapacity(&new_decl_arena.allocator, @intCast(u32, inst.positionals.fields.len));
for (inst.positionals.fields) |field_name| {
const entry = try sema.mod.getErrorValue(field_name);
if (payload.data.fields.fetchPutAssumeCapacity(entry.key, {})) |_| {
return sema.mod.fail(&block.base, inst.base.src, "duplicate error: '{s}'", .{field_name});
}
}
// TODO create name in format "error:line:column"
const new_decl = try sema.mod.createAnonymousDecl(&block.base, &new_decl_arena, .{
.ty = Type.initTag(.type),
.val = Value.initPayload(&payload.base),
});
payload.data.decl = new_decl;
return sema.analyzeDeclVal(block, inst.base.src, new_decl);
}
fn zirErrorValue(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@ -1537,71 +1501,67 @@ fn zirMergeErrorSets(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) Inn
if (lhs_ty.zigTypeTag() != .ErrorSet)
return sema.mod.fail(&block.base, lhs_src, "expected error set type, found {}", .{lhs_ty});
// anything merged with anyerror is anyerror
if (lhs_ty.tag() == .anyerror or rhs_ty.tag() == .anyerror)
// Anything merged with anyerror is anyerror.
if (lhs_ty.tag() == .anyerror or rhs_ty.tag() == .anyerror) {
return sema.mod.constInst(sema.arena, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.anyerror_type),
});
// The declarations arena will store the hashmap.
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
const payload = try new_decl_arena.allocator.create(Value.Payload.ErrorSet);
payload.* = .{
.base = .{ .tag = .error_set },
.data = .{
.fields = .{},
.decl = undefined, // populated below
},
};
try payload.data.fields.ensureCapacity(&new_decl_arena.allocator, @intCast(u32, switch (rhs_ty.tag()) {
.error_set_single => 1,
.error_set => rhs_ty.castTag(.error_set).?.data.typed_value.most_recent.typed_value.val.castTag(.error_set).?.data.fields.size,
else => unreachable,
} + switch (lhs_ty.tag()) {
.error_set_single => 1,
.error_set => lhs_ty.castTag(.error_set).?.data.typed_value.most_recent.typed_value.val.castTag(.error_set).?.data.fields.size,
else => unreachable,
}));
}
// When we support inferred error sets, we'll want to use a data structure that can
// represent a merged set of errors without forcing them to be resolved here. Until then
// we re-use the same data structure that is used for explicit error set declarations.
var set: std.StringHashMapUnmanaged(void) = .{};
defer set.deinit(sema.gpa);
switch (lhs_ty.tag()) {
.error_set_single => {
const name = lhs_ty.castTag(.error_set_single).?.data;
payload.data.fields.putAssumeCapacity(name, {});
try set.put(sema.gpa, name, {});
},
.error_set => {
var multiple = lhs_ty.castTag(.error_set).?.data.typed_value.most_recent.typed_value.val.castTag(.error_set).?.data.fields;
var it = multiple.iterator();
while (it.next()) |entry| {
payload.data.fields.putAssumeCapacity(entry.key, entry.value);
const lhs_set = lhs_ty.castTag(.error_set).?.data;
try set.ensureCapacity(sema.gpa, set.count() + lhs_set.names_len);
for (lhs_set.names_ptr[0..lhs_set.names_len]) |name| {
set.putAssumeCapacityNoClobber(name, {});
}
},
else => unreachable,
}
switch (rhs_ty.tag()) {
.error_set_single => {
const name = rhs_ty.castTag(.error_set_single).?.data;
payload.data.fields.putAssumeCapacity(name, {});
try set.put(sema.gpa, name, {});
},
.error_set => {
var multiple = rhs_ty.castTag(.error_set).?.data.typed_value.most_recent.typed_value.val.castTag(.error_set).?.data.fields;
var it = multiple.iterator();
while (it.next()) |entry| {
payload.data.fields.putAssumeCapacity(entry.key, entry.value);
const rhs_set = rhs_ty.castTag(.error_set).?.data;
try set.ensureCapacity(sema.gpa, set.count() + rhs_set.names_len);
for (rhs_set.names_ptr[0..rhs_set.names_len]) |name| {
set.putAssumeCapacity(name, {});
}
},
else => unreachable,
}
// TODO create name in format "error:line:column"
const new_decl = try sema.mod.createAnonymousDecl(&block.base, &new_decl_arena, .{
.ty = Type.initTag(.type),
.val = Value.initPayload(&payload.base),
});
payload.data.decl = new_decl;
return sema.analyzeDeclVal(block, src, new_decl);
const new_names = try sema.arena.alloc([]const u8, set.count());
var it = set.iterator();
var i: usize = 0;
while (it.next()) |entry| : (i += 1) {
new_names[i] = entry.key;
}
const new_error_set = try sema.arena.create(Module.ErrorSet);
new_error_set.* = .{
.owner_decl = sema.owner_decl,
.node_offset = inst_data.src_node,
.names_ptr = new_names.ptr,
.names_len = @intCast(u32, new_names.len),
};
const error_set_ty = try Type.Tag.error_set.create(sema.arena, new_error_set);
return sema.mod.constInst(sema.arena, src, .{
.ty = Type.initTag(.type),
.val = try Value.Tag.ty.create(sema.arena, error_set_ty),
});
}
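Until inferred error sets are implemented, the merge reuses the explicit-declaration representation: both operands' names are poured into a temporary void-valued string map so duplicates collapse, and the surviving keys become the merged name slice. A standalone sketch of that dedup step (the allocator `gpa` and the literal names are placeholders):

    var set: std.StringHashMapUnmanaged(void) = .{};
    defer set.deinit(gpa);
    for ([_][]const u8{ "A", "B", "D" }) |name| try set.put(gpa, name, {});
    for ([_][]const u8{ "A", "B", "C" }) |name| try set.put(gpa, name, {});
    // set.count() == 4: A, B, C and D each survive exactly once.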
fn zirEnumLiteral(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
@ -3441,20 +3401,25 @@ fn namedFieldPtr(
const child_type = try val.toType(sema.arena);
switch (child_type.zigTypeTag()) {
.ErrorSet => {
var name: []const u8 = undefined;
// TODO resolve inferred error sets
if (val.castTag(.error_set)) |payload|
name = (payload.data.fields.getEntry(field_name) orelse return sema.mod.fail(&block.base, src, "no error named '{s}' in '{}'", .{ field_name, child_type })).key
else
name = (try sema.mod.getErrorValue(field_name)).key;
const result_type = if (child_type.tag() == .anyerror)
try Type.Tag.error_set_single.create(sema.arena, name)
else
child_type;
const name: []const u8 = if (child_type.castTag(.error_set)) |payload| blk: {
const error_set = payload.data;
// TODO this is O(N). I'm putting off solving this until we solve inferred
// error sets at the same time.
const names = error_set.names_ptr[0..error_set.names_len];
for (names) |name| {
if (mem.eql(u8, field_name, name)) {
break :blk name;
}
}
return sema.mod.fail(&block.base, src, "no error named '{s}' in '{}'", .{
field_name,
child_type,
});
} else (try sema.mod.getErrorValue(field_name)).key;
return sema.mod.constInst(sema.arena, src, .{
.ty = try sema.mod.simplePtrType(sema.arena, result_type, false, .One),
.ty = try sema.mod.simplePtrType(sema.arena, child_type, false, .One),
.val = try Value.Tag.ref_val.create(
sema.arena,
try Value.Tag.@"error".create(sema.arena, .{
@ -4201,15 +4166,35 @@ fn wrapErrorUnion(sema: *Sema, block: *Scope.Block, dest_type: Type, inst: *Inst
} else switch (err_union.data.error_set.tag()) {
.anyerror => val,
.error_set_single => blk: {
const expected_name = val.castTag(.@"error").?.data.name;
const n = err_union.data.error_set.castTag(.error_set_single).?.data;
if (!mem.eql(u8, val.castTag(.@"error").?.data.name, n))
return sema.mod.fail(&block.base, inst.src, "expected type '{}', found type '{}'", .{ err_union.data.error_set, inst.ty });
if (!mem.eql(u8, expected_name, n)) {
return sema.mod.fail(
&block.base,
inst.src,
"expected type '{}', found type '{}'",
.{ err_union.data.error_set, inst.ty },
);
}
break :blk val;
},
.error_set => blk: {
const f = err_union.data.error_set.castTag(.error_set).?.data.typed_value.most_recent.typed_value.val.castTag(.error_set).?.data.fields;
if (f.get(val.castTag(.@"error").?.data.name) == null)
return sema.mod.fail(&block.base, inst.src, "expected type '{}', found type '{}'", .{ err_union.data.error_set, inst.ty });
const expected_name = val.castTag(.@"error").?.data.name;
const error_set = err_union.data.error_set.castTag(.error_set).?.data;
const names = error_set.names_ptr[0..error_set.names_len];
// TODO this is O(N). I'm putting off solving this until we solve inferred
// error sets at the same time.
const found = for (names) |name| {
if (mem.eql(u8, expected_name, name)) break true;
} else false;
if (!found) {
return sema.mod.fail(
&block.base,
inst.src,
"expected type '{}', found type '{}'",
.{ err_union.data.error_set, inst.ty },
);
}
break :blk val;
},
else => unreachable,


@ -606,7 +606,7 @@ pub const Type = extern union {
.payload = try payload.payload.copy(allocator),
});
},
.error_set => return self.copyPayloadShallow(allocator, Payload.Decl),
.error_set => return self.copyPayloadShallow(allocator, Payload.ErrorSet),
.error_set_single => return self.copyPayloadShallow(allocator, Payload.Name),
.empty_struct => return self.copyPayloadShallow(allocator, Payload.ContainerScope),
@ -831,8 +831,8 @@ pub const Type = extern union {
continue;
},
.error_set => {
const decl = ty.castTag(.error_set).?.data;
return out_stream.writeAll(std.mem.spanZ(decl.name));
const error_set = ty.castTag(.error_set).?.data;
return out_stream.writeAll(std.mem.spanZ(error_set.owner_decl.name));
},
.error_set_single => {
const name = ty.castTag(.error_set_single).?.data;
@ -3464,7 +3464,7 @@ pub const Type = extern union {
.int_unsigned,
=> Payload.Bits,
.error_set => Payload.Decl,
.error_set => Payload.ErrorSet,
.array => Payload.Array,
.array_sentinel => Payload.ArraySentinel,
@ -3548,6 +3548,13 @@ pub const Type = extern union {
},
};
pub const ErrorSet = struct {
pub const base_tag = Tag.error_set;
base: Payload = Payload{ .tag = base_tag },
data: *Module.ErrorSet,
};
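The Type payload carries only a pointer to a Module.ErrorSet, so building the comptime type value takes two small arena allocations, mirroring what errorSetDecl and zirMergeErrorSets do above. A hedged sketch with `arena` and `module_error_set` as stand-ins:

    const error_set_ty = try Type.Tag.error_set.create(arena, module_error_set);
    const ty_val = try Value.Tag.ty.create(arena, error_set_ty);
    // Paired with Type.initTag(.type), ty_val forms the TypedValue for the error set type.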
pub const Pointer = struct {
pub const base_tag = Tag.pointer;


@ -102,7 +102,6 @@ pub const Value = extern union {
float_64,
float_128,
enum_literal,
error_set,
@"error",
error_union,
/// This is a special value that tracks a set of types that have been stored
@ -196,7 +195,6 @@ pub const Value = extern union {
.float_32 => Payload.Float_32,
.float_64 => Payload.Float_64,
.float_128 => Payload.Float_128,
.error_set => Payload.ErrorSet,
.@"error" => Payload.Error,
.inferred_alloc => Payload.InferredAlloc,
};
@ -404,7 +402,6 @@ pub const Value = extern union {
return Value{ .ptr_otherwise = &new_payload.base };
},
.error_set => return self.copyPayloadShallow(allocator, Payload.ErrorSet),
.inferred_alloc => unreachable,
}
}
@ -515,15 +512,6 @@ pub const Value = extern union {
.float_32 => return out_stream.print("{}", .{val.castTag(.float_32).?.data}),
.float_64 => return out_stream.print("{}", .{val.castTag(.float_64).?.data}),
.float_128 => return out_stream.print("{}", .{val.castTag(.float_128).?.data}),
.error_set => {
const error_set = val.castTag(.error_set).?.data;
try out_stream.writeAll("error{");
var it = error_set.fields.iterator();
while (it.next()) |entry| {
try out_stream.print("{},", .{entry.value});
}
return out_stream.writeAll("}");
},
.@"error" => return out_stream.print("error.{s}", .{val.castTag(.@"error").?.data.name}),
// TODO to print this it should be error{ Set, Items }!T(val), but we need the type for that
.error_union => return out_stream.print("error_union_val({})", .{val.castTag(.error_union).?.data}),
@ -608,10 +596,6 @@ pub const Value = extern union {
};
return Type.initPayload(&new.base);
},
.error_set => {
const payload = self.castTag(.error_set).?.data;
return Type.Tag.error_set.create(allocator, payload.decl);
},
.undef,
.zero,
@ -711,7 +695,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.error_union,
.@"error",
.empty_struct_value,
@ -799,7 +782,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -887,7 +869,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1003,7 +984,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1095,7 +1075,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1256,7 +1235,6 @@ pub const Value = extern union {
.void_value,
.unreachable_value,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1335,7 +1313,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1476,15 +1453,10 @@ pub const Value = extern union {
.enum_literal_type,
.ty,
=> {
// Directly return Type.hash, toType can only fail for .int_type and .error_set.
// Directly return Type.hash, toType can only fail for .int_type.
var allocator = std.heap.FixedBufferAllocator.init(&[_]u8{});
return (self.toType(&allocator.allocator) catch unreachable).hash();
},
.error_set => {
// Payload.decl should be same for all instances of the type.
const payload = self.castTag(.error_set).?.data;
std.hash.autoHash(&hasher, payload.decl);
},
.int_type => {
const payload = self.castTag(.int_type).?.data;
var int_payload = Type.Payload.Bits{
@ -1656,7 +1628,6 @@ pub const Value = extern union {
.unreachable_value,
.empty_array,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1744,7 +1715,6 @@ pub const Value = extern union {
.void_value,
.unreachable_value,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1849,7 +1819,6 @@ pub const Value = extern union {
.float_128,
.void_value,
.enum_literal,
.error_set,
.@"error",
.error_union,
.empty_struct_value,
@ -1933,7 +1902,6 @@ pub const Value = extern union {
.float_128,
.void_value,
.enum_literal,
.error_set,
.empty_struct_value,
=> null,
@ -2012,7 +1980,6 @@ pub const Value = extern union {
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.enum_literal_type,
.error_set,
=> true,
.zero,
@ -2156,18 +2123,6 @@ pub const Value = extern union {
data: f128,
};
/// TODO move to type.zig
pub const ErrorSet = struct {
pub const base_tag = Tag.error_set;
base: Payload = .{ .tag = base_tag },
data: struct {
/// TODO revisit this when we have the concept of the error tag type
fields: std.StringHashMapUnmanaged(void),
decl: *Module.Decl,
},
};
pub const Error = struct {
base: Payload = .{ .tag = .@"error" },
data: struct {


@ -314,11 +314,6 @@ pub const Inst = struct {
/// Create a `E!T` type.
/// Uses the `pl_node` field with `Bin` payload.
error_union_type,
/// Create an error set. TODO can't we just do this in astgen? reconsider
/// memory layout of error sets. if astgen wants to make Sema do the work,
/// this ZIR instruction could just be an AST node index. If astgen wants to
/// do the work, it could use a const instruction.
error_set,
/// `error.Foo` syntax. Uses the `str_tok` field of the Data union.
error_value,
/// Given a pointer to a struct or object that contains virtual fields, returns a pointer
@ -742,7 +737,6 @@ pub const Inst = struct {
.merge_error_sets,
.error_union_type,
.bit_not,
.error_set,
.error_value,
.slice_start,
.slice_end,
@ -1459,8 +1453,6 @@ const Writer = struct {
.asm_volatile,
.elem_ptr_node,
.elem_val_node,
.field_ptr,
.field_val,
.field_ptr_named,
.field_val_named,
.floatcast,
@ -1519,6 +1511,10 @@ const Writer = struct {
.decl_val,
=> try self.writePlNodeDecl(stream, inst),
.field_ptr,
.field_val,
=> try self.writePlNodeField(stream, inst),
.as_node => try self.writeAs(stream, inst),
.breakpoint,
@ -1544,7 +1540,6 @@ const Writer = struct {
.bitcast,
.bitcast_ref,
.bitcast_result_ptr,
.error_set,
.store_to_inferred_ptr,
=> try stream.writeAll("TODO)"),
}
@ -1733,6 +1728,15 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
fn writePlNodeField(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Inst.Field, inst_data.payload_index).data;
const name = self.code.nullTerminatedString(extra.field_name_start);
try self.writeInstRef(stream, extra.lhs);
try stream.print(", \"{}\") ", .{std.zig.fmtEscapes(name)});
try self.writeSrc(stream, inst_data.src());
}
fn writeAs(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Inst.As, inst_data.payload_index).data;


@ -1565,37 +1565,37 @@ pub fn addCases(ctx: *TestContext) !void {
\\}
, "");
}
//{
// var case = ctx.exe("merge error sets", linux_x64);
{
var case = ctx.exe("merge error sets", linux_x64);
// case.addCompareOutput(
// \\export fn _start() noreturn {
// \\ const E = error{ A, B, D } || error { A, B, C };
// \\ const a = E.A;
// \\ const b = E.B;
// \\ const c = E.C;
// \\ const d = E.D;
// \\ const E2 = error { X, Y } || @TypeOf(error.Z);
// \\ const x = E2.X;
// \\ const y = E2.Y;
// \\ const z = E2.Z;
// \\ assert(anyerror || error { Z } == anyerror);
// \\ exit();
// \\}
// \\fn assert(b: bool) void {
// \\ if (!b) unreachable;
// \\}
// \\fn exit() noreturn {
// \\ asm volatile ("syscall"
// \\ :
// \\ : [number] "{rax}" (231),
// \\ [arg1] "{rdi}" (0)
// \\ : "rcx", "r11", "memory"
// \\ );
// \\ unreachable;
// \\}
// ,
// "",
// );
//}
case.addCompareOutput(
\\export fn _start() noreturn {
\\ const E = error{ A, B, D } || error { A, B, C };
\\ const a = E.A;
\\ const b = E.B;
\\ const c = E.C;
\\ const d = E.D;
\\ const E2 = error { X, Y } || @TypeOf(error.Z);
\\ const x = E2.X;
\\ const y = E2.Y;
\\ const z = E2.Z;
\\ assert(anyerror || error { Z } == anyerror);
\\ exit();
\\}
\\fn assert(b: bool) void {
\\ if (!b) unreachable;
\\}
\\fn exit() noreturn {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ : "rcx", "r11", "memory"
\\ );
\\ unreachable;
\\}
,
"",
);
}
}