stage2: rework Type Payload layout

Add `Type.castTag` and note that it is preferable to call it rather than
`Type.cast`. This matches other abstractions in the codebase.
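For illustration, here is the call-style change at one call site, condensed from the `Container.deinit` hunk further down (not new code); with payload types now shared between tags, casting by tag stays unambiguous where casting by payload struct would not:

```zig
// Before: cast by naming the payload struct type.
gpa.destroy(self.ty.cast(Type.Payload.EmptyStruct).?);

// After: cast by naming the tag; the payload struct is implied by the tag.
gpa.destroy(self.ty.castTag(.empty_struct).?);
```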

Add a convenience function `Type.Tag.create`, which cleans up the call sites
that create `Type` objects.
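For example, condensed from the `arrayType` hunk below, a call site goes from allocate-initialize-wrap to a single call:

```zig
// Before: allocate the payload struct, fill it in, then wrap its base pointer.
const payload = try scope.arena().create(Type.Payload.Array_u8);
payload.* = .{ .len = len };
return Type.initPayload(&payload.base);

// After: one call; the tag selects the payload type and handles the boilerplate.
return Type.Tag.array_u8.create(scope.arena(), len);
```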

Multiple `Type` tags can now share a single payload type (for example,
`int_signed` and `int_unsigned` both use `Payload.Bits`). This is in
preparation for another improvement I want to do.
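A rough, self-contained sketch of that layout (illustrative only; names like `Ty` are stand-ins and the real definitions differ; the `@fieldParentPtr(T, "base", ptr)` form matches the 2020-era Zig used in this diff):

```zig
const std = @import("std");

const Tag = enum {
    int_signed,
    int_unsigned,

    // Both integer tags map to the same payload struct.
    pub fn Type(comptime t: Tag) type {
        return switch (t) {
            .int_signed, .int_unsigned => Payload.Bits,
        };
    }
};

const Payload = struct {
    tag: Tag,

    // One payload type shared by several tags: the tag lives in `base`,
    // the tag-specific data in `data`.
    pub const Bits = struct {
        base: Payload,
        data: u16,
    };
};

const Ty = struct {
    payload: *Payload,

    // Succeed only if the tag matches, then recover the full payload
    // struct from its embedded `base` field.
    pub fn castTag(self: Ty, comptime t: Tag) ?*t.Type() {
        if (self.payload.tag != t) return null;
        return @fieldParentPtr(t.Type(), "base", self.payload);
    }
};

test "two tags sharing one payload type" {
    var signed = Payload.Bits{ .base = .{ .tag = .int_signed }, .data = 32 };
    var unsigned = Payload.Bits{ .base = .{ .tag = .int_unsigned }, .data = 8 };

    const a = Ty{ .payload = &signed.base };
    const b = Ty{ .payload = &unsigned.base };

    std.debug.assert(a.castTag(.int_signed).?.data == 32);
    std.debug.assert(a.castTag(.int_unsigned) == null);
    std.debug.assert(b.castTag(.int_unsigned).?.data == 8);
}
```

The same sharing shows up in the hunks below as `Type.Payload.Bits` (for `int_signed`/`int_unsigned`) and `Type.Payload.ElemType` (for the single-pointer and optional-pointer tags).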
Andrew Kelley 2020-12-30 19:57:11 -07:00
parent 2622575fde
commit 133da8692e
8 changed files with 432 additions and 403 deletions


@@ -825,9 +825,11 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
const root_scope = rs: {
if (mem.endsWith(u8, root_pkg.root_src_path, ".zig")) {
const struct_payload = try gpa.create(Type.Payload.EmptyStruct);
const root_scope = try gpa.create(Module.Scope.File);
struct_payload.* = .{ .scope = &root_scope.root_container };
const struct_ty = try Type.Tag.empty_struct.create(
gpa,
&root_scope.root_container,
);
root_scope.* = .{
// TODO this is duped so it can be freed in Container.deinit
.sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
@@ -838,7 +840,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
.root_container = .{
.file_scope = root_scope,
.decls = .{},
.ty = Type.initPayload(&struct_payload.base),
.ty = struct_ty,
},
};
break :rs &root_scope.base;


@@ -562,7 +562,7 @@ pub const Scope = struct {
pub fn deinit(self: *Container, gpa: *Allocator) void {
self.decls.deinit(gpa);
// TODO either Container of File should have an arena for sub_file_path and ty
gpa.destroy(self.ty.cast(Type.Payload.EmptyStruct).?);
gpa.destroy(self.ty.castTag(.empty_struct).?);
gpa.free(self.file_scope.sub_file_path);
self.* = undefined;
}
@@ -2528,12 +2528,11 @@ pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: []
}
// TODO Scope.Container arena for ty and sub_file_path
const struct_payload = try self.gpa.create(Type.Payload.EmptyStruct);
errdefer self.gpa.destroy(struct_payload);
const file_scope = try self.gpa.create(Scope.File);
errdefer self.gpa.destroy(file_scope);
const struct_ty = try Type.Tag.empty_struct.create(self.gpa, &file_scope.root_container);
errdefer self.gpa.destroy(struct_ty.castTag(.empty_struct).?);
struct_payload.* = .{ .scope = &file_scope.root_container };
file_scope.* = .{
.sub_file_path = resolved_path,
.source = .{ .unloaded = {} },
@@ -2543,7 +2542,7 @@ pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: []
.root_container = .{
.file_scope = file_scope,
.decls = .{},
.ty = Type.initPayload(&struct_payload.base),
.ty = struct_ty,
},
};
self.analyzeContainer(&file_scope.root_container) catch |err| switch (err) {
@@ -2564,7 +2563,7 @@ pub fn cmpNumeric(
lhs: *Inst,
rhs: *Inst,
op: std.math.CompareOperator,
) !*Inst {
) InnerError!*Inst {
assert(lhs.ty.isNumeric());
assert(rhs.ty.isNumeric());
@@ -2738,15 +2737,14 @@ fn wrapOptional(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*In
}
fn makeIntType(self: *Module, scope: *Scope, signed: bool, bits: u16) !Type {
if (signed) {
const int_payload = try scope.arena().create(Type.Payload.IntSigned);
int_payload.* = .{ .bits = bits };
return Type.initPayload(&int_payload.base);
} else {
const int_payload = try scope.arena().create(Type.Payload.IntUnsigned);
int_payload.* = .{ .bits = bits };
return Type.initPayload(&int_payload.base);
}
const int_payload = try scope.arena().create(Type.Payload.Bits);
int_payload.* = .{
.base = .{
.tag = if (signed) .int_signed else .int_unsigned,
},
.data = bits,
};
return Type.initPayload(&int_payload.base);
}
pub fn resolvePeerTypes(self: *Module, scope: *Scope, instructions: []*Inst) !Type {
@@ -2829,7 +2827,7 @@ pub fn coerce(self: *Module, scope: *Scope, dest_type: Type, inst: *Inst) !*Inst
// T to ?T
if (dest_type.zigTypeTag() == .Optional) {
var buf: Type.Payload.PointerSimple = undefined;
var buf: Type.Payload.ElemType = undefined;
const child_type = dest_type.optionalChild(&buf);
if (child_type.eql(inst.ty)) {
return self.wrapOptional(scope, dest_type, inst);
@@ -3225,7 +3223,7 @@ pub fn simplePtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type, mu
// TODO stage1 type inference bug
const T = Type.Tag;
const type_payload = try scope.arena().create(Type.Payload.PointerSimple);
const type_payload = try scope.arena().create(Type.Payload.ElemType);
type_payload.* = .{
.base = .{
.tag = switch (size) {
@@ -3235,7 +3233,7 @@ pub fn simplePtrType(self: *Module, scope: *Scope, src: usize, elem_ty: Type, mu
.Slice => if (mutable) T.mut_slice else T.const_slice,
},
},
.pointee_type = elem_ty,
.data = elem_ty,
};
return Type.initPayload(&type_payload.base);
}
@@ -3257,8 +3255,7 @@ pub fn ptrType(
assert(host_size == 0 or bit_offset < host_size * 8);
// TODO check if type can be represented by simplePtrType
const type_payload = try scope.arena().create(Type.Payload.Pointer);
type_payload.* = .{
return Type.Tag.pointer.create(scope.arena(), .{
.pointee_type = elem_ty,
.sentinel = sentinel,
.@"align" = @"align",
@@ -3268,95 +3265,73 @@ pub fn ptrType(
.mutable = mutable,
.@"volatile" = @"volatile",
.size = size,
};
return Type.initPayload(&type_payload.base);
}
pub fn optionalType(self: *Module, scope: *Scope, child_type: Type) Allocator.Error!Type {
return Type.initPayload(switch (child_type.tag()) {
.single_const_pointer => blk: {
const payload = try scope.arena().create(Type.Payload.PointerSimple);
payload.* = .{
.base = .{ .tag = .optional_single_const_pointer },
.pointee_type = child_type.elemType(),
};
break :blk &payload.base;
},
.single_mut_pointer => blk: {
const payload = try scope.arena().create(Type.Payload.PointerSimple);
payload.* = .{
.base = .{ .tag = .optional_single_mut_pointer },
.pointee_type = child_type.elemType(),
};
break :blk &payload.base;
},
else => blk: {
const payload = try scope.arena().create(Type.Payload.Optional);
payload.* = .{
.child_type = child_type,
};
break :blk &payload.base;
},
});
}
pub fn arrayType(self: *Module, scope: *Scope, len: u64, sentinel: ?Value, elem_type: Type) Allocator.Error!Type {
pub fn optionalType(self: *Module, scope: *Scope, child_type: Type) Allocator.Error!Type {
switch (child_type.tag()) {
.single_const_pointer => return Type.Tag.optional_single_const_pointer.create(
scope.arena(),
child_type.elemType(),
),
.single_mut_pointer => return Type.Tag.optional_single_mut_pointer.create(
scope.arena(),
child_type.elemType(),
),
else => return Type.Tag.optional.create(scope.arena(), child_type),
}
}
pub fn arrayType(
self: *Module,
scope: *Scope,
len: u64,
sentinel: ?Value,
elem_type: Type,
) Allocator.Error!Type {
if (elem_type.eql(Type.initTag(.u8))) {
if (sentinel) |some| {
if (some.eql(Value.initTag(.zero))) {
const payload = try scope.arena().create(Type.Payload.Array_u8_Sentinel0);
payload.* = .{
.len = len,
};
return Type.initPayload(&payload.base);
return Type.Tag.array_u8_sentinel_0.create(scope.arena(), len);
}
} else {
const payload = try scope.arena().create(Type.Payload.Array_u8);
payload.* = .{
.len = len,
};
return Type.initPayload(&payload.base);
return Type.Tag.array_u8.create(scope.arena(), len);
}
}
if (sentinel) |some| {
const payload = try scope.arena().create(Type.Payload.ArraySentinel);
payload.* = .{
return Type.Tag.array_sentinel.create(scope.arena(), .{
.len = len,
.sentinel = some,
.elem_type = elem_type,
};
return Type.initPayload(&payload.base);
});
}
const payload = try scope.arena().create(Type.Payload.Array);
payload.* = .{
return Type.Tag.array.create(scope.arena(), .{
.len = len,
.elem_type = elem_type,
};
return Type.initPayload(&payload.base);
});
}
pub fn errorUnionType(self: *Module, scope: *Scope, error_set: Type, payload: Type) Allocator.Error!Type {
pub fn errorUnionType(
self: *Module,
scope: *Scope,
error_set: Type,
payload: Type,
) Allocator.Error!Type {
assert(error_set.zigTypeTag() == .ErrorSet);
if (error_set.eql(Type.initTag(.anyerror)) and payload.eql(Type.initTag(.void))) {
return Type.initTag(.anyerror_void_error_union);
}
const result = try scope.arena().create(Type.Payload.ErrorUnion);
result.* = .{
return Type.Tag.error_union.create(scope.arena(), .{
.error_set = error_set,
.payload = payload,
};
return Type.initPayload(&result.base);
});
}
pub fn anyframeType(self: *Module, scope: *Scope, return_type: Type) Allocator.Error!Type {
const result = try scope.arena().create(Type.Payload.AnyFrame);
result.* = .{
.return_type = return_type,
};
return Type.initPayload(&result.base);
return Type.Tag.anyframe_T.create(scope.arena(), return_type);
}
pub fn dumpInst(self: *Module, scope: *Scope, inst: *Inst) void {


@@ -2723,7 +2723,7 @@ fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr
return mod.fail(scope, result.src, "TODO implement rlWrap .bitcasted_ptr", .{});
},
.inferred_ptr => |alloc| {
return mod.fail(scope, result.src, "TODO implement rlWrap .inferred_ptr", .{});
return addZIRBinOp(mod, scope, result.src, .store, &alloc.base, result);
},
.block_ptr => |block_ptr| {
return mod.fail(scope, result.src, "TODO implement rlWrap .block_ptr", .{});


@@ -3262,7 +3262,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
if (typed_value.val.isNull())
return MCValue{ .immediate = 0 };
var buf: Type.Payload.PointerSimple = undefined;
var buf: Type.Payload.ElemType = undefined;
return self.genTypedValue(src, .{
.ty = typed_value.ty.optionalChild(&buf),
.val = typed_value.val,

File diff suppressed because it is too large.


@@ -440,21 +440,18 @@ pub const Value = extern union {
.int_type => {
const payload = self.cast(Payload.IntType).?;
if (payload.signed) {
const new = try allocator.create(Type.Payload.IntSigned);
new.* = .{ .bits = payload.bits };
return Type.initPayload(&new.base);
} else {
const new = try allocator.create(Type.Payload.IntUnsigned);
new.* = .{ .bits = payload.bits };
return Type.initPayload(&new.base);
}
const new = try allocator.create(Type.Payload.Bits);
new.* = .{
.base = .{
.tag = if (payload.signed) .int_signed else .int_unsigned,
},
.data = payload.bits,
};
return Type.initPayload(&new.base);
},
.error_set => {
const payload = self.cast(Payload.ErrorSet).?;
const new = try allocator.create(Type.Payload.ErrorSet);
new.* = .{ .decl = payload.decl };
return Type.initPayload(&new.base);
return Type.Tag.error_set.create(allocator, payload.decl);
},
.undef,
@@ -1321,13 +1318,13 @@
},
.int_type => {
const payload = self.cast(Payload.IntType).?;
if (payload.signed) {
var new = Type.Payload.IntSigned{ .bits = payload.bits };
return Type.initPayload(&new.base).hash();
} else {
var new = Type.Payload.IntUnsigned{ .bits = payload.bits };
return Type.initPayload(&new.base).hash();
}
var int_payload = Type.Payload.Bits{
.base = .{
.tag = if (payload.signed) .int_signed else .int_unsigned,
},
.data = payload.bits,
};
return Type.initPayload(&int_payload.base).hash();
},
.empty_struct_value,


@@ -2785,7 +2785,7 @@ const EmitZIR = struct {
}
},
.Optional => {
var buf: Type.Payload.PointerSimple = undefined;
var buf: Type.Payload.ElemType = undefined;
const inst = try self.arena.allocator.create(Inst.UnOp);
inst.* = .{
.base = .{


@@ -480,14 +480,11 @@ fn analyzeInstStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerErr
errdefer new_decl_arena.deinit();
const arena_bytes = try new_decl_arena.allocator.dupe(u8, str_inst.positionals.bytes);
const ty_payload = try scope.arena().create(Type.Payload.Array_u8_Sentinel0);
ty_payload.* = .{ .len = arena_bytes.len };
const bytes_payload = try scope.arena().create(Value.Payload.Bytes);
bytes_payload.* = .{ .data = arena_bytes };
const new_decl = try mod.createAnonymousDecl(scope, &new_decl_arena, .{
.ty = Type.initPayload(&ty_payload.base),
.ty = try Type.Tag.array_u8_sentinel_0.create(scope.arena(), arena_bytes.len),
.val = Value.initPayload(&bytes_payload.base),
});
return mod.analyzeDeclRef(scope, str_inst.base.src, new_decl);
@@ -952,13 +949,12 @@ fn analyzeInstFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) Inne
param_types[i] = resolved;
}
const payload = try arena.create(Type.Payload.Function);
payload.* = .{
const fn_ty = try Type.Tag.function.create(arena, .{
.cc = fntype.kw_args.cc,
.return_type = return_type,
.param_types = param_types,
};
return mod.constType(scope, fntype.base.src, Type.initPayload(&payload.base));
});
return mod.constType(scope, fntype.base.src, fn_ty);
}
fn analyzeInstPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
@@ -1062,11 +1058,10 @@ fn analyzeInstFieldPtr(mod: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPtr
const ref_payload = try scope.arena().create(Value.Payload.RefVal);
ref_payload.* = .{ .val = Value.initPayload(&error_payload.base) };
const result_type = if (child_type.tag() == .anyerror) blk: {
const result_payload = try scope.arena().create(Type.Payload.ErrorSetSingle);
result_payload.* = .{ .name = entry.key };
break :blk Type.initPayload(&result_payload.base);
} else child_type;
const result_type = if (child_type.tag() == .anyerror)
try Type.Tag.error_set_single.create(scope.arena(), entry.key)
else
child_type;
return mod.constInst(scope, fieldptr.base.src, .{
.ty = try mod.simplePtrType(scope, fieldptr.base.src, result_type, false, .One),
@@ -1195,15 +1190,10 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne
// @intCast here because it would have been impossible to construct a value that
// required a larger index.
const elem_ptr = try array_ptr_val.elemPtr(scope.arena(), @intCast(usize, index_u64));
const type_payload = try scope.arena().create(Type.Payload.PointerSimple);
type_payload.* = .{
.base = .{ .tag = .single_const_pointer },
.pointee_type = elem_ty.elemType().elemType(),
};
const pointee_type = elem_ty.elemType().elemType();
return mod.constInst(scope, inst.base.src, .{
.ty = Type.initPayload(&type_payload.base),
.ty = try Type.Tag.single_const_pointer.create(scope.arena(), pointee_type),
.val = elem_ptr,
});
}