Mirror of https://github.com/ziglang/zig.git

commit a6ed3e6d29
Merge pull request #19470 from jacobly0/field-parent-ptr

Rework `@fieldParentPtr` to use RLS
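The rework replaces the old three-argument builtin with a two-argument form whose parent type is taken from the result location (RLS). A minimal sketch of the migration pattern applied throughout the diff below, using an illustrative Point type that is not taken from the commit:

    const Point = struct { x: f32, y: f32 };

    fn parentOfX(x: *f32) *Point {
        // Old form: const point = @fieldParentPtr(Point, "x", x);
        // New form: the parent type comes from the result type, here the
        // function's return type *Point.
        return @fieldParentPtr("x", x);
    }

Call sites with no result type in scope spell the parent type with @as instead, for example @as(*Elf, @fieldParentPtr("obj", obj)).deinit() in the Object.zig hunks, and add @alignCast where the field pointer is less aligned than its parent.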
@@ -564,7 +564,7 @@ set(ZIG_STAGE2_SOURCES
 "${CMAKE_SOURCE_DIR}/src/clang_options_data.zig"
 "${CMAKE_SOURCE_DIR}/src/codegen.zig"
 "${CMAKE_SOURCE_DIR}/src/codegen/c.zig"
-"${CMAKE_SOURCE_DIR}/src/codegen/c/type.zig"
+"${CMAKE_SOURCE_DIR}/src/codegen/c/Type.zig"
 "${CMAKE_SOURCE_DIR}/src/codegen/llvm.zig"
 "${CMAKE_SOURCE_DIR}/src/codegen/llvm/bindings.zig"
 "${CMAKE_SOURCE_DIR}/src/glibc.zig"
@@ -16,9 +16,7 @@ pub fn build(b: *std.Build) !void {
 const only_c = b.option(bool, "only-c", "Translate the Zig compiler to C code, with only the C backend enabled") orelse false;
 const target = t: {
 var default_target: std.zig.CrossTarget = .{};
-if (only_c) {
-default_target.ofmt = .c;
-}
+default_target.ofmt = b.option(std.Target.ObjectFormat, "ofmt", "Object format to target") orelse if (only_c) .c else null;
 break :t b.standardTargetOptions(.{ .default_target = default_target });
 };

@@ -3107,7 +3107,7 @@ test "struct namespaced variable" {
 // struct field order is determined by the compiler for optimal performance.
 // however, you can still calculate a struct base pointer given a field pointer:
 fn setYBasedOnX(x: *f32, y: f32) void {
-const point = @fieldParentPtr(Point, "x", x);
+const point: *Point = @fieldParentPtr("x", x);
 point.y = y;
 }
 test "field parent pointer" {
@@ -8757,8 +8757,7 @@ test "decl access by string" {
 {#header_close#}

 {#header_open|@fieldParentPtr#}
-<pre>{#syntax#}@fieldParentPtr(comptime ParentType: type, comptime field_name: []const u8,
-field_ptr: *T) *ParentType{#endsyntax#}</pre>
+<pre>{#syntax#}@fieldParentPtr(comptime field_name: []const u8, field_ptr: *T) anytype{#endsyntax#}</pre>
 <p>
 Given a pointer to a field, returns the base pointer of a struct.
 </p>
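To complement the updated documentation above, here is a small self-contained test of the new builtin; the Base and Widget names are illustrative and not part of the commit. Because the base field here is less aligned than its parent, recovering the parent also needs @alignCast, the same @alignCast(@fieldParentPtr(...)) pattern used in the rest of this diff:

    const std = @import("std");

    const Base = struct { id: u8 };
    const Widget = struct { base: Base, value: u32 };

    fn widgetValue(base: *const Base) u32 {
        // The result type (*const Widget) names the parent; @alignCast raises
        // the align(1) field pointer back to the parent's alignment.
        const widget: *const Widget = @alignCast(@fieldParentPtr("base", base));
        return widget.value;
    }

    test "parent type is inferred from the result location" {
        const w: Widget = .{ .base = .{ .id = 1 }, .value = 42 };
        try std.testing.expect(widgetValue(&w.base) == 42);
    }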
lib/compiler/aro/aro/pragmas/gcc.zig (vendored, 12 changes)
@@ -37,18 +37,18 @@ const Directive = enum {
 };

 fn beforePreprocess(pragma: *Pragma, comp: *Compilation) void {
-var self = @fieldParentPtr(GCC, "pragma", pragma);
+var self: *GCC = @fieldParentPtr("pragma", pragma);
 self.original_options = comp.diagnostics.options;
 }

 fn beforeParse(pragma: *Pragma, comp: *Compilation) void {
-var self = @fieldParentPtr(GCC, "pragma", pragma);
+var self: *GCC = @fieldParentPtr("pragma", pragma);
 comp.diagnostics.options = self.original_options;
 self.options_stack.items.len = 0;
 }

 fn afterParse(pragma: *Pragma, comp: *Compilation) void {
-var self = @fieldParentPtr(GCC, "pragma", pragma);
+var self: *GCC = @fieldParentPtr("pragma", pragma);
 comp.diagnostics.options = self.original_options;
 self.options_stack.items.len = 0;
 }
@@ -60,7 +60,7 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
 }

 fn deinit(pragma: *Pragma, comp: *Compilation) void {
-var self = @fieldParentPtr(GCC, "pragma", pragma);
+var self: *GCC = @fieldParentPtr("pragma", pragma);
 self.options_stack.deinit(comp.gpa);
 comp.gpa.destroy(self);
 }
@@ -108,7 +108,7 @@ fn diagnosticHandler(self: *GCC, pp: *Preprocessor, start_idx: TokenIndex) Pragm
 }

 fn preprocessorHandler(pragma: *Pragma, pp: *Preprocessor, start_idx: TokenIndex) Pragma.Error!void {
-var self = @fieldParentPtr(GCC, "pragma", pragma);
+var self: *GCC = @fieldParentPtr("pragma", pragma);
 const directive_tok = pp.tokens.get(start_idx + 1);
 if (directive_tok.id == .nl) return;

@@ -174,7 +174,7 @@ fn preprocessorHandler(pragma: *Pragma, pp: *Preprocessor, start_idx: TokenIndex
 }

 fn parserHandler(pragma: *Pragma, p: *Parser, start_idx: TokenIndex) Compilation.Error!void {
-var self = @fieldParentPtr(GCC, "pragma", pragma);
+var self: *GCC = @fieldParentPtr("pragma", pragma);
 const directive_tok = p.pp.tokens.get(start_idx + 1);
 if (directive_tok.id == .nl) return;
 const name = p.pp.expandedSlice(directive_tok);
lib/compiler/aro/aro/pragmas/message.zig (vendored, 2 changes)
@@ -22,7 +22,7 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
 }

 fn deinit(pragma: *Pragma, comp: *Compilation) void {
-const self = @fieldParentPtr(Message, "pragma", pragma);
+const self: *Message = @fieldParentPtr("pragma", pragma);
 comp.gpa.destroy(self);
 }

lib/compiler/aro/aro/pragmas/once.zig (vendored, 6 changes)
@@ -27,18 +27,18 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
 }

 fn afterParse(pragma: *Pragma, _: *Compilation) void {
-var self = @fieldParentPtr(Once, "pragma", pragma);
+var self: *Once = @fieldParentPtr("pragma", pragma);
 self.pragma_once.clearRetainingCapacity();
 }

 fn deinit(pragma: *Pragma, comp: *Compilation) void {
-var self = @fieldParentPtr(Once, "pragma", pragma);
+var self: *Once = @fieldParentPtr("pragma", pragma);
 self.pragma_once.deinit();
 comp.gpa.destroy(self);
 }

 fn preprocessorHandler(pragma: *Pragma, pp: *Preprocessor, start_idx: TokenIndex) Pragma.Error!void {
-var self = @fieldParentPtr(Once, "pragma", pragma);
+var self: *Once = @fieldParentPtr("pragma", pragma);
 const name_tok = pp.tokens.get(start_idx);
 const next = pp.tokens.get(start_idx + 1);
 if (next.id != .nl) {
lib/compiler/aro/aro/pragmas/pack.zig (vendored, 4 changes)
@@ -24,13 +24,13 @@ pub fn init(allocator: mem.Allocator) !*Pragma {
 }

 fn deinit(pragma: *Pragma, comp: *Compilation) void {
-var self = @fieldParentPtr(Pack, "pragma", pragma);
+var self: *Pack = @fieldParentPtr("pragma", pragma);
 self.stack.deinit(comp.gpa);
 comp.gpa.destroy(self);
 }

 fn parserHandler(pragma: *Pragma, p: *Parser, start_idx: TokenIndex) Compilation.Error!void {
-var pack = @fieldParentPtr(Pack, "pragma", pragma);
+var pack: *Pack = @fieldParentPtr("pragma", pragma);
 var idx = start_idx + 1;
 const l_paren = p.pp.tokens.get(idx);
 if (l_paren.id != .l_paren) {
lib/compiler/aro/backend/Object.zig (vendored, 10 changes)
@@ -16,7 +16,7 @@ pub fn create(gpa: Allocator, target: std.Target) !*Object {

 pub fn deinit(obj: *Object) void {
 switch (obj.format) {
-.elf => @fieldParentPtr(Elf, "obj", obj).deinit(),
+.elf => @as(*Elf, @fieldParentPtr("obj", obj)).deinit(),
 else => unreachable,
 }
 }
@@ -32,7 +32,7 @@ pub const Section = union(enum) {

 pub fn getSection(obj: *Object, section: Section) !*std.ArrayList(u8) {
 switch (obj.format) {
-.elf => return @fieldParentPtr(Elf, "obj", obj).getSection(section),
+.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).getSection(section),
 else => unreachable,
 }
 }
@@ -53,21 +53,21 @@ pub fn declareSymbol(
 size: u64,
 ) ![]const u8 {
 switch (obj.format) {
-.elf => return @fieldParentPtr(Elf, "obj", obj).declareSymbol(section, name, linkage, @"type", offset, size),
+.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).declareSymbol(section, name, linkage, @"type", offset, size),
 else => unreachable,
 }
 }

 pub fn addRelocation(obj: *Object, name: []const u8, section: Section, address: u64, addend: i64) !void {
 switch (obj.format) {
-.elf => return @fieldParentPtr(Elf, "obj", obj).addRelocation(name, section, address, addend),
+.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).addRelocation(name, section, address, addend),
 else => unreachable,
 }
 }

 pub fn finish(obj: *Object, file: std.fs.File) !void {
 switch (obj.format) {
-.elf => return @fieldParentPtr(Elf, "obj", obj).finish(file),
+.elf => return @as(*Elf, @fieldParentPtr("obj", obj)).finish(file),
 else => unreachable,
 }
 }
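The Object.zig arms above use the builtin in expression position, where no result location exists, so the parent type is spelled with @as. A reduced sketch of that dispatch shape, with illustrative names not taken from the commit:

    const std = @import("std");

    const Object = struct { format: enum { elf, coff } };
    const Elf = struct {
        obj: Object,
        fn size(self: *const Elf) usize {
            _ = self;
            return 0;
        }
    };

    fn objectSize(obj: *const Object) usize {
        return switch (obj.format) {
            // No result type is available mid-expression, so @as supplies it.
            .elf => @as(*const Elf, @fieldParentPtr("obj", obj)).size(),
            .coff => unreachable,
        };
    }

    test "dispatch through a base field" {
        const elf: Elf = .{ .obj = .{ .format = .elf } };
        try std.testing.expect(objectSize(&elf.obj) == 0);
    }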
@@ -1098,13 +1098,13 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
 }
 };

-pub fn findBlockScope(inner: *ScopeExtraScope, c: *ScopeExtraContext) !*ScopeExtraScope.Block {
+pub fn findBlockScope(inner: *ScopeExtraScope, c: *ScopeExtraContext) !*Block {
 var scope = inner;
 while (true) {
 switch (scope.id) {
 .root => unreachable,
-.block => return @fieldParentPtr(Block, "base", scope),
-.condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c),
+.block => return @fieldParentPtr("base", scope),
+.condition => return @as(*Condition, @fieldParentPtr("base", scope)).getBlockScope(c),
 else => scope = scope.parent.?,
 }
 }
@@ -1116,7 +1116,7 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
 switch (scope.id) {
 .root => unreachable,
 .block => {
-const block = @fieldParentPtr(Block, "base", scope);
+const block: *Block = @fieldParentPtr("base", scope);
 if (block.return_type) |ty| return ty;
 scope = scope.parent.?;
 },
@@ -1128,15 +1128,15 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
 pub fn getAlias(scope: *ScopeExtraScope, name: []const u8) []const u8 {
 return switch (scope.id) {
 .root => return name,
-.block => @fieldParentPtr(Block, "base", scope).getAlias(name),
+.block => @as(*Block, @fieldParentPtr("base", scope)).getAlias(name),
 .loop, .do_loop, .condition => scope.parent.?.getAlias(name),
 };
 }

 pub fn contains(scope: *ScopeExtraScope, name: []const u8) bool {
 return switch (scope.id) {
-.root => @fieldParentPtr(Root, "base", scope).contains(name),
-.block => @fieldParentPtr(Block, "base", scope).contains(name),
+.root => @as(*Root, @fieldParentPtr("base", scope)).contains(name),
+.block => @as(*Block, @fieldParentPtr("base", scope)).contains(name),
 .loop, .do_loop, .condition => scope.parent.?.contains(name),
 };
 }
@@ -1158,11 +1158,11 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
 while (true) {
 switch (scope.id) {
 .root => {
-const root = @fieldParentPtr(Root, "base", scope);
+const root: *Root = @fieldParentPtr("base", scope);
 return root.nodes.append(node);
 },
 .block => {
-const block = @fieldParentPtr(Block, "base", scope);
+const block: *Block = @fieldParentPtr("base", scope);
 return block.statements.append(node);
 },
 else => scope = scope.parent.?,
@@ -1184,7 +1184,7 @@ pub fn ScopeExtra(comptime ScopeExtraContext: type, comptime ScopeExtraType: typ
 switch (scope.id) {
 .root => return,
 .block => {
-const block = @fieldParentPtr(Block, "base", scope);
+const block: *Block = @fieldParentPtr("base", scope);
 if (block.variable_discards.get(name)) |discard| {
 discard.data.should_skip = true;
 return;
@@ -409,7 +409,7 @@ pub const Node = extern union {
 return null;

 if (self.ptr_otherwise.tag == t)
-return @fieldParentPtr(t.Type(), "base", self.ptr_otherwise);
+return @alignCast(@fieldParentPtr("base", self.ptr_otherwise));

 return null;
 }
@@ -1220,7 +1220,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
 });
 },
 .pub_var_simple, .var_simple => {
-const payload = @fieldParentPtr(Payload.SimpleVarDecl, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.SimpleVarDecl, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
 if (node.tag() == .pub_var_simple) _ = try c.addToken(.keyword_pub, "pub");
 const const_tok = try c.addToken(.keyword_const, "const");
 _ = try c.addIdentifier(payload.name);
@@ -1293,7 +1293,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
 },
 .var_decl => return renderVar(c, node),
 .arg_redecl, .alias => {
-const payload = @fieldParentPtr(Payload.ArgRedecl, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.ArgRedecl, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
 if (node.tag() == .alias) _ = try c.addToken(.keyword_pub, "pub");
 const mut_tok = if (node.tag() == .alias)
 try c.addToken(.keyword_const, "const")
@@ -1492,7 +1492,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
 });
 },
 .c_pointer, .single_pointer => {
-const payload = @fieldParentPtr(Payload.Pointer, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.Pointer, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;

 const asterisk = if (node.tag() == .single_pointer)
 try c.addToken(.asterisk, "*")
@@ -2085,7 +2085,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
 }

 fn renderRecord(c: *Context, node: Node) !NodeIndex {
-const payload = @fieldParentPtr(Payload.Record, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.Record, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
 if (payload.layout == .@"packed")
 _ = try c.addToken(.keyword_packed, "packed")
 else if (payload.layout == .@"extern")
@@ -2487,7 +2487,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
 }

 fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
-const payload = @fieldParentPtr(Payload.UnOp, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.UnOp, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
 return c.addNode(.{
 .tag = tag,
 .main_token = try c.addToken(tok_tag, bytes),
@@ -2499,7 +2499,7 @@ fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: T
 }

 fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
-const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.BinOp, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
 const lhs = try renderNodeGrouped(c, payload.lhs);
 return c.addNode(.{
 .tag = tag,
@@ -2512,7 +2512,7 @@ fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_ta
 }

 fn renderBinOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
-const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data;
+const payload = @as(*Payload.BinOp, @alignCast(@fieldParentPtr("base", node.ptr_otherwise))).data;
 const lhs = try renderNode(c, payload.lhs);
 return c.addNode(.{
 .tag = tag,
@@ -19,7 +19,7 @@ pub const Tree = struct {
 }

 pub fn root(self: *Tree) *Node.Root {
-return @fieldParentPtr(Node.Root, "base", self.node);
+return @alignCast(@fieldParentPtr("base", self.node));
 }

 pub fn dump(self: *Tree, writer: anytype) @TypeOf(writer).Error!void {
@@ -174,7 +174,7 @@ pub const Node = struct {

 pub fn cast(base: *Node, comptime id: Id) ?*id.Type() {
 if (base.id == id) {
-return @fieldParentPtr(id.Type(), "base", base);
+return @alignCast(@fieldParentPtr("base", base));
 }
 return null;
 }
@@ -461,7 +461,7 @@ pub const Node = struct {
 pub fn isNumberExpression(node: *const Node) bool {
 switch (node.id) {
 .literal => {
-const literal = @fieldParentPtr(Node.Literal, "base", node);
+const literal: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
 return switch (literal.token.id) {
 .number => true,
 else => false,
@@ -475,7 +475,7 @@ pub const Node = struct {
 pub fn isStringLiteral(node: *const Node) bool {
 switch (node.id) {
 .literal => {
-const literal = @fieldParentPtr(Node.Literal, "base", node);
+const literal: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
 return switch (literal.token.id) {
 .quoted_ascii_string, .quoted_wide_string => true,
 else => false,
@@ -489,105 +489,103 @@ pub const Node = struct {
 switch (node.id) {
 .root => unreachable,
 .resource_external => {
-const casted = @fieldParentPtr(Node.ResourceExternal, "base", node);
+const casted: *const Node.ResourceExternal = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 .resource_raw_data => {
-const casted = @fieldParentPtr(Node.ResourceRawData, "base", node);
+const casted: *const Node.ResourceRawData = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 .literal => {
-const casted = @fieldParentPtr(Node.Literal, "base", node);
+const casted: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
 return casted.token;
 },
 .binary_expression => {
-const casted = @fieldParentPtr(Node.BinaryExpression, "base", node);
+const casted: *const Node.BinaryExpression = @alignCast(@fieldParentPtr("base", node));
 return casted.left.getFirstToken();
 },
 .grouped_expression => {
-const casted = @fieldParentPtr(Node.GroupedExpression, "base", node);
+const casted: *const Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
 return casted.open_token;
 },
 .not_expression => {
-const casted = @fieldParentPtr(Node.NotExpression, "base", node);
+const casted: *const Node.NotExpression = @alignCast(@fieldParentPtr("base", node));
 return casted.not_token;
 },
 .accelerators => {
-const casted = @fieldParentPtr(Node.Accelerators, "base", node);
+const casted: *const Node.Accelerators = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 .accelerator => {
-const casted = @fieldParentPtr(Node.Accelerator, "base", node);
+const casted: *const Node.Accelerator = @alignCast(@fieldParentPtr("base", node));
 return casted.event.getFirstToken();
 },
 .dialog => {
-const casted = @fieldParentPtr(Node.Dialog, "base", node);
+const casted: *const Node.Dialog = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 .control_statement => {
-const casted = @fieldParentPtr(Node.ControlStatement, "base", node);
+const casted: *const Node.ControlStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.type;
 },
 .toolbar => {
-const casted = @fieldParentPtr(Node.Toolbar, "base", node);
+const casted: *const Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 .menu => {
-const casted = @fieldParentPtr(Node.Menu, "base", node);
+const casted: *const Node.Menu = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 inline .menu_item, .menu_item_separator, .menu_item_ex => |menu_item_type| {
-const node_type = menu_item_type.Type();
-const casted = @fieldParentPtr(node_type, "base", node);
+const casted: *const menu_item_type.Type() = @alignCast(@fieldParentPtr("base", node));
 return casted.menuitem;
 },
 inline .popup, .popup_ex => |popup_type| {
-const node_type = popup_type.Type();
-const casted = @fieldParentPtr(node_type, "base", node);
+const casted: *const popup_type.Type() = @alignCast(@fieldParentPtr("base", node));
 return casted.popup;
 },
 .version_info => {
-const casted = @fieldParentPtr(Node.VersionInfo, "base", node);
+const casted: *const Node.VersionInfo = @alignCast(@fieldParentPtr("base", node));
 return casted.id;
 },
 .version_statement => {
-const casted = @fieldParentPtr(Node.VersionStatement, "base", node);
+const casted: *const Node.VersionStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.type;
 },
 .block => {
-const casted = @fieldParentPtr(Node.Block, "base", node);
+const casted: *const Node.Block = @alignCast(@fieldParentPtr("base", node));
 return casted.identifier;
 },
 .block_value => {
-const casted = @fieldParentPtr(Node.BlockValue, "base", node);
+const casted: *const Node.BlockValue = @alignCast(@fieldParentPtr("base", node));
 return casted.identifier;
 },
 .block_value_value => {
-const casted = @fieldParentPtr(Node.BlockValueValue, "base", node);
+const casted: *const Node.BlockValueValue = @alignCast(@fieldParentPtr("base", node));
 return casted.expression.getFirstToken();
 },
 .string_table => {
-const casted = @fieldParentPtr(Node.StringTable, "base", node);
+const casted: *const Node.StringTable = @alignCast(@fieldParentPtr("base", node));
 return casted.type;
 },
 .string_table_string => {
-const casted = @fieldParentPtr(Node.StringTableString, "base", node);
+const casted: *const Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
 return casted.id.getFirstToken();
 },
 .language_statement => {
-const casted = @fieldParentPtr(Node.LanguageStatement, "base", node);
+const casted: *const Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.language_token;
 },
 .font_statement => {
-const casted = @fieldParentPtr(Node.FontStatement, "base", node);
+const casted: *const Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.identifier;
 },
 .simple_statement => {
-const casted = @fieldParentPtr(Node.SimpleStatement, "base", node);
+const casted: *const Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.identifier;
 },
 .invalid => {
-const casted = @fieldParentPtr(Node.Invalid, "base", node);
+const casted: *const Node.Invalid = @alignCast(@fieldParentPtr("base", node));
 return casted.context[0];
 },
 }
@@ -597,44 +595,44 @@ pub const Node = struct {
 switch (node.id) {
 .root => unreachable,
 .resource_external => {
-const casted = @fieldParentPtr(Node.ResourceExternal, "base", node);
+const casted: *const Node.ResourceExternal = @alignCast(@fieldParentPtr("base", node));
 return casted.filename.getLastToken();
 },
 .resource_raw_data => {
-const casted = @fieldParentPtr(Node.ResourceRawData, "base", node);
+const casted: *const Node.ResourceRawData = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .literal => {
-const casted = @fieldParentPtr(Node.Literal, "base", node);
+const casted: *const Node.Literal = @alignCast(@fieldParentPtr("base", node));
 return casted.token;
 },
 .binary_expression => {
-const casted = @fieldParentPtr(Node.BinaryExpression, "base", node);
+const casted: *const Node.BinaryExpression = @alignCast(@fieldParentPtr("base", node));
 return casted.right.getLastToken();
 },
 .grouped_expression => {
-const casted = @fieldParentPtr(Node.GroupedExpression, "base", node);
+const casted: *const Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
 return casted.close_token;
 },
 .not_expression => {
-const casted = @fieldParentPtr(Node.NotExpression, "base", node);
+const casted: *const Node.NotExpression = @alignCast(@fieldParentPtr("base", node));
 return casted.number_token;
 },
 .accelerators => {
-const casted = @fieldParentPtr(Node.Accelerators, "base", node);
+const casted: *const Node.Accelerators = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .accelerator => {
-const casted = @fieldParentPtr(Node.Accelerator, "base", node);
+const casted: *const Node.Accelerator = @alignCast(@fieldParentPtr("base", node));
 if (casted.type_and_options.len > 0) return casted.type_and_options[casted.type_and_options.len - 1];
 return casted.idvalue.getLastToken();
 },
 .dialog => {
-const casted = @fieldParentPtr(Node.Dialog, "base", node);
+const casted: *const Node.Dialog = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .control_statement => {
-const casted = @fieldParentPtr(Node.ControlStatement, "base", node);
+const casted: *const Node.ControlStatement = @alignCast(@fieldParentPtr("base", node));
 if (casted.extra_data_end) |token| return token;
 if (casted.help_id) |help_id_node| return help_id_node.getLastToken();
 if (casted.exstyle) |exstyle_node| return exstyle_node.getLastToken();
@@ -647,80 +645,79 @@ pub const Node = struct {
 return casted.height.getLastToken();
 },
 .toolbar => {
-const casted = @fieldParentPtr(Node.Toolbar, "base", node);
+const casted: *const Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .menu => {
-const casted = @fieldParentPtr(Node.Menu, "base", node);
+const casted: *const Node.Menu = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .menu_item => {
-const casted = @fieldParentPtr(Node.MenuItem, "base", node);
+const casted: *const Node.MenuItem = @alignCast(@fieldParentPtr("base", node));
 if (casted.option_list.len > 0) return casted.option_list[casted.option_list.len - 1];
 return casted.result.getLastToken();
 },
 .menu_item_separator => {
-const casted = @fieldParentPtr(Node.MenuItemSeparator, "base", node);
+const casted: *const Node.MenuItemSeparator = @alignCast(@fieldParentPtr("base", node));
 return casted.separator;
 },
 .menu_item_ex => {
-const casted = @fieldParentPtr(Node.MenuItemEx, "base", node);
+const casted: *const Node.MenuItemEx = @alignCast(@fieldParentPtr("base", node));
 if (casted.state) |state_node| return state_node.getLastToken();
 if (casted.type) |type_node| return type_node.getLastToken();
 if (casted.id) |id_node| return id_node.getLastToken();
 return casted.text;
 },
 inline .popup, .popup_ex => |popup_type| {
-const node_type = popup_type.Type();
-const casted = @fieldParentPtr(node_type, "base", node);
+const casted: *const popup_type.Type() = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .version_info => {
-const casted = @fieldParentPtr(Node.VersionInfo, "base", node);
+const casted: *const Node.VersionInfo = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .version_statement => {
-const casted = @fieldParentPtr(Node.VersionStatement, "base", node);
+const casted: *const Node.VersionStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.parts[casted.parts.len - 1].getLastToken();
 },
 .block => {
-const casted = @fieldParentPtr(Node.Block, "base", node);
+const casted: *const Node.Block = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .block_value => {
-const casted = @fieldParentPtr(Node.BlockValue, "base", node);
+const casted: *const Node.BlockValue = @alignCast(@fieldParentPtr("base", node));
 if (casted.values.len > 0) return casted.values[casted.values.len - 1].getLastToken();
 return casted.key;
 },
 .block_value_value => {
-const casted = @fieldParentPtr(Node.BlockValueValue, "base", node);
+const casted: *const Node.BlockValueValue = @alignCast(@fieldParentPtr("base", node));
 return casted.expression.getLastToken();
 },
 .string_table => {
-const casted = @fieldParentPtr(Node.StringTable, "base", node);
+const casted: *const Node.StringTable = @alignCast(@fieldParentPtr("base", node));
 return casted.end_token;
 },
 .string_table_string => {
-const casted = @fieldParentPtr(Node.StringTableString, "base", node);
+const casted: *const Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
 return casted.string;
 },
 .language_statement => {
-const casted = @fieldParentPtr(Node.LanguageStatement, "base", node);
+const casted: *const Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.sublanguage_id.getLastToken();
 },
 .font_statement => {
-const casted = @fieldParentPtr(Node.FontStatement, "base", node);
+const casted: *const Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
 if (casted.char_set) |char_set_node| return char_set_node.getLastToken();
 if (casted.italic) |italic_node| return italic_node.getLastToken();
 if (casted.weight) |weight_node| return weight_node.getLastToken();
 return casted.typeface;
 },
 .simple_statement => {
-const casted = @fieldParentPtr(Node.SimpleStatement, "base", node);
+const casted: *const Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
 return casted.value.getLastToken();
 },
 .invalid => {
-const casted = @fieldParentPtr(Node.Invalid, "base", node);
+const casted: *const Node.Invalid = @alignCast(@fieldParentPtr("base", node));
 return casted.context[casted.context.len - 1];
 },
 }
@@ -737,31 +734,31 @@ pub const Node = struct {
 switch (node.id) {
 .root => {
 try writer.writeAll("\n");
-const root = @fieldParentPtr(Node.Root, "base", node);
+const root: *Node.Root = @alignCast(@fieldParentPtr("base", node));
 for (root.body) |body_node| {
 try body_node.dump(tree, writer, indent + 1);
 }
 },
 .resource_external => {
-const resource = @fieldParentPtr(Node.ResourceExternal, "base", node);
+const resource: *Node.ResourceExternal = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ resource.id.slice(tree.source), resource.type.slice(tree.source), resource.common_resource_attributes.len });
 try resource.filename.dump(tree, writer, indent + 1);
 },
 .resource_raw_data => {
-const resource = @fieldParentPtr(Node.ResourceRawData, "base", node);
+const resource: *Node.ResourceRawData = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes] raw data: {}\n", .{ resource.id.slice(tree.source), resource.type.slice(tree.source), resource.common_resource_attributes.len, resource.raw_data.len });
 for (resource.raw_data) |data_expression| {
 try data_expression.dump(tree, writer, indent + 1);
 }
 },
 .literal => {
-const literal = @fieldParentPtr(Node.Literal, "base", node);
+const literal: *Node.Literal = @alignCast(@fieldParentPtr("base", node));
 try writer.writeAll(" ");
 try writer.writeAll(literal.token.slice(tree.source));
 try writer.writeAll("\n");
 },
 .binary_expression => {
-const binary = @fieldParentPtr(Node.BinaryExpression, "base", node);
+const binary: *Node.BinaryExpression = @alignCast(@fieldParentPtr("base", node));
 try writer.writeAll(" ");
 try writer.writeAll(binary.operator.slice(tree.source));
 try writer.writeAll("\n");
@@ -769,7 +766,7 @@ pub const Node = struct {
 try binary.right.dump(tree, writer, indent + 1);
 },
 .grouped_expression => {
-const grouped = @fieldParentPtr(Node.GroupedExpression, "base", node);
+const grouped: *Node.GroupedExpression = @alignCast(@fieldParentPtr("base", node));
 try writer.writeAll("\n");
 try writer.writeByteNTimes(' ', indent);
 try writer.writeAll(grouped.open_token.slice(tree.source));
@@ -780,7 +777,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .not_expression => {
-const not = @fieldParentPtr(Node.NotExpression, "base", node);
+const not: *Node.NotExpression = @alignCast(@fieldParentPtr("base", node));
 try writer.writeAll(" ");
 try writer.writeAll(not.not_token.slice(tree.source));
 try writer.writeAll(" ");
@@ -788,7 +785,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .accelerators => {
-const accelerators = @fieldParentPtr(Node.Accelerators, "base", node);
+const accelerators: *Node.Accelerators = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ accelerators.id.slice(tree.source), accelerators.type.slice(tree.source), accelerators.common_resource_attributes.len });
 for (accelerators.optional_statements) |statement| {
 try statement.dump(tree, writer, indent + 1);
@@ -804,7 +801,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .accelerator => {
-const accelerator = @fieldParentPtr(Node.Accelerator, "base", node);
+const accelerator: *Node.Accelerator = @alignCast(@fieldParentPtr("base", node));
 for (accelerator.type_and_options, 0..) |option, i| {
 if (i != 0) try writer.writeAll(",");
 try writer.writeByte(' ');
@@ -815,7 +812,7 @@ pub const Node = struct {
 try accelerator.idvalue.dump(tree, writer, indent + 1);
 },
 .dialog => {
-const dialog = @fieldParentPtr(Node.Dialog, "base", node);
+const dialog: *Node.Dialog = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ dialog.id.slice(tree.source), dialog.type.slice(tree.source), dialog.common_resource_attributes.len });
 inline for (.{ "x", "y", "width", "height" }) |arg| {
 try writer.writeByteNTimes(' ', indent + 1);
@@ -841,7 +838,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .control_statement => {
-const control = @fieldParentPtr(Node.ControlStatement, "base", node);
+const control: *Node.ControlStatement = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s}", .{control.type.slice(tree.source)});
 if (control.text) |text| {
 try writer.print(" text: {s}", .{text.slice(tree.source)});
@@ -877,7 +874,7 @@ pub const Node = struct {
 }
 },
 .toolbar => {
-const toolbar = @fieldParentPtr(Node.Toolbar, "base", node);
+const toolbar: *Node.Toolbar = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ toolbar.id.slice(tree.source), toolbar.type.slice(tree.source), toolbar.common_resource_attributes.len });
 inline for (.{ "button_width", "button_height" }) |arg| {
 try writer.writeByteNTimes(' ', indent + 1);
@@ -895,7 +892,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .menu => {
-const menu = @fieldParentPtr(Node.Menu, "base", node);
+const menu: *Node.Menu = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ menu.id.slice(tree.source), menu.type.slice(tree.source), menu.common_resource_attributes.len });
 for (menu.optional_statements) |statement| {
 try statement.dump(tree, writer, indent + 1);
@@ -916,16 +913,16 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .menu_item => {
-const menu_item = @fieldParentPtr(Node.MenuItem, "base", node);
+const menu_item: *Node.MenuItem = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} options]\n", .{ menu_item.menuitem.slice(tree.source), menu_item.text.slice(tree.source), menu_item.option_list.len });
 try menu_item.result.dump(tree, writer, indent + 1);
 },
 .menu_item_separator => {
-const menu_item = @fieldParentPtr(Node.MenuItemSeparator, "base", node);
+const menu_item: *Node.MenuItemSeparator = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s}\n", .{ menu_item.menuitem.slice(tree.source), menu_item.separator.slice(tree.source) });
 },
 .menu_item_ex => {
-const menu_item = @fieldParentPtr(Node.MenuItemEx, "base", node);
+const menu_item: *Node.MenuItemEx = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s}\n", .{ menu_item.menuitem.slice(tree.source), menu_item.text.slice(tree.source) });
 inline for (.{ "id", "type", "state" }) |arg| {
 if (@field(menu_item, arg)) |val_node| {
@@ -936,7 +933,7 @@ pub const Node = struct {
 }
 },
 .popup => {
-const popup = @fieldParentPtr(Node.Popup, "base", node);
+const popup: *Node.Popup = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} options]\n", .{ popup.popup.slice(tree.source), popup.text.slice(tree.source), popup.option_list.len });
 try writer.writeByteNTimes(' ', indent);
 try writer.writeAll(popup.begin_token.slice(tree.source));
@@ -949,7 +946,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .popup_ex => {
-const popup = @fieldParentPtr(Node.PopupEx, "base", node);
+const popup: *Node.PopupEx = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s}\n", .{ popup.popup.slice(tree.source), popup.text.slice(tree.source) });
 inline for (.{ "id", "type", "state", "help_id" }) |arg| {
 if (@field(popup, arg)) |val_node| {
@@ -969,7 +966,7 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .version_info => {
-const version_info = @fieldParentPtr(Node.VersionInfo, "base", node);
+const version_info: *Node.VersionInfo = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s} [{d} common_resource_attributes]\n", .{ version_info.id.slice(tree.source), version_info.versioninfo.slice(tree.source), version_info.common_resource_attributes.len });
 for (version_info.fixed_info) |fixed_info| {
 try fixed_info.dump(tree, writer, indent + 1);
@@ -985,14 +982,14 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .version_statement => {
-const version_statement = @fieldParentPtr(Node.VersionStatement, "base", node);
+const version_statement: *Node.VersionStatement = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s}\n", .{version_statement.type.slice(tree.source)});
 for (version_statement.parts) |part| {
 try part.dump(tree, writer, indent + 1);
 }
 },
 .block => {
-const block = @fieldParentPtr(Node.Block, "base", node);
+const block: *Node.Block = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s}\n", .{ block.identifier.slice(tree.source), block.key.slice(tree.source) });
 for (block.values) |value| {
 try value.dump(tree, writer, indent + 1);
@@ -1008,14 +1005,14 @@ pub const Node = struct {
 try writer.writeAll("\n");
 },
 .block_value => {
-const block_value = @fieldParentPtr(Node.BlockValue, "base", node);
+const block_value: *Node.BlockValue = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} {s}\n", .{ block_value.identifier.slice(tree.source), block_value.key.slice(tree.source) });
 for (block_value.values) |value| {
 try value.dump(tree, writer, indent + 1);
 }
 },
 .block_value_value => {
-const block_value = @fieldParentPtr(Node.BlockValueValue, "base", node);
+const block_value: *Node.BlockValueValue = @alignCast(@fieldParentPtr("base", node));
 if (block_value.trailing_comma) {
 try writer.writeAll(" ,");
 }
@@ -1023,7 +1020,7 @@ pub const Node = struct {
 try block_value.expression.dump(tree, writer, indent + 1);
 },
 .string_table => {
-const string_table = @fieldParentPtr(Node.StringTable, "base", node);
+const string_table: *Node.StringTable = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} [{d} common_resource_attributes]\n", .{ string_table.type.slice(tree.source), string_table.common_resource_attributes.len });
 for (string_table.optional_statements) |statement| {
 try statement.dump(tree, writer, indent + 1);
@@ -1040,19 +1037,19 @@ pub const Node = struct {
 },
 .string_table_string => {
 try writer.writeAll("\n");
-const string = @fieldParentPtr(Node.StringTableString, "base", node);
+const string: *Node.StringTableString = @alignCast(@fieldParentPtr("base", node));
 try string.id.dump(tree, writer, indent + 1);
 try writer.writeByteNTimes(' ', indent + 1);
 try writer.print("{s}\n", .{string.string.slice(tree.source)});
 },
 .language_statement => {
-const language = @fieldParentPtr(Node.LanguageStatement, "base", node);
+const language: *Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s}\n", .{language.language_token.slice(tree.source)});
 try language.primary_language_id.dump(tree, writer, indent + 1);
 try language.sublanguage_id.dump(tree, writer, indent + 1);
 },
 .font_statement => {
-const font = @fieldParentPtr(Node.FontStatement, "base", node);
+const font: *Node.FontStatement = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s} typeface: {s}\n", .{ font.identifier.slice(tree.source), font.typeface.slice(tree.source) });
 try writer.writeByteNTimes(' ', indent + 1);
 try writer.writeAll("point_size:\n");
@@ -1066,12 +1063,12 @@ pub const Node = struct {
 }
 },
 .simple_statement => {
-const statement = @fieldParentPtr(Node.SimpleStatement, "base", node);
+const statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" {s}\n", .{statement.identifier.slice(tree.source)});
 try statement.value.dump(tree, writer, indent + 1);
 },
 .invalid => {
-const invalid = @fieldParentPtr(Node.Invalid, "base", node);
+const invalid: *Node.Invalid = @alignCast(@fieldParentPtr("base", node));
 try writer.print(" context.len: {}\n", .{invalid.context.len});
 for (invalid.context) |context_token| {
 try writer.writeByteNTimes(' ', indent + 1);
@@ -229,34 +229,34 @@ pub const Compiler = struct {
 pub fn writeNode(self: *Compiler, node: *Node, writer: anytype) !void {
 switch (node.id) {
 .root => unreachable, // writeRoot should be called directly instead
-.resource_external => try self.writeResourceExternal(@fieldParentPtr(Node.ResourceExternal, "base", node), writer),
-.resource_raw_data => try self.writeResourceRawData(@fieldParentPtr(Node.ResourceRawData, "base", node), writer),
+.resource_external => try self.writeResourceExternal(@alignCast(@fieldParentPtr("base", node)), writer),
+.resource_raw_data => try self.writeResourceRawData(@alignCast(@fieldParentPtr("base", node)), writer),
 .literal => unreachable, // this is context dependent and should be handled by its parent
 .binary_expression => unreachable,
 .grouped_expression => unreachable,
 .not_expression => unreachable,
 .invalid => {}, // no-op, currently only used for dangling literals at EOF
-.accelerators => try self.writeAccelerators(@fieldParentPtr(Node.Accelerators, "base", node), writer),
+.accelerators => try self.writeAccelerators(@alignCast(@fieldParentPtr("base", node)), writer),
 .accelerator => unreachable, // handled by writeAccelerators
-.dialog => try self.writeDialog(@fieldParentPtr(Node.Dialog, "base", node), writer),
+.dialog => try self.writeDialog(@alignCast(@fieldParentPtr("base", node)), writer),
 .control_statement => unreachable,
-.toolbar => try self.writeToolbar(@fieldParentPtr(Node.Toolbar, "base", node), writer),
-.menu => try self.writeMenu(@fieldParentPtr(Node.Menu, "base", node), writer),
+.toolbar => try self.writeToolbar(@alignCast(@fieldParentPtr("base", node)), writer),
+.menu => try self.writeMenu(@alignCast(@fieldParentPtr("base", node)), writer),
 .menu_item => unreachable,
 .menu_item_separator => unreachable,
 .menu_item_ex => unreachable,
 .popup => unreachable,
 .popup_ex => unreachable,
-.version_info => try self.writeVersionInfo(@fieldParentPtr(Node.VersionInfo, "base", node), writer),
+.version_info => try self.writeVersionInfo(@alignCast(@fieldParentPtr("base", node)), writer),
 .version_statement => unreachable,
 .block => unreachable,
 .block_value => unreachable,
 .block_value_value => unreachable,
-.string_table => try self.writeStringTable(@fieldParentPtr(Node.StringTable, "base", node)),
+.string_table => try self.writeStringTable(@alignCast(@fieldParentPtr("base", node))),
 .string_table_string => unreachable, // handled by writeStringTable
-.language_statement => self.writeLanguageStatement(@fieldParentPtr(Node.LanguageStatement, "base", node)),
+.language_statement => self.writeLanguageStatement(@alignCast(@fieldParentPtr("base", node))),
 .font_statement => unreachable,
-.simple_statement => self.writeTopLevelSimpleStatement(@fieldParentPtr(Node.SimpleStatement, "base", node)),
+.simple_statement => self.writeTopLevelSimpleStatement(@alignCast(@fieldParentPtr("base", node))),
 }
 }

@@ -1289,7 +1289,7 @@ pub const Compiler = struct {
 return evaluateNumberExpression(node, self.source, self.input_code_pages).asWord();
 } else {
 std.debug.assert(node.isStringLiteral());
-const literal = @fieldParentPtr(Node.Literal, "base", node);
+const literal: *Node.Literal = @alignCast(@fieldParentPtr("base", node));
 const bytes = SourceBytes{
 .slice = literal.token.slice(self.source),
 .code_page = self.input_code_pages.getForToken(literal.token),
@@ -1342,7 +1342,7 @@ pub const Compiler = struct {
 /// the writer within this function could return error.NoSpaceLeft
 pub fn writeAcceleratorsData(self: *Compiler, node: *Node.Accelerators, data_writer: anytype) !void {
 for (node.accelerators, 0..) |accel_node, i| {
-const accelerator = @fieldParentPtr(Node.Accelerator, "base", accel_node);
+const accelerator: *Node.Accelerator = @alignCast(@fieldParentPtr("base", accel_node));
 var modifiers = res.AcceleratorModifiers{};
 for (accelerator.type_and_options) |type_or_option| {
 const modifier = rc.AcceleratorTypeAndOptions.map.get(type_or_option.slice(self.source)).?;
@@ -1426,7 +1426,7 @@ pub const Compiler = struct {
 for (node.optional_statements) |optional_statement| {
 switch (optional_statement.id) {
 .simple_statement => {
-const simple_statement = @fieldParentPtr(Node.SimpleStatement, "base", optional_statement);
+const simple_statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", optional_statement));
 const statement_identifier = simple_statement.identifier;
 const statement_type = rc.OptionalStatements.dialog_map.get(statement_identifier.slice(self.source)) orelse continue;
 switch (statement_type) {
@@ -1440,7 +1440,7 @@ pub const Compiler = struct {
 },
 .caption => {
 std.debug.assert(simple_statement.value.id == .literal);
-const literal_node = @fieldParentPtr(Node.Literal, "base", simple_statement.value);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", simple_statement.value));
 optional_statement_values.caption = literal_node.token;
 },
 .class => {
@@ -1466,7 +1466,7 @@ pub const Compiler = struct {
 optional_statement_values.class = NameOrOrdinal{ .ordinal = class_ordinal.asWord() };
 } else {
 std.debug.assert(simple_statement.value.isStringLiteral());
-const literal_node = @fieldParentPtr(Node.Literal, "base", simple_statement.value);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", simple_statement.value));
 const parsed = try self.parseQuotedStringAsWideString(literal_node.token);
 optional_statement_values.class = NameOrOrdinal{ .name = parsed };
 }
@@ -1492,7 +1492,7 @@ pub const Compiler = struct {
 }

 std.debug.assert(simple_statement.value.id == .literal);
-const literal_node = @fieldParentPtr(Node.Literal, "base", simple_statement.value);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", simple_statement.value));

 const token_slice = literal_node.token.slice(self.source);
 const bytes = SourceBytes{
@@ -1542,7 +1542,7 @@ pub const Compiler = struct {
 }
 },
 .font_statement => {
-const font = @fieldParentPtr(Node.FontStatement, "base", optional_statement);
+const font: *Node.FontStatement = @alignCast(@fieldParentPtr("base", optional_statement));
 if (optional_statement_values.font != null) {
 optional_statement_values.font.?.node = font;
 } else {
@@ -1581,7 +1581,7 @@ pub const Compiler = struct {
 // Multiple CLASS parameters are specified and any of them are treated as a number, then
 // the last CLASS is always treated as a number no matter what
 if (last_class_would_be_forced_ordinal and optional_statement_values.class.? == .name) {
-const literal_node = @fieldParentPtr(Node.Literal, "base", last_class.value);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", last_class.value));
 const ordinal_value = res.ForcedOrdinal.fromUtf16Le(optional_statement_values.class.?.name);

 try self.addErrorDetails(.{
@@ -1611,7 +1611,7 @@ pub const Compiler = struct {
 // 2. Multiple MENU parameters are specified and any of them are treated as a number, then
 // the last MENU is always treated as a number no matter what
 if ((last_menu_would_be_forced_ordinal or last_menu_has_digit_as_first_char) and optional_statement_values.menu.? == .name) {
-const literal_node = @fieldParentPtr(Node.Literal, "base", last_menu.value);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", last_menu.value));
 const token_slice = literal_node.token.slice(self.source);
 const bytes = SourceBytes{
 .slice = token_slice,
@@ -1658,7 +1658,7 @@ pub const Compiler = struct {
 // between resinator and the Win32 RC compiler, we only emit a hint instead of
 // a warning.
 if (last_menu_did_uppercase) {
-const literal_node = @fieldParentPtr(Node.Literal, "base", last_menu.value);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", last_menu.value));
 try self.addErrorDetails(.{
 .err = .dialog_menu_id_was_uppercased,
 .type = .hint,
@@ -1704,7 +1704,7 @@ pub const Compiler = struct {
 defer controls_by_id.deinit();

 for (node.controls) |control_node| {
-const control = @fieldParentPtr(Node.ControlStatement, "base", control_node);
+const control: *Node.ControlStatement = @alignCast(@fieldParentPtr("base", control_node));

 self.writeDialogControl(
 control,
@@ -1940,7 +1940,7 @@ pub const Compiler = struct {
 // And then write out the ordinal using a proper a NameOrOrdinal encoding.
 try ordinal.write(data_writer);
 } else if (class_node.isStringLiteral()) {
-const literal_node = @fieldParentPtr(Node.Literal, "base", class_node);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", class_node));
 const parsed = try self.parseQuotedStringAsWideString(literal_node.token);
 defer self.allocator.free(parsed);
 if (rc.ControlClass.fromWideString(parsed)) |control_class| {
@@ -1955,7 +1955,7 @@ pub const Compiler = struct {
 try name.write(data_writer);
 }
 } else {
-const literal_node = @fieldParentPtr(Node.Literal, "base", class_node);
+const literal_node: *Node.Literal = @alignCast(@fieldParentPtr("base", class_node));
 const literal_slice = literal_node.token.slice(self.source);
 // This succeeding is guaranteed by the parser
 const control_class = rc.ControlClass.map.get(literal_slice) orelse unreachable;
@@ -2178,7 +2178,7 @@ pub const Compiler = struct {
 try writer.writeInt(u16, 0, .little); // null-terminated UTF-16 text
 },
 .menu_item => {
-const menu_item = @fieldParentPtr(Node.MenuItem, "base", node);
+const menu_item: *Node.MenuItem = @alignCast(@fieldParentPtr("base", node));
 var flags = res.MenuItemFlags{};
 for (menu_item.option_list) |option_token| {
 // This failing would be a bug in the parser
@@ -2196,7 +2196,7 @@ pub const Compiler = struct {
 try writer.writeAll(std.mem.sliceAsBytes(text[0 .. text.len + 1]));
 },
 .popup => {
-const popup = @fieldParentPtr(Node.Popup, "base", node);
+const popup: *Node.Popup = @alignCast(@fieldParentPtr("base", node));
 var flags = res.MenuItemFlags{ .value = res.MF.POPUP };
 for (popup.option_list) |option_token| {
 // This failing would be a bug in the parser
@@ -2216,7 +2216,7 @@ pub const Compiler = struct {
 }
 },
 inline .menu_item_ex, .popup_ex => |node_type| {
-const menu_item = @fieldParentPtr(node_type.Type(), "base", node);
+const menu_item: *node_type.Type() = @alignCast(@fieldParentPtr("base", node));

 if (menu_item.type) |flags| {
 const value = evaluateNumberExpression(flags, self.source, self.input_code_pages);
@@ -2295,7 +2295,7 @@ pub const Compiler = struct {
 for (node.fixed_info) |fixed_info| {
 switch (fixed_info.id) {
 .version_statement => {
-const version_statement = @fieldParentPtr(Node.VersionStatement, "base", fixed_info);
+const version_statement: *Node.VersionStatement = @alignCast(@fieldParentPtr("base", fixed_info));
 const version_type = rc.VersionInfo.map.get(version_statement.type.slice(self.source)).?;

 // Ensure that all parts are cleared for each version, to properly account for
@@ -2345,7 +2345,7 @@ pub const Compiler = struct {
 }
 },
 .simple_statement => {
-const statement = @fieldParentPtr(Node.SimpleStatement, "base", fixed_info);
+const statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", fixed_info));
 const statement_type = rc.VersionInfo.map.get(statement.identifier.slice(self.source)).?;
 const value = evaluateNumberExpression(statement.value, self.source, self.input_code_pages);
 switch (statement_type) {
@@ -2416,7 +2416,7 @@ pub const Compiler = struct {

 switch (node.id) {
 inline .block, .block_value => |node_type| {
-const block_or_value = @fieldParentPtr(node_type.Type(), "base", node);
+const block_or_value: *node_type.Type() = @alignCast(@fieldParentPtr("base", node));
 const parsed_key = try self.parseQuotedStringAsWideString(block_or_value.key);
 defer self.allocator.free(parsed_key);

@@ -2506,7 +2506,7 @@ pub const Compiler = struct {
 const language = getLanguageFromOptionalStatements(node.optional_statements, self.source, self.input_code_pages) orelse self.state.language;

 for (node.strings) |string_node| {
-const string = @fieldParentPtr(Node.StringTableString, "base", string_node);
+const string: *Node.StringTableString = @alignCast(@fieldParentPtr("base", string_node));
 const string_id_data = try self.evaluateDataExpression(string.id);
 const string_id = string_id_data.number.asWord();

@@ -2795,11 +2795,11 @@ pub const Compiler = struct {
 fn applyToOptionalStatements(language: *res.Language, version: *u32, characteristics: *u32, statements: []*Node, source: []const u8, code_page_lookup: *const CodePageLookup) void {
 for (statements) |node| switch (node.id) {
 .language_statement => {
-const language_statement = @fieldParentPtr(Node.LanguageStatement, "base", node);
+const language_statement: *Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
 language.* = languageFromLanguageStatement(language_statement, source, code_page_lookup);
 },
 .simple_statement => {
-const simple_statement = @fieldParentPtr(Node.SimpleStatement, "base", node);
+const simple_statement: *Node.SimpleStatement = @alignCast(@fieldParentPtr("base", node));
 const statement_type = rc.OptionalStatements.map.get(simple_statement.identifier.slice(source)) orelse continue;
 const result = Compiler.evaluateNumberExpression(simple_statement.value, source, code_page_lookup);
 switch (statement_type) {
@@ -2824,7 +2824,7 @@ pub const Compiler = struct {
 pub fn getLanguageFromOptionalStatements(statements: []*Node, source: []const u8, code_page_lookup: *const CodePageLookup) ?res.Language {
 for (statements) |node| switch (node.id) {
 .language_statement => {
-const language_statement = @fieldParentPtr(Node.LanguageStatement, "base", node);
+const language_statement: *Node.LanguageStatement = @alignCast(@fieldParentPtr("base", node));
 return languageFromLanguageStatement(language_statement, source, code_page_lookup);
 },
 else => continue,
@ -889,7 +889,7 @@ pub const Parser = struct {
|
||||
if (control == .control) {
|
||||
class = try self.parseExpression(.{});
|
||||
if (class.?.id == .literal) {
|
||||
const class_literal = @fieldParentPtr(Node.Literal, "base", class.?);
|
||||
const class_literal: *Node.Literal = @alignCast(@fieldParentPtr("base", class.?));
|
||||
const is_invalid_control_class = class_literal.token.id == .literal and !rc.ControlClass.map.has(class_literal.token.slice(self.lexer.buffer));
|
||||
if (is_invalid_control_class) {
|
||||
return self.addErrorDetailsAndFail(.{
|
||||
|
||||
@ -48,7 +48,7 @@ pub const File = struct {
|
||||
pub fn field_count(file: *const File, node: Ast.Node.Index) u32 {
|
||||
const scope = file.scopes.get(node) orelse return 0;
|
||||
if (scope.tag != .namespace) return 0;
|
||||
const namespace = @fieldParentPtr(Scope.Namespace, "base", scope);
|
||||
const namespace: *Scope.Namespace = @alignCast(@fieldParentPtr("base", scope));
|
||||
return namespace.field_count;
|
||||
}
|
||||
|
||||
@ -439,11 +439,11 @@ pub const Scope = struct {
|
||||
while (true) switch (it.tag) {
|
||||
.top => unreachable,
|
||||
.local => {
|
||||
const local = @fieldParentPtr(Local, "base", it);
|
||||
const local: *Local = @alignCast(@fieldParentPtr("base", it));
|
||||
it = local.parent;
|
||||
},
|
||||
.namespace => {
|
||||
const namespace = @fieldParentPtr(Namespace, "base", it);
|
||||
const namespace: *Namespace = @alignCast(@fieldParentPtr("base", it));
|
||||
return namespace.decl_index;
|
||||
},
|
||||
};
|
||||
@ -453,7 +453,7 @@ pub const Scope = struct {
|
||||
switch (scope.tag) {
|
||||
.top, .local => return null,
|
||||
.namespace => {
|
||||
const namespace = @fieldParentPtr(Namespace, "base", scope);
|
||||
const namespace: *Namespace = @alignCast(@fieldParentPtr("base", scope));
|
||||
return namespace.names.get(name);
|
||||
},
|
||||
}
|
||||
@ -465,7 +465,7 @@ pub const Scope = struct {
|
||||
while (true) switch (it.tag) {
|
||||
.top => break,
|
||||
.local => {
|
||||
const local = @fieldParentPtr(Local, "base", it);
|
||||
const local: *Local = @alignCast(@fieldParentPtr("base", it));
|
||||
const name_token = main_tokens[local.var_node] + 1;
|
||||
const ident_name = ast.tokenSlice(name_token);
|
||||
if (std.mem.eql(u8, ident_name, name)) {
|
||||
@ -474,7 +474,7 @@ pub const Scope = struct {
|
||||
it = local.parent;
|
||||
},
|
||||
.namespace => {
|
||||
const namespace = @fieldParentPtr(Namespace, "base", it);
|
||||
const namespace: *Namespace = @alignCast(@fieldParentPtr("base", it));
|
||||
if (namespace.names.get(name)) |node| {
|
||||
return node;
|
||||
}
|
||||
|
||||
@ -1062,8 +1062,8 @@ pub fn getUninstallStep(self: *Build) *Step {

fn makeUninstall(uninstall_step: *Step, prog_node: *std.Progress.Node) anyerror!void {
_ = prog_node;
const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
const self = @fieldParentPtr(Build, "uninstall_tls", uninstall_tls);
const uninstall_tls: *TopLevelStep = @fieldParentPtr("step", uninstall_step);
const self: *Build = @fieldParentPtr("uninstall_tls", uninstall_tls);

for (self.installed_files.items) |installed_file| {
const full_path = self.getInstallPath(installed_file.dir, installed_file.path);

@ -231,7 +231,7 @@ fn makeNoOp(step: *Step, prog_node: *std.Progress.Node) anyerror!void {

pub fn cast(step: *Step, comptime T: type) ?*T {
if (step.id == T.base_id) {
return @fieldParentPtr(T, "step", step);
return @fieldParentPtr("step", step);
}
return null;
}

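Aside (not part of this diff): a minimal, hypothetical sketch of the reworked builtin as it is used above. The parent type is no longer a comptime parameter; it is supplied by the result location, here the declared return type. The Step and Compile types below are simplified stand-ins, not the real build-system types.

const std = @import("std");

const Step = struct {
    id: u32,
};

const Compile = struct {
    step: Step,
    optimize_mode: u8,
};

fn cast(step: *Step) *Compile {
    // Old form: return @fieldParentPtr(Compile, "step", step);
    // New form: the return type of `cast` supplies the parent pointer type.
    return @fieldParentPtr("step", step);
}

test "parent pointer from result location" {
    var compile: Compile = .{ .step = .{ .id = 1 }, .optimize_mode = 0 };
    try std.testing.expect(cast(&compile.step) == &compile);
}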
@ -49,7 +49,7 @@ pub fn setName(self: *CheckFile, name: []const u8) void {
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self = @fieldParentPtr(CheckFile, "step", step);
|
||||
const self: *CheckFile = @fieldParentPtr("step", step);
|
||||
|
||||
const src_path = self.source.getPath(b);
|
||||
const contents = fs.cwd().readFileAlloc(b.allocator, src_path, self.max_bytes) catch |err| {
|
||||
|
||||
@ -530,7 +530,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const gpa = b.allocator;
|
||||
const self = @fieldParentPtr(CheckObject, "step", step);
|
||||
const self: *CheckObject = @fieldParentPtr("step", step);
|
||||
|
||||
const src_path = self.source.getPath(b);
|
||||
const contents = fs.cwd().readFileAllocOptions(
|
||||
|
||||
@ -918,7 +918,7 @@ fn getGeneratedFilePath(self: *Compile, comptime tag_name: []const u8, asking_st
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const self = @fieldParentPtr(Compile, "step", step);
|
||||
const self: *Compile = @fieldParentPtr("step", step);
|
||||
|
||||
var zig_args = ArrayList([]const u8).init(arena);
|
||||
defer zig_args.deinit();
|
||||
|
||||
@ -167,7 +167,7 @@ fn putValue(self: *ConfigHeader, field_name: []const u8, comptime T: type, v: T)
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const self = @fieldParentPtr(ConfigHeader, "step", step);
|
||||
const self: *ConfigHeader = @fieldParentPtr("step", step);
|
||||
const gpa = b.allocator;
|
||||
const arena = b.allocator;
|
||||
|
||||
|
||||
@ -47,7 +47,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const self = @fieldParentPtr(Fmt, "step", step);
|
||||
const self: *Fmt = @fieldParentPtr("step", step);
|
||||
|
||||
var argv: std.ArrayListUnmanaged([]const u8) = .{};
|
||||
try argv.ensureUnusedCapacity(arena, 2 + 1 + self.paths.len + 2 * self.exclude_paths.len);
|
||||
|
||||
@ -121,7 +121,7 @@ pub fn create(owner: *std.Build, artifact: *Step.Compile, options: Options) *Ins
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const self = @fieldParentPtr(InstallArtifact, "step", step);
|
||||
const self: *InstallArtifact = @fieldParentPtr("step", step);
|
||||
const dest_builder = step.owner;
|
||||
const cwd = fs.cwd();
|
||||
|
||||
|
||||
@ -63,7 +63,7 @@ pub fn create(owner: *std.Build, options: Options) *InstallDirStep {
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const self = @fieldParentPtr(InstallDirStep, "step", step);
|
||||
const self: *InstallDirStep = @fieldParentPtr("step", step);
|
||||
const dest_builder = self.dest_builder;
|
||||
const arena = dest_builder.allocator;
|
||||
const dest_prefix = dest_builder.getInstallPath(self.options.install_dir, self.options.install_subdir);
|
||||
|
||||
@ -43,7 +43,7 @@ pub fn create(
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const src_builder = step.owner;
|
||||
const self = @fieldParentPtr(InstallFile, "step", step);
|
||||
const self: *InstallFile = @fieldParentPtr("step", step);
|
||||
const dest_builder = self.dest_builder;
|
||||
const full_src_path = self.source.getPath2(src_builder, step);
|
||||
const full_dest_path = dest_builder.getInstallPath(self.dir, self.dest_rel_path);
|
||||
|
||||
@ -92,7 +92,7 @@ pub fn getOutputSeparatedDebug(self: *const ObjCopy) ?std.Build.LazyPath {
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const self = @fieldParentPtr(ObjCopy, "step", step);
|
||||
const self: *ObjCopy = @fieldParentPtr("step", step);
|
||||
|
||||
var man = b.graph.cache.obtain();
|
||||
defer man.deinit();
|
||||
|
||||
@ -415,7 +415,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
|
||||
const b = step.owner;
|
||||
const self = @fieldParentPtr(Options, "step", step);
|
||||
const self: *Options = @fieldParentPtr("step", step);
|
||||
|
||||
for (self.args.items) |item| {
|
||||
self.addOption(
|
||||
|
||||
@ -28,7 +28,7 @@ fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
|
||||
const b = step.owner;
|
||||
const self = @fieldParentPtr(RemoveDir, "step", step);
|
||||
const self: *RemoveDir = @fieldParentPtr("step", step);
|
||||
|
||||
b.build_root.handle.deleteTree(self.dir_path) catch |err| {
|
||||
if (b.build_root.path) |base| {
|
||||
|
||||
@ -497,7 +497,7 @@ const IndexedOutput = struct {
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const arena = b.allocator;
|
||||
const self = @fieldParentPtr(Run, "step", step);
|
||||
const self: *Run = @fieldParentPtr("step", step);
|
||||
const has_side_effects = self.hasSideEffects();
|
||||
|
||||
var argv_list = ArrayList([]const u8).init(arena);
|
||||
|
||||
@ -118,7 +118,7 @@ pub fn defineCMacroRaw(self: *TranslateC, name_and_value: []const u8) void {
|
||||
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
const b = step.owner;
|
||||
const self = @fieldParentPtr(TranslateC, "step", step);
|
||||
const self: *TranslateC = @fieldParentPtr("step", step);
|
||||
|
||||
var argv_list = std.ArrayList([]const u8).init(b.allocator);
|
||||
try argv_list.append(b.graph.zig_exe);
|
||||
|
||||
@ -141,7 +141,7 @@ fn maybeUpdateName(wf: *WriteFile) void {
|
||||
fn make(step: *Step, prog_node: *std.Progress.Node) !void {
|
||||
_ = prog_node;
|
||||
const b = step.owner;
|
||||
const wf = @fieldParentPtr(WriteFile, "step", step);
|
||||
const wf: *WriteFile = @fieldParentPtr("step", step);
|
||||
|
||||
// Writing to source files is kind of an extra capability of this
|
||||
// WriteFile - arguably it should be a different step. But anyway here
|
||||
|
||||
@ -644,7 +644,7 @@ const PosixImpl = struct {
|
||||
};
|
||||
|
||||
// There's a wait queue on the address; get the queue head and tail.
|
||||
const head = @fieldParentPtr(Waiter, "node", entry_node);
|
||||
const head: *Waiter = @fieldParentPtr("node", entry_node);
|
||||
const tail = head.tail orelse unreachable;
|
||||
|
||||
// Push the waiter to the tail by replacing it and linking to the previous tail.
|
||||
@ -656,7 +656,7 @@ const PosixImpl = struct {
|
||||
fn remove(treap: *Treap, address: usize, max_waiters: usize) WaitList {
|
||||
// Find the wait queue associated with this address and get the head/tail if any.
|
||||
var entry = treap.getEntryFor(address);
|
||||
var queue_head = if (entry.node) |node| @fieldParentPtr(Waiter, "node", node) else null;
|
||||
var queue_head: ?*Waiter = if (entry.node) |node| @fieldParentPtr("node", node) else null;
|
||||
const queue_tail = if (queue_head) |head| head.tail else null;
|
||||
|
||||
// Once we're done updating the head, fix its tail pointer and update the treap's queue head as well.
|
||||
@ -699,7 +699,7 @@ const PosixImpl = struct {
|
||||
};
|
||||
|
||||
// The queue head and tail must exist if we're removing a queued waiter.
|
||||
const head = @fieldParentPtr(Waiter, "node", entry.node orelse unreachable);
|
||||
const head: *Waiter = @fieldParentPtr("node", entry.node orelse unreachable);
|
||||
const tail = head.tail orelse unreachable;
|
||||
|
||||
// A waiter with a previous link is never the head of the queue.
|
||||
|
||||
@ -88,8 +88,8 @@ pub fn spawn(pool: *Pool, comptime func: anytype, args: anytype) !void {
run_node: RunQueue.Node = .{ .data = .{ .runFn = runFn } },

fn runFn(runnable: *Runnable) void {
const run_node = @fieldParentPtr(RunQueue.Node, "data", runnable);
const closure = @fieldParentPtr(@This(), "run_node", run_node);
const run_node: *RunQueue.Node = @fieldParentPtr("data", runnable);
const closure: *@This() = @fieldParentPtr("run_node", run_node);
@call(.auto, func, closure.arguments);

// The thread pool's allocator is protected by the mutex.

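Aside (not part of this diff): the thread-pool closure above chains two lookups, first from the runnable back to its intrusive queue node, then from the node back to the enclosing closure. A self-contained sketch of that pattern with hypothetical types:

const std = @import("std");

const Runnable = struct { run_fn: *const fn (*Runnable) void };
const QueueNode = struct { data: Runnable };
const Closure = struct {
    run_node: QueueNode,
    x: i32,
};

fn run(runnable: *Runnable) void {
    // Two hops, each annotated with its parent pointer type.
    const node: *QueueNode = @fieldParentPtr("data", runnable);
    const closure: *Closure = @fieldParentPtr("run_node", node);
    closure.x += 1;
}

test "chained field-parent-pointer lookups" {
    var c: Closure = .{ .run_node = .{ .data = .{ .run_fn = run } }, .x = 0 };
    c.run_node.data.run_fn(&c.run_node.data);
    try std.testing.expectEqual(@as(i32, 1), c.x);
}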
@ -1150,8 +1150,8 @@ pub const siginfo_t = extern struct {
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with function name.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
handler: extern union {
|
||||
handler: ?handler_fn,
|
||||
|
||||
@ -690,8 +690,8 @@ pub const empty_sigset = sigset_t{ .__bits = [_]c_uint{0} ** _SIG_WORDS };
|
||||
pub const sig_atomic_t = c_int;
|
||||
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
/// signal handler
|
||||
handler: extern union {
|
||||
@ -702,7 +702,7 @@ pub const Sigaction = extern struct {
|
||||
mask: sigset_t,
|
||||
};
|
||||
|
||||
pub const sig_t = *const fn (c_int) callconv(.C) void;
|
||||
pub const sig_t = *const fn (i32) callconv(.C) void;
|
||||
|
||||
pub const SOCK = struct {
|
||||
pub const STREAM = 1;
|
||||
|
||||
@ -1171,8 +1171,8 @@ const NSIG = 32;
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
/// signal handler
|
||||
handler: extern union {
|
||||
|
||||
@ -501,7 +501,7 @@ pub const siginfo_t = extern struct {
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *allowzero anyopaque, ?*anyopaque) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
/// signal handler
|
||||
handler: extern union {
|
||||
|
||||
@ -864,8 +864,8 @@ pub const SIG = struct {
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
/// signal handler
|
||||
handler: extern union {
|
||||
|
||||
@ -842,8 +842,8 @@ pub const SIG = struct {
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
/// signal handler
|
||||
handler: extern union {
|
||||
|
||||
@ -874,8 +874,8 @@ pub const SIG = struct {
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
/// signal options
|
||||
flags: c_uint,
|
||||
|
||||
@ -2570,7 +2570,7 @@ fn resetSegfaultHandler() void {
|
||||
updateSegfaultHandler(&act) catch {};
|
||||
}
|
||||
|
||||
fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*const anyopaque) callconv(.C) noreturn {
|
||||
fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*anyopaque) callconv(.C) noreturn {
|
||||
// Reset to the default handler so that if a segfault happens in this handler it will crash
|
||||
// the process. Also when this handler returns, the original instruction will be repeated
|
||||
// and the resulting segfault will crash the process rather than continually dump stack traces.
|
||||
|
||||
@ -108,7 +108,7 @@ pub const ConnectionPool = struct {
|
||||
pool.mutex.lock();
|
||||
defer pool.mutex.unlock();
|
||||
|
||||
const node = @fieldParentPtr(Node, "data", connection);
|
||||
const node: *Node = @fieldParentPtr("data", connection);
|
||||
|
||||
pool.used.remove(node);
|
||||
|
||||
|
||||
@ -695,8 +695,8 @@ pub const SIG = struct {
|
||||
};
|
||||
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
handler: extern union {
|
||||
handler: ?handler_fn,
|
||||
|
||||
@ -4301,7 +4301,7 @@ pub const all_mask: sigset_t = [_]u32{0xffffffff} ** @typeInfo(sigset_t).Array.l
|
||||
pub const app_mask: sigset_t = [2]u32{ 0xfffffffc, 0x7fffffff } ++ [_]u32{0xffffffff} ** 30;
|
||||
|
||||
const k_sigaction_funcs = struct {
|
||||
const handler = ?*align(1) const fn (c_int) callconv(.C) void;
|
||||
const handler = ?*align(1) const fn (i32) callconv(.C) void;
|
||||
const restorer = *const fn () callconv(.C) void;
|
||||
};
|
||||
|
||||
@ -4328,8 +4328,8 @@ pub const k_sigaction = switch (native_arch) {
|
||||
|
||||
/// Renamed from `sigaction` to `Sigaction` to avoid conflict with the syscall.
|
||||
pub const Sigaction = extern struct {
|
||||
pub const handler_fn = *align(1) const fn (c_int) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
pub const handler_fn = *align(1) const fn (i32) callconv(.C) void;
|
||||
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;
|
||||
|
||||
handler: extern union {
|
||||
handler: ?handler_fn,
|
||||
|
||||
@ -186,8 +186,8 @@ pub const empty_sigset = 0;
pub const siginfo_t = c_long;
// TODO plan9 doesn't have sigaction_fn. Sigaction is not a union, but we include it here to be compatible.
pub const Sigaction = extern struct {
pub const handler_fn = *const fn (c_int) callconv(.C) void;
pub const sigaction_fn = *const fn (c_int, *const siginfo_t, ?*anyopaque) callconv(.C) void;
pub const handler_fn = *const fn (i32) callconv(.C) void;
pub const sigaction_fn = *const fn (i32, *const siginfo_t, ?*anyopaque) callconv(.C) void;

handler: extern union {
handler: ?handler_fn,

@ -597,4 +597,4 @@ fn maybeIgnoreSigpipe() void {
|
||||
}
|
||||
}
|
||||
|
||||
fn noopSigHandler(_: c_int) callconv(.C) void {}
|
||||
fn noopSigHandler(_: i32) callconv(.C) void {}
|
||||
|
||||
@ -1021,4 +1021,5 @@ test {
|
||||
_ = string_literal;
|
||||
_ = system;
|
||||
_ = target;
|
||||
_ = c_translation;
|
||||
}
|
||||
|
||||
@ -316,8 +316,7 @@ const ResultInfo = struct {
|
||||
};
|
||||
|
||||
/// Find the result type for a cast builtin given the result location.
|
||||
/// If the location does not have a known result type, emits an error on
|
||||
/// the given node.
|
||||
/// If the location does not have a known result type, returns `null`.
|
||||
fn resultType(rl: Loc, gz: *GenZir, node: Ast.Node.Index) !?Zir.Inst.Ref {
|
||||
return switch (rl) {
|
||||
.discard, .none, .ref, .inferred_ptr, .destructure => null,
|
||||
@ -330,6 +329,9 @@ const ResultInfo = struct {
|
||||
};
|
||||
}
|
||||
|
||||
/// Find the result type for a cast builtin given the result location.
|
||||
/// If the location does not have a known result type, emits an error on
|
||||
/// the given node.
|
||||
fn resultTypeForCast(rl: Loc, gz: *GenZir, node: Ast.Node.Index, builtin_name: []const u8) !Zir.Inst.Ref {
|
||||
const astgen = gz.astgen;
|
||||
if (try rl.resultType(gz, node)) |ty| return ty;
|
||||
@ -2786,7 +2788,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
|
||||
.atomic_load,
|
||||
.atomic_rmw,
|
||||
.mul_add,
|
||||
.field_parent_ptr,
|
||||
.max,
|
||||
.min,
|
||||
.c_import,
|
||||
@ -8853,6 +8854,7 @@ fn ptrCast(
|
||||
const node_datas = tree.nodes.items(.data);
|
||||
const node_tags = tree.nodes.items(.tag);
|
||||
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
var flags: Zir.Inst.FullPtrCastFlags = .{};
|
||||
|
||||
// Note that all pointer cast builtins have one parameter, so we only need
|
||||
@ -8870,36 +8872,62 @@ fn ptrCast(
|
||||
}
|
||||
|
||||
if (node_datas[node].lhs == 0) break; // 0 args
|
||||
if (node_datas[node].rhs != 0) break; // 2 args
|
||||
|
||||
const builtin_token = main_tokens[node];
|
||||
const builtin_name = tree.tokenSlice(builtin_token);
|
||||
const info = BuiltinFn.list.get(builtin_name) orelse break;
|
||||
if (info.param_count != 1) break;
|
||||
if (node_datas[node].rhs == 0) {
|
||||
// 1 arg
|
||||
if (info.param_count != 1) break;
|
||||
|
||||
switch (info.tag) {
|
||||
else => break,
|
||||
inline .ptr_cast,
|
||||
.align_cast,
|
||||
.addrspace_cast,
|
||||
.const_cast,
|
||||
.volatile_cast,
|
||||
=> |tag| {
|
||||
if (@field(flags, @tagName(tag))) {
|
||||
return astgen.failNode(node, "redundant {s}", .{builtin_name});
|
||||
}
|
||||
@field(flags, @tagName(tag)) = true;
|
||||
},
|
||||
switch (info.tag) {
|
||||
else => break,
|
||||
inline .ptr_cast,
|
||||
.align_cast,
|
||||
.addrspace_cast,
|
||||
.const_cast,
|
||||
.volatile_cast,
|
||||
=> |tag| {
|
||||
if (@field(flags, @tagName(tag))) {
|
||||
return astgen.failNode(node, "redundant {s}", .{builtin_name});
|
||||
}
|
||||
@field(flags, @tagName(tag)) = true;
|
||||
},
|
||||
}
|
||||
|
||||
node = node_datas[node].lhs;
|
||||
} else {
|
||||
// 2 args
|
||||
if (info.param_count != 2) break;
|
||||
|
||||
switch (info.tag) {
|
||||
else => break,
|
||||
.field_parent_ptr => {
|
||||
if (flags.ptr_cast) break;
|
||||
|
||||
const flags_int: FlagsInt = @bitCast(flags);
|
||||
const cursor = maybeAdvanceSourceCursorToMainToken(gz, root_node);
|
||||
const parent_ptr_type = try ri.rl.resultTypeForCast(gz, root_node, "@alignCast");
|
||||
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, node_datas[node].lhs);
|
||||
const field_ptr = try expr(gz, scope, .{ .rl = .none }, node_datas[node].rhs);
|
||||
try emitDbgStmt(gz, cursor);
|
||||
const result = try gz.addExtendedPayloadSmall(.field_parent_ptr, flags_int, Zir.Inst.FieldParentPtr{
|
||||
.src_node = gz.nodeIndexToRelative(node),
|
||||
.parent_ptr_type = parent_ptr_type,
|
||||
.field_name = field_name,
|
||||
.field_ptr = field_ptr,
|
||||
});
|
||||
return rvalue(gz, ri, result, root_node);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
node = node_datas[node].lhs;
|
||||
}
|
||||
|
||||
const flags_i: u5 = @bitCast(flags);
|
||||
assert(flags_i != 0);
|
||||
const flags_int: FlagsInt = @bitCast(flags);
|
||||
assert(flags_int != 0);
|
||||
|
||||
const ptr_only: Zir.Inst.FullPtrCastFlags = .{ .ptr_cast = true };
|
||||
if (flags_i == @as(u5, @bitCast(ptr_only))) {
|
||||
if (flags_int == @as(FlagsInt, @bitCast(ptr_only))) {
|
||||
// Special case: simpler representation
|
||||
return typeCast(gz, scope, ri, root_node, node, .ptr_cast, "@ptrCast");
|
||||
}
|
||||
@ -8908,12 +8936,12 @@ fn ptrCast(
|
||||
.const_cast = true,
|
||||
.volatile_cast = true,
|
||||
};
|
||||
if ((flags_i & ~@as(u5, @bitCast(no_result_ty_flags))) == 0) {
|
||||
if ((flags_int & ~@as(FlagsInt, @bitCast(no_result_ty_flags))) == 0) {
|
||||
// Result type not needed
|
||||
const cursor = maybeAdvanceSourceCursorToMainToken(gz, root_node);
|
||||
const operand = try expr(gz, scope, .{ .rl = .none }, node);
|
||||
try emitDbgStmt(gz, cursor);
|
||||
const result = try gz.addExtendedPayloadSmall(.ptr_cast_no_dest, flags_i, Zir.Inst.UnNode{
|
||||
const result = try gz.addExtendedPayloadSmall(.ptr_cast_no_dest, flags_int, Zir.Inst.UnNode{
|
||||
.node = gz.nodeIndexToRelative(root_node),
|
||||
.operand = operand,
|
||||
});
|
||||
@ -8926,7 +8954,7 @@ fn ptrCast(
|
||||
const result_type = try ri.rl.resultTypeForCast(gz, root_node, flags.needResultTypeBuiltinName());
|
||||
const operand = try expr(gz, scope, .{ .rl = .none }, node);
|
||||
try emitDbgStmt(gz, cursor);
|
||||
const result = try gz.addExtendedPayloadSmall(.ptr_cast_full, flags_i, Zir.Inst.BinNode{
|
||||
const result = try gz.addExtendedPayloadSmall(.ptr_cast_full, flags_int, Zir.Inst.BinNode{
|
||||
.node = gz.nodeIndexToRelative(root_node),
|
||||
.lhs = result_type,
|
||||
.rhs = operand,
|
||||
@ -9379,7 +9407,7 @@ fn builtinCall(
|
||||
try emitDbgNode(gz, node);
|
||||
|
||||
const result = try gz.addExtendedPayload(.error_cast, Zir.Inst.BinNode{
|
||||
.lhs = try ri.rl.resultTypeForCast(gz, node, "@errorCast"),
|
||||
.lhs = try ri.rl.resultTypeForCast(gz, node, builtin_name),
|
||||
.rhs = try expr(gz, scope, .{ .rl = .none }, params[0]),
|
||||
.node = gz.nodeIndexToRelative(node),
|
||||
});
|
||||
@ -9452,7 +9480,7 @@ fn builtinCall(
|
||||
},
|
||||
|
||||
.splat => {
|
||||
const result_type = try ri.rl.resultTypeForCast(gz, node, "@splat");
|
||||
const result_type = try ri.rl.resultTypeForCast(gz, node, builtin_name);
|
||||
const elem_type = try gz.addUnNode(.vector_elem_type, result_type, node);
|
||||
const scalar = try expr(gz, scope, .{ .rl = .{ .ty = elem_type } }, params[0]);
|
||||
const result = try gz.addPlNode(.splat, node, Zir.Inst.Bin{
|
||||
@ -9537,12 +9565,13 @@ fn builtinCall(
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.field_parent_ptr => {
|
||||
const parent_type = try typeExpr(gz, scope, params[0]);
|
||||
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[1]);
|
||||
const result = try gz.addPlNode(.field_parent_ptr, node, Zir.Inst.FieldParentPtr{
|
||||
.parent_type = parent_type,
|
||||
const parent_ptr_type = try ri.rl.resultTypeForCast(gz, node, builtin_name);
|
||||
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[0]);
|
||||
const result = try gz.addExtendedPayloadSmall(.field_parent_ptr, 0, Zir.Inst.FieldParentPtr{
|
||||
.src_node = gz.nodeIndexToRelative(node),
|
||||
.parent_ptr_type = parent_ptr_type,
|
||||
.field_name = field_name,
|
||||
.field_ptr = try expr(gz, scope, .{ .rl = .none }, params[2]),
|
||||
.field_ptr = try expr(gz, scope, .{ .rl = .none }, params[1]),
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
@ -11686,20 +11715,20 @@ const Scope = struct {
|
||||
fn cast(base: *Scope, comptime T: type) ?*T {
|
||||
if (T == Defer) {
|
||||
switch (base.tag) {
|
||||
.defer_normal, .defer_error => return @fieldParentPtr(T, "base", base),
|
||||
.defer_normal, .defer_error => return @alignCast(@fieldParentPtr("base", base)),
|
||||
else => return null,
|
||||
}
|
||||
}
|
||||
if (T == Namespace) {
|
||||
switch (base.tag) {
|
||||
.namespace => return @fieldParentPtr(T, "base", base),
|
||||
.namespace => return @alignCast(@fieldParentPtr("base", base)),
|
||||
else => return null,
|
||||
}
|
||||
}
|
||||
if (base.tag != T.base_tag)
|
||||
return null;
|
||||
|
||||
return @fieldParentPtr(T, "base", base);
|
||||
return @alignCast(@fieldParentPtr("base", base));
|
||||
}
|
||||
|
||||
fn parent(base: *Scope) ?*Scope {
|
||||
|
||||
@ -911,6 +911,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
|
||||
.work_item_id,
|
||||
.work_group_size,
|
||||
.work_group_id,
|
||||
.field_parent_ptr,
|
||||
=> {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
return false;
|
||||
@ -976,7 +977,6 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
|
||||
},
|
||||
.bit_offset_of,
|
||||
.offset_of,
|
||||
.field_parent_ptr,
|
||||
.has_decl,
|
||||
.has_field,
|
||||
.field,
|
||||
|
||||
@ -504,7 +504,7 @@ pub const list = list: {
"@fieldParentPtr",
.{
.tag = .field_parent_ptr,
.param_count = 3,
.param_count = 2,
},
},
.{

@ -940,9 +940,6 @@ pub const Inst = struct {
|
||||
/// The addend communicates the type of the builtin.
|
||||
/// The mulends need to be coerced to the same type.
|
||||
mul_add,
|
||||
/// Implements the `@fieldParentPtr` builtin.
|
||||
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
|
||||
field_parent_ptr,
|
||||
/// Implements the `@memcpy` builtin.
|
||||
/// Uses the `pl_node` union field with payload `Bin`.
|
||||
memcpy,
|
||||
@ -1230,7 +1227,6 @@ pub const Inst = struct {
|
||||
.atomic_store,
|
||||
.mul_add,
|
||||
.builtin_call,
|
||||
.field_parent_ptr,
|
||||
.max,
|
||||
.memcpy,
|
||||
.memset,
|
||||
@ -1522,7 +1518,6 @@ pub const Inst = struct {
|
||||
.atomic_rmw,
|
||||
.mul_add,
|
||||
.builtin_call,
|
||||
.field_parent_ptr,
|
||||
.max,
|
||||
.min,
|
||||
.c_import,
|
||||
@ -1794,7 +1789,6 @@ pub const Inst = struct {
|
||||
.atomic_store = .pl_node,
|
||||
.mul_add = .pl_node,
|
||||
.builtin_call = .pl_node,
|
||||
.field_parent_ptr = .pl_node,
|
||||
.max = .pl_node,
|
||||
.memcpy = .pl_node,
|
||||
.memset = .pl_node,
|
||||
@ -2064,6 +2058,12 @@ pub const Inst = struct {
|
||||
/// with a specific value. For instance, this is used for the capture of an `errdefer`.
|
||||
/// This should never appear in a body.
|
||||
value_placeholder,
|
||||
/// Implements the `@fieldParentPtr` builtin.
|
||||
/// `operand` is payload index to `FieldParentPtr`.
|
||||
/// `small` contains `FullPtrCastFlags`.
|
||||
/// Guaranteed to not have the `ptr_cast` flag.
|
||||
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
|
||||
field_parent_ptr,
|
||||
|
||||
pub const InstData = struct {
|
||||
opcode: Extended,
|
||||
@ -3363,9 +3363,14 @@ pub const Inst = struct {
};

pub const FieldParentPtr = struct {
parent_type: Ref,
src_node: i32,
parent_ptr_type: Ref,
field_name: Ref,
field_ptr: Ref,

pub fn src(self: FieldParentPtr) LazySrcLoc {
return LazySrcLoc.nodeOffset(self.src_node);
}
};

pub const Shuffle = struct {

@ -414,7 +414,7 @@ pub const Macros = struct {
}

pub fn WL_CONTAINER_OF(ptr: anytype, sample: anytype, comptime member: []const u8) @TypeOf(sample) {
return @fieldParentPtr(@TypeOf(sample.*), member, ptr);
return @fieldParentPtr(member, ptr);
}

/// A 2-argument function-like macro defined as #define FOO(A, B) (A)(B)

29 lib/zig.h
@ -130,22 +130,18 @@ typedef char bool;
|
||||
#define zig_restrict
|
||||
#endif
|
||||
|
||||
#if __STDC_VERSION__ >= 201112L
|
||||
#define zig_align(alignment) _Alignas(alignment)
|
||||
#elif zig_has_attribute(aligned)
|
||||
#define zig_align(alignment) __attribute__((aligned(alignment)))
|
||||
#elif _MSC_VER
|
||||
#define zig_align(alignment) __declspec(align(alignment))
|
||||
#else
|
||||
#define zig_align zig_align_unavailable
|
||||
#endif
|
||||
|
||||
#if zig_has_attribute(aligned)
|
||||
#define zig_under_align(alignment) __attribute__((aligned(alignment)))
|
||||
#elif _MSC_VER
|
||||
#define zig_under_align(alignment) zig_align(alignment)
|
||||
#define zig_under_align(alignment) __declspec(align(alignment))
|
||||
#else
|
||||
#define zig_align zig_align_unavailable
|
||||
#define zig_under_align zig_align_unavailable
|
||||
#endif
|
||||
|
||||
#if __STDC_VERSION__ >= 201112L
|
||||
#define zig_align(alignment) _Alignas(alignment)
|
||||
#else
|
||||
#define zig_align(alignment) zig_under_align(alignment)
|
||||
#endif
|
||||
|
||||
#if zig_has_attribute(aligned)
|
||||
@ -165,11 +161,14 @@ typedef char bool;
|
||||
#endif
|
||||
|
||||
#if zig_has_attribute(section)
|
||||
#define zig_linksection(name, def, ...) def __attribute__((section(name)))
|
||||
#define zig_linksection(name) __attribute__((section(name)))
|
||||
#define zig_linksection_fn zig_linksection
|
||||
#elif _MSC_VER
|
||||
#define zig_linksection(name, def, ...) __pragma(section(name, __VA_ARGS__)) __declspec(allocate(name)) def
|
||||
#define zig_linksection(name) __pragma(section(name, read, write)) __declspec(allocate(name))
|
||||
#define zig_linksection_fn(name) __pragma(section(name, read, execute)) __declspec(code_seg(name))
|
||||
#else
|
||||
#define zig_linksection(name, def, ...) zig_linksection_unavailable
|
||||
#define zig_linksection(name) zig_linksection_unavailable
|
||||
#define zig_linksection_fn zig_linksection
|
||||
#endif
|
||||
|
||||
#if zig_has_builtin(unreachable) || defined(zig_gnuc)
|
||||
|
||||
@ -3451,19 +3451,24 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
|
||||
|
||||
var dg: c_codegen.DeclGen = .{
|
||||
.gpa = gpa,
|
||||
.module = module,
|
||||
.zcu = module,
|
||||
.mod = module.namespacePtr(decl.src_namespace).file_scope.mod,
|
||||
.error_msg = null,
|
||||
.pass = .{ .decl = decl_index },
|
||||
.is_naked_fn = false,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctypes = .{},
|
||||
.ctype_pool = c_codegen.CType.Pool.empty,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = .{},
|
||||
.aligned_anon_decls = .{},
|
||||
};
|
||||
defer {
|
||||
dg.ctypes.deinit(gpa);
|
||||
dg.fwd_decl.deinit();
|
||||
fwd_decl.* = dg.fwd_decl.moveToUnmanaged();
|
||||
fwd_decl.shrinkAndFree(gpa, fwd_decl.items.len);
|
||||
dg.ctype_pool.deinit(gpa);
|
||||
dg.scratch.deinit(gpa);
|
||||
}
|
||||
try dg.ctype_pool.init(gpa);
|
||||
|
||||
c_codegen.genHeader(&dg) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
@ -3472,9 +3477,6 @@ fn processOneJob(comp: *Compilation, job: Job, prog_node: *std.Progress.Node) !v
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
fwd_decl.* = dg.fwd_decl.moveToUnmanaged();
|
||||
fwd_decl.shrinkAndFree(gpa, fwd_decl.items.len);
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
@ -712,7 +712,7 @@ pub const Key = union(enum) {
|
||||
pub fn fieldName(
|
||||
self: AnonStructType,
|
||||
ip: *const InternPool,
|
||||
index: u32,
|
||||
index: usize,
|
||||
) OptionalNullTerminatedString {
|
||||
if (self.names.len == 0)
|
||||
return .none;
|
||||
@ -3879,20 +3879,13 @@ pub const Alignment = enum(u6) {
none = std.math.maxInt(u6),
_,

pub fn toByteUnitsOptional(a: Alignment) ?u64 {
pub fn toByteUnits(a: Alignment) ?u64 {
return switch (a) {
.none => null,
else => @as(u64, 1) << @intFromEnum(a),
};
}

pub fn toByteUnits(a: Alignment, default: u64) u64 {
return switch (a) {
.none => default,
else => @as(u64, 1) << @intFromEnum(a),
};
}

pub fn fromByteUnits(n: u64) Alignment {
if (n == 0) return .none;
assert(std.math.isPowerOfTwo(n));
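Aside (not part of this diff): the call-site churn elsewhere in this commit, where toByteUnits(0) becomes toByteUnits() orelse 0, follows from this signature change. A simplified stand-in for the InternPool enum, only to illustrate the new optional-returning shape:

const std = @import("std");

const Alignment = enum(u6) {
    none = std.math.maxInt(u6),
    _,

    // Returns null for the `none` sentinel instead of taking a default argument.
    pub fn toByteUnits(a: Alignment) ?u64 {
        return switch (a) {
            .none => null,
            else => @as(u64, 1) << @intFromEnum(a),
        };
    }
};

test "orelse replaces the old default parameter" {
    const a: Alignment = @enumFromInt(3); // log2 alignment 3, i.e. 8 bytes
    try std.testing.expectEqual(@as(u64, 8), a.toByteUnits() orelse 0);
    try std.testing.expectEqual(@as(u64, 0), Alignment.none.toByteUnits() orelse 0);
}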
@ -5170,48 +5163,55 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
.ptr => |ptr| {
|
||||
const ptr_type = ip.indexToKey(ptr.ty).ptr_type;
|
||||
assert(ptr_type.flags.size != .Slice);
|
||||
switch (ptr.addr) {
|
||||
.decl => |decl| ip.items.appendAssumeCapacity(.{
|
||||
ip.items.appendAssumeCapacity(switch (ptr.addr) {
|
||||
.decl => |decl| .{
|
||||
.tag = .ptr_decl,
|
||||
.data = try ip.addExtra(gpa, PtrDecl{
|
||||
.ty = ptr.ty,
|
||||
.decl = decl,
|
||||
}),
|
||||
}),
|
||||
.comptime_alloc => |alloc_index| ip.items.appendAssumeCapacity(.{
|
||||
},
|
||||
.comptime_alloc => |alloc_index| .{
|
||||
.tag = .ptr_comptime_alloc,
|
||||
.data = try ip.addExtra(gpa, PtrComptimeAlloc{
|
||||
.ty = ptr.ty,
|
||||
.index = alloc_index,
|
||||
}),
|
||||
}),
|
||||
.anon_decl => |anon_decl| ip.items.appendAssumeCapacity(
|
||||
if (ptrsHaveSameAlignment(ip, ptr.ty, ptr_type, anon_decl.orig_ty)) .{
|
||||
},
|
||||
.anon_decl => |anon_decl| if (ptrsHaveSameAlignment(ip, ptr.ty, ptr_type, anon_decl.orig_ty)) item: {
|
||||
if (ptr.ty != anon_decl.orig_ty) {
|
||||
_ = ip.map.pop();
|
||||
var new_key = key;
|
||||
new_key.ptr.addr.anon_decl.orig_ty = ptr.ty;
|
||||
const new_gop = try ip.map.getOrPutAdapted(gpa, new_key, adapter);
|
||||
if (new_gop.found_existing) return @enumFromInt(new_gop.index);
|
||||
}
|
||||
break :item .{
|
||||
.tag = .ptr_anon_decl,
|
||||
.data = try ip.addExtra(gpa, PtrAnonDecl{
|
||||
.ty = ptr.ty,
|
||||
.val = anon_decl.val,
|
||||
}),
|
||||
} else .{
|
||||
.tag = .ptr_anon_decl_aligned,
|
||||
.data = try ip.addExtra(gpa, PtrAnonDeclAligned{
|
||||
.ty = ptr.ty,
|
||||
.val = anon_decl.val,
|
||||
.orig_ty = anon_decl.orig_ty,
|
||||
}),
|
||||
},
|
||||
),
|
||||
.comptime_field => |field_val| {
|
||||
};
|
||||
} else .{
|
||||
.tag = .ptr_anon_decl_aligned,
|
||||
.data = try ip.addExtra(gpa, PtrAnonDeclAligned{
|
||||
.ty = ptr.ty,
|
||||
.val = anon_decl.val,
|
||||
.orig_ty = anon_decl.orig_ty,
|
||||
}),
|
||||
},
|
||||
.comptime_field => |field_val| item: {
|
||||
assert(field_val != .none);
|
||||
ip.items.appendAssumeCapacity(.{
|
||||
break :item .{
|
||||
.tag = .ptr_comptime_field,
|
||||
.data = try ip.addExtra(gpa, PtrComptimeField{
|
||||
.ty = ptr.ty,
|
||||
.field_val = field_val,
|
||||
}),
|
||||
});
|
||||
};
|
||||
},
|
||||
.int, .eu_payload, .opt_payload => |base| {
|
||||
.int, .eu_payload, .opt_payload => |base| item: {
|
||||
switch (ptr.addr) {
|
||||
.int => assert(ip.typeOf(base) == .usize_type),
|
||||
.eu_payload => assert(ip.indexToKey(
|
||||
@ -5222,7 +5222,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
) == .opt_type),
|
||||
else => unreachable,
|
||||
}
|
||||
ip.items.appendAssumeCapacity(.{
|
||||
break :item .{
|
||||
.tag = switch (ptr.addr) {
|
||||
.int => .ptr_int,
|
||||
.eu_payload => .ptr_eu_payload,
|
||||
@ -5233,9 +5233,9 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
.ty = ptr.ty,
|
||||
.base = base,
|
||||
}),
|
||||
});
|
||||
};
|
||||
},
|
||||
.elem, .field => |base_index| {
|
||||
.elem, .field => |base_index| item: {
|
||||
const base_ptr_type = ip.indexToKey(ip.typeOf(base_index.base)).ptr_type;
|
||||
switch (ptr.addr) {
|
||||
.elem => assert(base_ptr_type.flags.size == .Many),
|
||||
@ -5272,7 +5272,7 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
} });
|
||||
assert(!(try ip.map.getOrPutAdapted(gpa, key, adapter)).found_existing);
|
||||
try ip.items.ensureUnusedCapacity(gpa, 1);
|
||||
ip.items.appendAssumeCapacity(.{
|
||||
break :item .{
|
||||
.tag = switch (ptr.addr) {
|
||||
.elem => .ptr_elem,
|
||||
.field => .ptr_field,
|
||||
@ -5283,9 +5283,9 @@ pub fn get(ip: *InternPool, gpa: Allocator, key: Key) Allocator.Error!Index {
|
||||
.base = base_index.base,
|
||||
.index = index_index,
|
||||
}),
|
||||
});
|
||||
};
|
||||
},
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
.opt => |opt| {
|
||||
|
||||
@ -299,7 +299,7 @@ const ValueArena = struct {
/// and must live until the matching call to release().
pub fn acquire(self: *ValueArena, child_allocator: Allocator, out_arena_allocator: *std.heap.ArenaAllocator) Allocator {
if (self.state_acquired) |state_acquired| {
return @fieldParentPtr(std.heap.ArenaAllocator, "state", state_acquired).allocator();
return @as(*std.heap.ArenaAllocator, @fieldParentPtr("state", state_acquired)).allocator();
}

out_arena_allocator.* = self.state.promote(child_allocator);
@ -309,7 +309,7 @@ const ValueArena = struct {

/// Releases the allocator acquired by `acquire`. `arena_allocator` must match the one passed to `acquire`.
pub fn release(self: *ValueArena, arena_allocator: *std.heap.ArenaAllocator) void {
if (@fieldParentPtr(std.heap.ArenaAllocator, "state", self.state_acquired.?) == arena_allocator) {
if (@as(*std.heap.ArenaAllocator, @fieldParentPtr("state", self.state_acquired.?)) == arena_allocator) {
self.state = self.state_acquired.?.*;
self.state_acquired = null;
}
@ -5846,7 +5846,7 @@ pub fn intBitsForValue(mod: *Module, val: Value, sign: bool) u16 {
|
||||
return @as(u16, @intCast(big.bitCountTwosComp()));
|
||||
},
|
||||
.lazy_align => |lazy_ty| {
|
||||
return Type.smallestUnsignedBits(Type.fromInterned(lazy_ty).abiAlignment(mod).toByteUnits(0)) + @intFromBool(sign);
|
||||
return Type.smallestUnsignedBits(Type.fromInterned(lazy_ty).abiAlignment(mod).toByteUnits() orelse 0) + @intFromBool(sign);
|
||||
},
|
||||
.lazy_size => |lazy_ty| {
|
||||
return Type.smallestUnsignedBits(Type.fromInterned(lazy_ty).abiSize(mod)) + @intFromBool(sign);
|
||||
|
||||
298 src/Sema.zig
@ -1131,7 +1131,6 @@ fn analyzeBodyInner(
|
||||
.atomic_rmw => try sema.zirAtomicRmw(block, inst),
|
||||
.mul_add => try sema.zirMulAdd(block, inst),
|
||||
.builtin_call => try sema.zirBuiltinCall(block, inst),
|
||||
.field_parent_ptr => try sema.zirFieldParentPtr(block, inst),
|
||||
.@"resume" => try sema.zirResume(block, inst),
|
||||
.@"await" => try sema.zirAwait(block, inst),
|
||||
.for_len => try sema.zirForLen(block, inst),
|
||||
@ -1296,6 +1295,7 @@ fn analyzeBodyInner(
|
||||
continue;
|
||||
},
|
||||
.value_placeholder => unreachable, // never appears in a body
|
||||
.field_parent_ptr => try sema.zirFieldParentPtr(block, extended),
|
||||
};
|
||||
},
|
||||
|
||||
@ -6508,7 +6508,7 @@ fn zirSetAlignStack(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.Inst
|
||||
const alignment = try sema.resolveAlign(block, operand_src, extra.operand);
|
||||
if (alignment.order(Alignment.fromNonzeroByteUnits(256)).compare(.gt)) {
|
||||
return sema.fail(block, src, "attempt to @setAlignStack({d}); maximum is 256", .{
|
||||
alignment.toByteUnitsOptional().?,
|
||||
alignment.toByteUnits().?,
|
||||
});
|
||||
}
|
||||
|
||||
@ -17699,19 +17699,20 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
.ty = new_decl_ty.toIntern(),
|
||||
.storage = .{ .elems = param_vals },
|
||||
} });
|
||||
const ptr_ty = (try sema.ptrType(.{
|
||||
const slice_ty = (try sema.ptrType(.{
|
||||
.child = param_info_ty.toIntern(),
|
||||
.flags = .{
|
||||
.size = .Slice,
|
||||
.is_const = true,
|
||||
},
|
||||
})).toIntern();
|
||||
const manyptr_ty = Type.fromInterned(slice_ty).slicePtrFieldType(mod).toIntern();
|
||||
break :v try mod.intern(.{ .slice = .{
|
||||
.ty = ptr_ty,
|
||||
.ty = slice_ty,
|
||||
.ptr = try mod.intern(.{ .ptr = .{
|
||||
.ty = Type.fromInterned(ptr_ty).slicePtrFieldType(mod).toIntern(),
|
||||
.ty = manyptr_ty,
|
||||
.addr = .{ .anon_decl = .{
|
||||
.orig_ty = ptr_ty,
|
||||
.orig_ty = manyptr_ty,
|
||||
.val = new_decl_val,
|
||||
} },
|
||||
} }),
|
||||
@ -17804,7 +17805,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
},
|
||||
.Pointer => {
|
||||
const info = ty.ptrInfo(mod);
|
||||
const alignment = if (info.flags.alignment.toByteUnitsOptional()) |alignment|
|
||||
const alignment = if (info.flags.alignment.toByteUnits()) |alignment|
|
||||
try mod.intValue(Type.comptime_int, alignment)
|
||||
else
|
||||
try Type.fromInterned(info.child).lazyAbiAlignment(mod);
|
||||
@ -18031,12 +18032,13 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
.ty = array_errors_ty.toIntern(),
|
||||
.storage = .{ .elems = vals },
|
||||
} });
|
||||
const manyptr_errors_ty = slice_errors_ty.slicePtrFieldType(mod).toIntern();
|
||||
break :v try mod.intern(.{ .slice = .{
|
||||
.ty = slice_errors_ty.toIntern(),
|
||||
.ptr = try mod.intern(.{ .ptr = .{
|
||||
.ty = slice_errors_ty.slicePtrFieldType(mod).toIntern(),
|
||||
.ty = manyptr_errors_ty,
|
||||
.addr = .{ .anon_decl = .{
|
||||
.orig_ty = slice_errors_ty.toIntern(),
|
||||
.orig_ty = manyptr_errors_ty,
|
||||
.val = new_decl_val,
|
||||
} },
|
||||
} }),
|
||||
@ -18155,20 +18157,21 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
.ty = fields_array_ty.toIntern(),
|
||||
.storage = .{ .elems = enum_field_vals },
|
||||
} });
|
||||
const ptr_ty = (try sema.ptrType(.{
|
||||
const slice_ty = (try sema.ptrType(.{
|
||||
.child = enum_field_ty.toIntern(),
|
||||
.flags = .{
|
||||
.size = .Slice,
|
||||
.is_const = true,
|
||||
},
|
||||
})).toIntern();
|
||||
const manyptr_ty = Type.fromInterned(slice_ty).slicePtrFieldType(mod).toIntern();
|
||||
break :v try mod.intern(.{ .slice = .{
|
||||
.ty = ptr_ty,
|
||||
.ty = slice_ty,
|
||||
.ptr = try mod.intern(.{ .ptr = .{
|
||||
.ty = Type.fromInterned(ptr_ty).slicePtrFieldType(mod).toIntern(),
|
||||
.ty = manyptr_ty,
|
||||
.addr = .{ .anon_decl = .{
|
||||
.val = new_decl_val,
|
||||
.orig_ty = ptr_ty,
|
||||
.orig_ty = manyptr_ty,
|
||||
} },
|
||||
} }),
|
||||
.len = (try mod.intValue(Type.usize, enum_field_vals.len)).toIntern(),
|
||||
@ -18279,7 +18282,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
// type: type,
|
||||
field_ty,
|
||||
// alignment: comptime_int,
|
||||
(try mod.intValue(Type.comptime_int, alignment.toByteUnits(0))).toIntern(),
|
||||
(try mod.intValue(Type.comptime_int, alignment.toByteUnits() orelse 0)).toIntern(),
|
||||
};
|
||||
field_val.* = try mod.intern(.{ .aggregate = .{
|
||||
.ty = union_field_ty.toIntern(),
|
||||
@ -18296,19 +18299,20 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
.ty = array_fields_ty.toIntern(),
|
||||
.storage = .{ .elems = union_field_vals },
|
||||
} });
|
||||
const ptr_ty = (try sema.ptrType(.{
|
||||
const slice_ty = (try sema.ptrType(.{
|
||||
.child = union_field_ty.toIntern(),
|
||||
.flags = .{
|
||||
.size = .Slice,
|
||||
.is_const = true,
|
||||
},
|
||||
})).toIntern();
|
||||
const manyptr_ty = Type.fromInterned(slice_ty).slicePtrFieldType(mod).toIntern();
|
||||
break :v try mod.intern(.{ .slice = .{
|
||||
.ty = ptr_ty,
|
||||
.ty = slice_ty,
|
||||
.ptr = try mod.intern(.{ .ptr = .{
|
||||
.ty = Type.fromInterned(ptr_ty).slicePtrFieldType(mod).toIntern(),
|
||||
.ty = manyptr_ty,
|
||||
.addr = .{ .anon_decl = .{
|
||||
.orig_ty = ptr_ty,
|
||||
.orig_ty = manyptr_ty,
|
||||
.val = new_decl_val,
|
||||
} },
|
||||
} }),
|
||||
@ -18436,7 +18440,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
// is_comptime: bool,
|
||||
Value.makeBool(is_comptime).toIntern(),
|
||||
// alignment: comptime_int,
|
||||
(try mod.intValue(Type.comptime_int, Type.fromInterned(field_ty).abiAlignment(mod).toByteUnits(0))).toIntern(),
|
||||
(try mod.intValue(Type.comptime_int, Type.fromInterned(field_ty).abiAlignment(mod).toByteUnits() orelse 0)).toIntern(),
|
||||
};
|
||||
struct_field_val.* = try mod.intern(.{ .aggregate = .{
|
||||
.ty = struct_field_ty.toIntern(),
|
||||
@ -18505,7 +18509,7 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
// is_comptime: bool,
|
||||
Value.makeBool(field_is_comptime).toIntern(),
|
||||
// alignment: comptime_int,
|
||||
(try mod.intValue(Type.comptime_int, alignment.toByteUnits(0))).toIntern(),
|
||||
(try mod.intValue(Type.comptime_int, alignment.toByteUnits() orelse 0)).toIntern(),
|
||||
};
|
||||
field_val.* = try mod.intern(.{ .aggregate = .{
|
||||
.ty = struct_field_ty.toIntern(),
|
||||
@ -18523,19 +18527,20 @@ fn zirTypeInfo(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
|
||||
.ty = array_fields_ty.toIntern(),
|
||||
.storage = .{ .elems = struct_field_vals },
|
||||
} });
|
||||
const ptr_ty = (try sema.ptrType(.{
|
||||
const slice_ty = (try sema.ptrType(.{
|
||||
.child = struct_field_ty.toIntern(),
|
||||
.flags = .{
|
||||
.size = .Slice,
|
||||
.is_const = true,
|
||||
},
|
||||
})).toIntern();
|
||||
const manyptr_ty = Type.fromInterned(slice_ty).slicePtrFieldType(mod).toIntern();
|
||||
break :v try mod.intern(.{ .slice = .{
|
||||
.ty = ptr_ty,
|
||||
.ty = slice_ty,
|
||||
.ptr = try mod.intern(.{ .ptr = .{
|
||||
.ty = Type.fromInterned(ptr_ty).slicePtrFieldType(mod).toIntern(),
|
||||
.ty = manyptr_ty,
|
||||
.addr = .{ .anon_decl = .{
|
||||
.orig_ty = ptr_ty,
|
||||
.orig_ty = manyptr_ty,
|
||||
.val = new_decl_val,
|
||||
} },
|
||||
} }),
|
||||
@ -18661,19 +18666,20 @@ fn typeInfoDecls(
|
||||
.ty = array_decl_ty.toIntern(),
|
||||
.storage = .{ .elems = decl_vals.items },
|
||||
} });
|
||||
const ptr_ty = (try sema.ptrType(.{
|
||||
const slice_ty = (try sema.ptrType(.{
|
||||
.child = declaration_ty.toIntern(),
|
||||
.flags = .{
|
||||
.size = .Slice,
|
||||
.is_const = true,
|
||||
},
|
||||
})).toIntern();
|
||||
const manyptr_ty = Type.fromInterned(slice_ty).slicePtrFieldType(mod).toIntern();
|
||||
return try mod.intern(.{ .slice = .{
|
||||
.ty = ptr_ty,
|
||||
.ty = slice_ty,
|
||||
.ptr = try mod.intern(.{ .ptr = .{
|
||||
.ty = Type.fromInterned(ptr_ty).slicePtrFieldType(mod).toIntern(),
|
||||
.ty = manyptr_ty,
|
||||
.addr = .{ .anon_decl = .{
|
||||
.orig_ty = ptr_ty,
|
||||
.orig_ty = manyptr_ty,
|
||||
.val = new_decl_val,
|
||||
} },
|
||||
} }),
|
||||
@ -19803,8 +19809,18 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
|
||||
break :blk @intCast(host_size);
|
||||
} else 0;
|
||||
|
||||
if (host_size != 0 and bit_offset >= host_size * 8) {
|
||||
return sema.fail(block, bitoffset_src, "bit offset starts after end of host integer", .{});
|
||||
if (host_size != 0) {
|
||||
if (bit_offset >= host_size * 8) {
|
||||
return sema.fail(block, bitoffset_src, "packed type '{}' at bit offset {} starts {} bits after the end of a {} byte host integer", .{
|
||||
elem_ty.fmt(mod), bit_offset, bit_offset - host_size * 8, host_size,
|
||||
});
|
||||
}
|
||||
const elem_bit_size = try elem_ty.bitSizeAdvanced(mod, sema);
|
||||
if (elem_bit_size > host_size * 8 - bit_offset) {
|
||||
return sema.fail(block, bitoffset_src, "packed type '{}' at bit offset {} ends {} bits after the end of a {} byte host integer", .{
|
||||
elem_ty.fmt(mod), bit_offset, elem_bit_size - (host_size * 8 - bit_offset), host_size,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (elem_ty.zigTypeTag(mod) == .Fn) {
|
||||
@ -22552,7 +22568,7 @@ fn zirPtrFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
|
||||
try sema.addSafetyCheck(block, src, is_non_zero, .cast_to_null);
|
||||
}
|
||||
if (ptr_align.compare(.gt, .@"1")) {
|
||||
const align_bytes_minus_1 = ptr_align.toByteUnitsOptional().? - 1;
|
||||
const align_bytes_minus_1 = ptr_align.toByteUnits().? - 1;
|
||||
const align_minus_1 = Air.internedToRef((try mod.intValue(Type.usize, align_bytes_minus_1)).toIntern());
|
||||
const remainder = try block.addBinOp(.bit_and, operand_coerced, align_minus_1);
|
||||
const is_aligned = try block.addBinOp(.cmp_eq, remainder, .zero_usize);
|
||||
@ -22572,7 +22588,7 @@ fn zirPtrFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
|
||||
try sema.addSafetyCheck(block, src, is_non_zero, .cast_to_null);
|
||||
}
|
||||
if (ptr_align.compare(.gt, .@"1")) {
|
||||
const align_bytes_minus_1 = ptr_align.toByteUnitsOptional().? - 1;
|
||||
const align_bytes_minus_1 = ptr_align.toByteUnits().? - 1;
|
||||
const align_minus_1 = Air.internedToRef((try mod.intValue(Type.usize, align_bytes_minus_1)).toIntern());
|
||||
const remainder = try block.addBinOp(.bit_and, elem_coerced, align_minus_1);
|
||||
const is_aligned = try block.addBinOp(.cmp_eq, remainder, .zero_usize);
|
||||
@ -22741,10 +22757,8 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData
|
||||
}
|
||||
|
||||
fn zirPtrCastFull(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(
|
||||
@typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?,
|
||||
@truncate(extended.small),
|
||||
));
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
|
||||
const extra = sema.code.extraData(Zir.Inst.BinNode, extended.operand).data;
|
||||
const src = LazySrcLoc.nodeOffset(extra.node);
|
||||
const operand_src: LazySrcLoc = .{ .node_offset_ptrcast_operand = extra.node };
|
||||
@ -22757,6 +22771,7 @@ fn zirPtrCastFull(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDa
|
||||
operand,
|
||||
operand_src,
|
||||
dest_ty,
|
||||
flags.needResultTypeBuiltinName(),
|
||||
);
|
||||
}
|
||||
|
||||
@ -22775,6 +22790,7 @@ fn zirPtrCast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
|
||||
operand,
|
||||
operand_src,
|
||||
dest_ty,
|
||||
"@ptrCast",
|
||||
);
|
||||
}
|
||||
|
||||
@ -22786,6 +22802,7 @@ fn ptrCastFull(
|
||||
operand: Air.Inst.Ref,
|
||||
operand_src: LazySrcLoc,
|
||||
dest_ty: Type,
|
||||
operation: []const u8,
|
||||
) CompileError!Air.Inst.Ref {
|
||||
const mod = sema.mod;
|
||||
const operand_ty = sema.typeOf(operand);
|
||||
@ -22818,7 +22835,7 @@ fn ptrCastFull(
|
||||
};
|
||||
const dest_elem_size = Type.fromInterned(dest_info.child).abiSize(mod);
|
||||
if (src_elem_size != dest_elem_size) {
|
||||
return sema.fail(block, src, "TODO: implement @ptrCast between slices changing the length", .{});
|
||||
return sema.fail(block, src, "TODO: implement {s} between slices changing the length", .{operation});
|
||||
}
|
||||
}
|
||||
|
||||
@ -22967,13 +22984,13 @@ fn ptrCastFull(
|
||||
if (!flags.align_cast) {
|
||||
if (dest_align.compare(.gt, src_align)) {
|
||||
return sema.failWithOwnedErrorMsg(block, msg: {
|
||||
const msg = try sema.errMsg(block, src, "cast increases pointer alignment", .{});
|
||||
const msg = try sema.errMsg(block, src, "{s} increases pointer alignment", .{operation});
|
||||
errdefer msg.destroy(sema.gpa);
|
||||
try sema.errNote(block, operand_src, msg, "'{}' has alignment '{d}'", .{
|
||||
operand_ty.fmt(mod), src_align.toByteUnits(0),
|
||||
operand_ty.fmt(mod), src_align.toByteUnits() orelse 0,
|
||||
});
|
||||
try sema.errNote(block, src, msg, "'{}' has alignment '{d}'", .{
|
||||
dest_ty.fmt(mod), dest_align.toByteUnits(0),
|
||||
dest_ty.fmt(mod), dest_align.toByteUnits() orelse 0,
|
||||
});
|
||||
try sema.errNote(block, src, msg, "use @alignCast to assert pointer alignment", .{});
|
||||
break :msg msg;
|
||||
@ -22984,7 +23001,7 @@ fn ptrCastFull(
|
||||
if (!flags.addrspace_cast) {
|
||||
if (src_info.flags.address_space != dest_info.flags.address_space) {
|
||||
return sema.failWithOwnedErrorMsg(block, msg: {
|
||||
const msg = try sema.errMsg(block, src, "cast changes pointer address space", .{});
|
||||
const msg = try sema.errMsg(block, src, "{s} changes pointer address space", .{operation});
|
||||
errdefer msg.destroy(sema.gpa);
|
||||
try sema.errNote(block, operand_src, msg, "'{}' has address space '{s}'", .{
|
||||
operand_ty.fmt(mod), @tagName(src_info.flags.address_space),
|
||||
@ -23014,7 +23031,7 @@ fn ptrCastFull(
|
||||
if (!flags.const_cast) {
|
||||
if (src_info.flags.is_const and !dest_info.flags.is_const) {
|
||||
return sema.failWithOwnedErrorMsg(block, msg: {
|
||||
const msg = try sema.errMsg(block, src, "cast discards const qualifier", .{});
|
||||
const msg = try sema.errMsg(block, src, "{s} discards const qualifier", .{operation});
|
||||
errdefer msg.destroy(sema.gpa);
|
||||
try sema.errNote(block, src, msg, "use @constCast to discard const qualifier", .{});
|
||||
break :msg msg;
|
||||
@ -23025,7 +23042,7 @@ fn ptrCastFull(
|
||||
if (!flags.volatile_cast) {
|
||||
if (src_info.flags.is_volatile and !dest_info.flags.is_volatile) {
|
||||
return sema.failWithOwnedErrorMsg(block, msg: {
|
||||
const msg = try sema.errMsg(block, src, "cast discards volatile qualifier", .{});
|
||||
const msg = try sema.errMsg(block, src, "{s} discards volatile qualifier", .{operation});
|
||||
errdefer msg.destroy(sema.gpa);
|
||||
try sema.errNote(block, src, msg, "use @volatileCast to discard volatile qualifier", .{});
|
||||
break :msg msg;
|
||||
@ -23067,7 +23084,7 @@ fn ptrCastFull(
|
||||
if (!dest_align.check(addr)) {
|
||||
return sema.fail(block, operand_src, "pointer address 0x{X} is not aligned to {d} bytes", .{
|
||||
addr,
|
||||
dest_align.toByteUnitsOptional().?,
|
||||
dest_align.toByteUnits().?,
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -23110,7 +23127,7 @@ fn ptrCastFull(
|
||||
dest_align.compare(.gt, src_align) and
|
||||
try sema.typeHasRuntimeBits(Type.fromInterned(dest_info.child)))
|
||||
{
|
||||
const align_bytes_minus_1 = dest_align.toByteUnitsOptional().? - 1;
|
||||
const align_bytes_minus_1 = dest_align.toByteUnits().? - 1;
|
||||
const align_minus_1 = Air.internedToRef((try mod.intValue(Type.usize, align_bytes_minus_1)).toIntern());
|
||||
const ptr_int = try block.addUnOp(.int_from_ptr, ptr);
|
||||
const remainder = try block.addBinOp(.bit_and, ptr_int, align_minus_1);
|
||||
@ -23171,10 +23188,8 @@ fn ptrCastFull(
|
||||
|
||||
fn zirPtrCastNoDest(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
|
||||
const mod = sema.mod;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(
|
||||
@typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?,
|
||||
@truncate(extended.small),
|
||||
));
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
|
||||
const extra = sema.code.extraData(Zir.Inst.UnNode, extended.operand).data;
|
||||
const src = LazySrcLoc.nodeOffset(extra.node);
|
||||
const operand_src: LazySrcLoc = .{ .node_offset_ptrcast_operand = extra.node };
|
||||
@ -24843,107 +24858,151 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
|
||||
);
|
||||
}
|
||||
|
||||
fn zirFieldParentPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
|
||||
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||
const extra = sema.code.extraData(Zir.Inst.FieldParentPtr, inst_data.payload_index).data;
|
||||
const src = inst_data.src();
|
||||
const ty_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
|
||||
const name_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = inst_data.src_node };
|
||||
const ptr_src: LazySrcLoc = .{ .node_offset_builtin_call_arg2 = inst_data.src_node };
|
||||
|
||||
const parent_ty = try sema.resolveType(block, ty_src, extra.parent_type);
|
||||
const field_name = try sema.resolveConstStringIntern(block, name_src, extra.field_name, .{
|
||||
.needed_comptime_reason = "field name must be comptime-known",
|
||||
});
|
||||
const field_ptr = try sema.resolveInst(extra.field_ptr);
|
||||
const field_ptr_ty = sema.typeOf(field_ptr);
|
||||
fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
|
||||
const mod = sema.mod;
|
||||
const ip = &mod.intern_pool;
|
||||
|
||||
if (parent_ty.zigTypeTag(mod) != .Struct and parent_ty.zigTypeTag(mod) != .Union) {
|
||||
return sema.fail(block, ty_src, "expected struct or union type, found '{}'", .{parent_ty.fmt(sema.mod)});
|
||||
const extra = sema.code.extraData(Zir.Inst.FieldParentPtr, extended.operand).data;
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
|
||||
assert(!flags.ptr_cast);
|
||||
const inst_src = extra.src();
|
||||
const field_name_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = extra.src_node };
|
||||
const field_ptr_src: LazySrcLoc = .{ .node_offset_builtin_call_arg1 = extra.src_node };
|
||||
|
||||
const parent_ptr_ty = try sema.resolveDestType(block, inst_src, extra.parent_ptr_type, .remove_eu, "@fieldParentPtr");
|
||||
try sema.checkPtrType(block, inst_src, parent_ptr_ty, true);
|
||||
const parent_ptr_info = parent_ptr_ty.ptrInfo(mod);
|
||||
if (parent_ptr_info.flags.size != .One) {
|
||||
return sema.fail(block, inst_src, "expected single pointer type, found '{}'", .{parent_ptr_ty.fmt(sema.mod)});
|
||||
}
|
||||
const parent_ty = Type.fromInterned(parent_ptr_info.child);
|
||||
switch (parent_ty.zigTypeTag(mod)) {
|
||||
.Struct, .Union => {},
|
||||
else => return sema.fail(block, inst_src, "expected pointer to struct or union type, found '{}'", .{parent_ptr_ty.fmt(sema.mod)}),
|
||||
}
|
||||
try sema.resolveTypeLayout(parent_ty);
|
||||
|
||||
const field_name = try sema.resolveConstStringIntern(block, field_name_src, extra.field_name, .{
|
||||
.needed_comptime_reason = "field name must be comptime-known",
|
||||
});
|
||||
const field_index = switch (parent_ty.zigTypeTag(mod)) {
|
||||
.Struct => blk: {
|
||||
if (parent_ty.isTuple(mod)) {
|
||||
if (ip.stringEqlSlice(field_name, "len")) {
|
||||
return sema.fail(block, src, "cannot get @fieldParentPtr of 'len' field of tuple", .{});
|
||||
return sema.fail(block, inst_src, "cannot get @fieldParentPtr of 'len' field of tuple", .{});
|
||||
}
|
||||
break :blk try sema.tupleFieldIndex(block, parent_ty, field_name, name_src);
|
||||
break :blk try sema.tupleFieldIndex(block, parent_ty, field_name, field_name_src);
|
||||
} else {
|
||||
break :blk try sema.structFieldIndex(block, parent_ty, field_name, name_src);
|
||||
break :blk try sema.structFieldIndex(block, parent_ty, field_name, field_name_src);
|
||||
}
|
||||
},
|
||||
.Union => try sema.unionFieldIndex(block, parent_ty, field_name, name_src),
|
||||
.Union => try sema.unionFieldIndex(block, parent_ty, field_name, field_name_src),
|
||||
else => unreachable,
|
||||
};
|
||||
|
||||
if (parent_ty.zigTypeTag(mod) == .Struct and parent_ty.structFieldIsComptime(field_index, mod)) {
|
||||
return sema.fail(block, src, "cannot get @fieldParentPtr of a comptime field", .{});
|
||||
return sema.fail(block, field_name_src, "cannot get @fieldParentPtr of a comptime field", .{});
|
||||
}
|
||||
|
||||
try sema.checkPtrOperand(block, ptr_src, field_ptr_ty);
|
||||
const field_ptr_ty_info = field_ptr_ty.ptrInfo(mod);
|
||||
const field_ptr = try sema.resolveInst(extra.field_ptr);
|
||||
const field_ptr_ty = sema.typeOf(field_ptr);
|
||||
try sema.checkPtrOperand(block, field_ptr_src, field_ptr_ty);
|
||||
const field_ptr_info = field_ptr_ty.ptrInfo(mod);
|
||||
|
||||
var ptr_ty_data: InternPool.Key.PtrType = .{
|
||||
.child = parent_ty.structFieldType(field_index, mod).toIntern(),
|
||||
var actual_parent_ptr_info: InternPool.Key.PtrType = .{
|
||||
.child = parent_ty.toIntern(),
|
||||
.flags = .{
|
||||
.address_space = field_ptr_ty_info.flags.address_space,
|
||||
.is_const = field_ptr_ty_info.flags.is_const,
|
||||
.alignment = try parent_ptr_ty.ptrAlignmentAdvanced(mod, sema),
|
||||
.is_const = field_ptr_info.flags.is_const,
|
||||
.is_volatile = field_ptr_info.flags.is_volatile,
|
||||
.is_allowzero = field_ptr_info.flags.is_allowzero,
|
||||
.address_space = field_ptr_info.flags.address_space,
|
||||
},
|
||||
.packed_offset = parent_ptr_info.packed_offset,
|
||||
};
|
||||
const field_ty = parent_ty.structFieldType(field_index, mod);
|
||||
var actual_field_ptr_info: InternPool.Key.PtrType = .{
|
||||
.child = field_ty.toIntern(),
|
||||
.flags = .{
|
||||
.alignment = try field_ptr_ty.ptrAlignmentAdvanced(mod, sema),
|
||||
.is_const = field_ptr_info.flags.is_const,
|
||||
.is_volatile = field_ptr_info.flags.is_volatile,
|
||||
.is_allowzero = field_ptr_info.flags.is_allowzero,
|
||||
.address_space = field_ptr_info.flags.address_space,
|
||||
},
|
||||
.packed_offset = field_ptr_info.packed_offset,
|
||||
};
|
||||
switch (parent_ty.containerLayout(mod)) {
|
||||
.auto => {
|
||||
actual_parent_ptr_info.flags.alignment = actual_field_ptr_info.flags.alignment.minStrict(
|
||||
if (mod.typeToStruct(parent_ty)) |struct_obj| try sema.structFieldAlignment(
|
||||
struct_obj.fieldAlign(ip, field_index),
|
||||
field_ty,
|
||||
struct_obj.layout,
|
||||
) else if (mod.typeToUnion(parent_ty)) |union_obj|
|
||||
try sema.unionFieldAlignment(union_obj, field_index)
|
||||
else
|
||||
actual_field_ptr_info.flags.alignment,
|
||||
);
|
||||
|
||||
if (parent_ty.containerLayout(mod) == .@"packed") {
|
||||
return sema.fail(block, src, "TODO handle packed structs/unions with @fieldParentPtr", .{});
|
||||
} else {
|
||||
ptr_ty_data.flags.alignment = blk: {
|
||||
if (mod.typeToStruct(parent_ty)) |struct_type| {
|
||||
break :blk struct_type.fieldAlign(ip, field_index);
|
||||
} else if (mod.typeToUnion(parent_ty)) |union_obj| {
|
||||
break :blk union_obj.fieldAlign(ip, field_index);
|
||||
} else {
|
||||
break :blk .none;
|
||||
}
|
||||
};
|
||||
actual_parent_ptr_info.packed_offset = .{ .bit_offset = 0, .host_size = 0 };
|
||||
actual_field_ptr_info.packed_offset = .{ .bit_offset = 0, .host_size = 0 };
|
||||
},
|
||||
.@"extern" => {
|
||||
const field_offset = parent_ty.structFieldOffset(field_index, mod);
|
||||
actual_parent_ptr_info.flags.alignment = actual_field_ptr_info.flags.alignment.minStrict(if (field_offset > 0)
|
||||
Alignment.fromLog2Units(@ctz(field_offset))
|
||||
else
|
||||
actual_field_ptr_info.flags.alignment);
|
||||
|
||||
actual_parent_ptr_info.packed_offset = .{ .bit_offset = 0, .host_size = 0 };
|
||||
actual_field_ptr_info.packed_offset = .{ .bit_offset = 0, .host_size = 0 };
|
||||
},
|
||||
.@"packed" => {
|
||||
const byte_offset = std.math.divExact(u32, @abs(@as(i32, actual_parent_ptr_info.packed_offset.bit_offset) +
|
||||
(if (mod.typeToStruct(parent_ty)) |struct_obj| mod.structPackedFieldBitOffset(struct_obj, field_index) else 0) -
|
||||
actual_field_ptr_info.packed_offset.bit_offset), 8) catch
|
||||
return sema.fail(block, inst_src, "pointer bit-offset mismatch", .{});
|
||||
actual_parent_ptr_info.flags.alignment = actual_field_ptr_info.flags.alignment.minStrict(if (byte_offset > 0)
|
||||
Alignment.fromLog2Units(@ctz(byte_offset))
|
||||
else
|
||||
actual_field_ptr_info.flags.alignment);
|
||||
},
|
||||
}
|
||||
|
||||
const actual_field_ptr_ty = try sema.ptrType(ptr_ty_data);
|
||||
const casted_field_ptr = try sema.coerce(block, actual_field_ptr_ty, field_ptr, ptr_src);
|
||||
|
||||
ptr_ty_data.child = parent_ty.toIntern();
|
||||
const result_ptr = try sema.ptrType(ptr_ty_data);
|
||||
|
||||
if (try sema.resolveDefinedValue(block, src, casted_field_ptr)) |field_ptr_val| {
|
||||
const actual_field_ptr_ty = try sema.ptrType(actual_field_ptr_info);
|
||||
const casted_field_ptr = try sema.coerce(block, actual_field_ptr_ty, field_ptr, field_ptr_src);
|
||||
const actual_parent_ptr_ty = try sema.ptrType(actual_parent_ptr_info);
|
||||
const result = if (try sema.resolveDefinedValue(block, field_ptr_src, casted_field_ptr)) |field_ptr_val| result: {
|
||||
const field = switch (ip.indexToKey(field_ptr_val.toIntern())) {
|
||||
.ptr => |ptr| switch (ptr.addr) {
|
||||
.field => |field| field,
|
||||
else => null,
|
||||
},
|
||||
else => null,
|
||||
} orelse return sema.fail(block, ptr_src, "pointer value not based on parent struct", .{});
|
||||
} orelse return sema.fail(block, field_ptr_src, "pointer value not based on parent struct", .{});
|
||||
|
||||
if (field.index != field_index) {
|
||||
return sema.fail(block, src, "field '{}' has index '{d}' but pointer value is index '{d}' of struct '{}'", .{
|
||||
return sema.fail(block, inst_src, "field '{}' has index '{d}' but pointer value is index '{d}' of struct '{}'", .{
|
||||
field_name.fmt(ip), field_index, field.index, parent_ty.fmt(sema.mod),
|
||||
});
|
||||
}
|
||||
return Air.internedToRef(field.base);
|
||||
}
|
||||
|
||||
try sema.requireRuntimeBlock(block, src, ptr_src);
|
||||
try sema.queueFullTypeResolution(result_ptr);
|
||||
return block.addInst(.{
|
||||
.tag = .field_parent_ptr,
|
||||
.data = .{ .ty_pl = .{
|
||||
.ty = Air.internedToRef(result_ptr.toIntern()),
|
||||
.payload = try block.sema.addExtra(Air.FieldParentPtr{
|
||||
.field_ptr = casted_field_ptr,
|
||||
.field_index = @intCast(field_index),
|
||||
}),
|
||||
} },
|
||||
});
|
||||
break :result try sema.coerce(block, actual_parent_ptr_ty, Air.internedToRef(field.base), inst_src);
|
||||
} else result: {
|
||||
try sema.requireRuntimeBlock(block, inst_src, field_ptr_src);
|
||||
try sema.queueFullTypeResolution(parent_ty);
|
||||
break :result try block.addInst(.{
|
||||
.tag = .field_parent_ptr,
|
||||
.data = .{ .ty_pl = .{
|
||||
.ty = Air.internedToRef(actual_parent_ptr_ty.toIntern()),
|
||||
.payload = try block.sema.addExtra(Air.FieldParentPtr{
|
||||
.field_ptr = casted_field_ptr,
|
||||
.field_index = @intCast(field_index),
|
||||
}),
|
||||
} },
|
||||
});
|
||||
};
|
||||
return sema.ptrCastFull(block, flags, inst_src, result, inst_src, parent_ptr_ty, "@fieldParentPtr");
|
||||
}
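The rework above takes the parent pointer type from the result location rather than from an explicit type argument, then finishes through the same ptrCastFull path used by the other pointer casts. A minimal usage sketch of the new builtin form, assuming hypothetical names Demo and parentOfB that are not part of this change:

const std = @import("std");

const Demo = extern struct {
    a: u32,
    b: u64,
};

fn parentOfB(b_ptr: *u64) *Demo {
    // The parent pointer type now comes from the result location
    // (here, the return type), so only the field name and the field
    // pointer are passed to the builtin.
    return @fieldParentPtr("b", b_ptr);
}

test "recover parent pointer from field pointer" {
    var d: Demo = .{ .a = 1, .b = 2 };
    d.b += 1;
    try std.testing.expectEqual(&d, parentOfB(&d.b));
}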
|
||||
|
||||
fn zirMinMax(
|
||||
@ -27837,7 +27896,7 @@ fn structFieldPtrByIndex(
|
||||
const elem_size_bits = Type.fromInterned(ptr_ty_data.child).bitSize(mod);
|
||||
if (elem_size_bytes * 8 == elem_size_bits) {
|
||||
const byte_offset = ptr_ty_data.packed_offset.bit_offset / 8;
|
||||
const new_align: Alignment = @enumFromInt(@ctz(byte_offset | parent_align.toByteUnitsOptional().?));
|
||||
const new_align: Alignment = @enumFromInt(@ctz(byte_offset | parent_align.toByteUnits().?));
|
||||
assert(new_align != .none);
|
||||
ptr_ty_data.flags.alignment = new_align;
|
||||
ptr_ty_data.packed_offset = .{ .host_size = 0, .bit_offset = 0 };
|
||||
@ -29132,7 +29191,7 @@ fn coerceExtra(
|
||||
.addr = .{ .int = if (dest_info.flags.alignment != .none)
|
||||
(try mod.intValue(
|
||||
Type.usize,
|
||||
dest_info.flags.alignment.toByteUnitsOptional().?,
|
||||
dest_info.flags.alignment.toByteUnits().?,
|
||||
)).toIntern()
|
||||
else
|
||||
try mod.intern_pool.getCoercedInts(
|
||||
@ -29800,7 +29859,7 @@ const InMemoryCoercionResult = union(enum) {
|
||||
},
|
||||
.ptr_alignment => |pair| {
|
||||
try sema.errNote(block, src, msg, "pointer alignment '{d}' cannot cast into pointer alignment '{d}'", .{
|
||||
pair.actual.toByteUnits(0), pair.wanted.toByteUnits(0),
|
||||
pair.actual.toByteUnits() orelse 0, pair.wanted.toByteUnits() orelse 0,
|
||||
});
|
||||
break;
|
||||
},
|
||||
@ -36066,7 +36125,7 @@ fn resolveUnionLayout(sema: *Sema, ty: Type) CompileError!void {
|
||||
// alignment is greater.
|
||||
var size: u64 = 0;
|
||||
var padding: u32 = 0;
|
||||
if (tag_align.compare(.gte, max_align)) {
|
||||
if (tag_align.order(max_align).compare(.gte)) {
|
||||
// {Tag, Payload}
|
||||
size += tag_size;
|
||||
size = max_align.forward(size);
|
||||
@ -36077,7 +36136,10 @@ fn resolveUnionLayout(sema: *Sema, ty: Type) CompileError!void {
|
||||
} else {
|
||||
// {Payload, Tag}
|
||||
size += max_size;
|
||||
size = tag_align.forward(size);
|
||||
size = switch (mod.getTarget().ofmt) {
|
||||
.c => max_align,
|
||||
else => tag_align,
|
||||
}.forward(size);
|
||||
size += tag_size;
|
||||
const prev_size = size;
|
||||
size = max_align.forward(size);
|
||||
|
||||
@ -176,7 +176,7 @@ pub fn toBigIntAdvanced(
|
||||
if (opt_sema) |sema| try sema.resolveTypeLayout(Type.fromInterned(ty));
|
||||
const x = switch (int.storage) {
|
||||
else => unreachable,
|
||||
.lazy_align => Type.fromInterned(ty).abiAlignment(mod).toByteUnits(0),
|
||||
.lazy_align => Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0,
|
||||
.lazy_size => Type.fromInterned(ty).abiSize(mod),
|
||||
};
|
||||
return BigIntMutable.init(&space.limbs, x).toConst();
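The recurring call-site change in these hunks is an API migration: toByteUnits now returns an optional instead of taking a default value, so the old `toByteUnits(0)` becomes `toByteUnits() orelse 0` and the old `toByteUnitsOptional().?` becomes `toByteUnits().?`. A simplified stand-in, not the real InternPool.Alignment implementation, showing the two call patterns:

const std = @import("std");

// Simplified stand-in for an alignment stored in log2 byte units,
// where `.none` means "no explicit alignment".
const Alignment = enum(u6) {
    none = std.math.maxInt(u6),
    _,

    fn toByteUnits(a: Alignment) ?u64 {
        return if (a == .none) null else @as(u64, 1) << @intFromEnum(a);
    }
};

test "optional byte units" {
    const explicit: Alignment = @enumFromInt(3); // 2^3 = 8-byte alignment
    const unset: Alignment = .none;

    // Old `toByteUnits(0)` becomes `toByteUnits() orelse 0`.
    try std.testing.expectEqual(@as(u64, 8), explicit.toByteUnits() orelse 0);
    try std.testing.expectEqual(@as(u64, 0), unset.toByteUnits() orelse 0);

    // Old `toByteUnitsOptional().?` becomes `toByteUnits().?`.
    try std.testing.expectEqual(@as(u64, 8), explicit.toByteUnits().?);
}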
|
||||
@ -237,9 +237,9 @@ pub fn getUnsignedIntAdvanced(val: Value, mod: *Module, opt_sema: ?*Sema) !?u64
|
||||
.u64 => |x| x,
|
||||
.i64 => |x| std.math.cast(u64, x),
|
||||
.lazy_align => |ty| if (opt_sema) |sema|
|
||||
(try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar.toByteUnits(0)
|
||||
(try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar.toByteUnits() orelse 0
|
||||
else
|
||||
Type.fromInterned(ty).abiAlignment(mod).toByteUnits(0),
|
||||
Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0,
|
||||
.lazy_size => |ty| if (opt_sema) |sema|
|
||||
(try Type.fromInterned(ty).abiSizeAdvanced(mod, .{ .sema = sema })).scalar
|
||||
else
|
||||
@ -289,7 +289,7 @@ pub fn toSignedInt(val: Value, mod: *Module) i64 {
|
||||
.big_int => |big_int| big_int.to(i64) catch unreachable,
|
||||
.i64 => |x| x,
|
||||
.u64 => |x| @intCast(x),
|
||||
.lazy_align => |ty| @intCast(Type.fromInterned(ty).abiAlignment(mod).toByteUnits(0)),
|
||||
.lazy_align => |ty| @intCast(Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0),
|
||||
.lazy_size => |ty| @intCast(Type.fromInterned(ty).abiSize(mod)),
|
||||
},
|
||||
else => unreachable,
|
||||
@ -497,7 +497,7 @@ pub fn writeToPackedMemory(
|
||||
inline .u64, .i64 => |int| std.mem.writeVarPackedInt(buffer, bit_offset, bits, int, endian),
|
||||
.big_int => |bigint| bigint.writePackedTwosComplement(buffer, bit_offset, bits, endian),
|
||||
.lazy_align => |lazy_align| {
|
||||
const num = Type.fromInterned(lazy_align).abiAlignment(mod).toByteUnits(0);
|
||||
const num = Type.fromInterned(lazy_align).abiAlignment(mod).toByteUnits() orelse 0;
|
||||
std.mem.writeVarPackedInt(buffer, bit_offset, bits, num, endian);
|
||||
},
|
||||
.lazy_size => |lazy_size| {
|
||||
@ -890,7 +890,7 @@ pub fn toFloat(val: Value, comptime T: type, mod: *Module) T {
|
||||
}
|
||||
return @floatFromInt(x);
|
||||
},
|
||||
.lazy_align => |ty| @floatFromInt(Type.fromInterned(ty).abiAlignment(mod).toByteUnits(0)),
|
||||
.lazy_align => |ty| @floatFromInt(Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0),
|
||||
.lazy_size => |ty| @floatFromInt(Type.fromInterned(ty).abiSize(mod)),
|
||||
},
|
||||
.float => |float| switch (float.storage) {
|
||||
@ -1529,9 +1529,9 @@ pub fn floatFromIntScalar(val: Value, float_ty: Type, mod: *Module, opt_sema: ?*
|
||||
},
|
||||
inline .u64, .i64 => |x| floatFromIntInner(x, float_ty, mod),
|
||||
.lazy_align => |ty| if (opt_sema) |sema| {
|
||||
return floatFromIntInner((try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar.toByteUnits(0), float_ty, mod);
|
||||
return floatFromIntInner((try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar.toByteUnits() orelse 0, float_ty, mod);
|
||||
} else {
|
||||
return floatFromIntInner(Type.fromInterned(ty).abiAlignment(mod).toByteUnits(0), float_ty, mod);
|
||||
return floatFromIntInner(Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0, float_ty, mod);
|
||||
},
|
||||
.lazy_size => |ty| if (opt_sema) |sema| {
|
||||
return floatFromIntInner((try Type.fromInterned(ty).abiSizeAdvanced(mod, .{ .sema = sema })).scalar, float_ty, mod);
|
||||
|
||||
@ -1296,7 +1296,7 @@ fn genFunc(func: *CodeGen) InnerError!void {
|
||||
// subtract it from the current stack pointer
|
||||
try prologue.append(.{ .tag = .i32_sub, .data = .{ .tag = {} } });
|
||||
// Get negative stack alignment
|
||||
try prologue.append(.{ .tag = .i32_const, .data = .{ .imm32 = @as(i32, @intCast(func.stack_alignment.toByteUnitsOptional().?)) * -1 } });
|
||||
try prologue.append(.{ .tag = .i32_const, .data = .{ .imm32 = @as(i32, @intCast(func.stack_alignment.toByteUnits().?)) * -1 } });
|
||||
// Bitwise-and the value to get the new stack pointer to ensure the pointers are aligned with the abi alignment
|
||||
try prologue.append(.{ .tag = .i32_and, .data = .{ .tag = {} } });
|
||||
// store the current stack pointer as the bottom, which will be used to calculate all stack pointer offsets
|
||||
@ -2107,7 +2107,7 @@ fn airRet(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
});
|
||||
try func.addMemArg(Mir.Inst.Tag.fromOpcode(opcode), .{
|
||||
.offset = operand.offset(),
|
||||
.alignment = @intCast(scalar_type.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(scalar_type.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
},
|
||||
else => try func.emitWValue(operand),
|
||||
@ -2384,7 +2384,7 @@ fn store(func: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerE
|
||||
try func.mir_extra.appendSlice(func.gpa, &[_]u32{
|
||||
std.wasm.simdOpcode(.v128_store),
|
||||
offset + lhs.offset(),
|
||||
@intCast(ty.abiAlignment(mod).toByteUnits(0)),
|
||||
@intCast(ty.abiAlignment(mod).toByteUnits() orelse 0),
|
||||
});
|
||||
return func.addInst(.{ .tag = .simd_prefix, .data = .{ .payload = extra_index } });
|
||||
},
|
||||
@ -2440,7 +2440,7 @@ fn store(func: *CodeGen, lhs: WValue, rhs: WValue, ty: Type, offset: u32) InnerE
|
||||
Mir.Inst.Tag.fromOpcode(opcode),
|
||||
.{
|
||||
.offset = offset + lhs.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
},
|
||||
);
|
||||
}
|
||||
@ -2500,7 +2500,7 @@ fn load(func: *CodeGen, operand: WValue, ty: Type, offset: u32) InnerError!WValu
|
||||
try func.mir_extra.appendSlice(func.gpa, &[_]u32{
|
||||
std.wasm.simdOpcode(.v128_load),
|
||||
offset + operand.offset(),
|
||||
@intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
@intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
try func.addInst(.{ .tag = .simd_prefix, .data = .{ .payload = extra_index } });
|
||||
return WValue{ .stack = {} };
|
||||
@ -2518,7 +2518,7 @@ fn load(func: *CodeGen, operand: WValue, ty: Type, offset: u32) InnerError!WValu
|
||||
Mir.Inst.Tag.fromOpcode(opcode),
|
||||
.{
|
||||
.offset = offset + operand.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
},
|
||||
);
|
||||
|
||||
@ -3456,7 +3456,7 @@ fn intStorageAsI32(storage: InternPool.Key.Int.Storage, mod: *Module) i32 {
|
||||
.i64 => |x| @as(i32, @intCast(x)),
|
||||
.u64 => |x| @as(i32, @bitCast(@as(u32, @intCast(x)))),
|
||||
.big_int => unreachable,
|
||||
.lazy_align => |ty| @as(i32, @bitCast(@as(u32, @intCast(Type.fromInterned(ty).abiAlignment(mod).toByteUnits(0))))),
|
||||
.lazy_align => |ty| @as(i32, @bitCast(@as(u32, @intCast(Type.fromInterned(ty).abiAlignment(mod).toByteUnits() orelse 0)))),
|
||||
.lazy_size => |ty| @as(i32, @bitCast(@as(u32, @intCast(Type.fromInterned(ty).abiSize(mod))))),
|
||||
};
|
||||
}
|
||||
@ -4204,7 +4204,7 @@ fn airIsErr(func: *CodeGen, inst: Air.Inst.Index, opcode: wasm.Opcode) InnerErro
|
||||
if (pl_ty.hasRuntimeBitsIgnoreComptime(mod)) {
|
||||
try func.addMemArg(.i32_load16_u, .{
|
||||
.offset = operand.offset() + @as(u32, @intCast(errUnionErrorOffset(pl_ty, mod))),
|
||||
.alignment = @intCast(Type.anyerror.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(Type.anyerror.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
}
|
||||
|
||||
@ -5141,7 +5141,7 @@ fn airSplat(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
try func.mir_extra.appendSlice(func.gpa, &[_]u32{
|
||||
opcode,
|
||||
operand.offset(),
|
||||
@intCast(elem_ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
@intCast(elem_ty.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
try func.addInst(.{ .tag = .simd_prefix, .data = .{ .payload = extra_index } });
|
||||
try func.addLabel(.local_set, result.local.value);
|
||||
@ -6552,7 +6552,7 @@ fn lowerTry(
|
||||
const err_offset = @as(u32, @intCast(errUnionErrorOffset(pl_ty, mod)));
|
||||
try func.addMemArg(.i32_load16_u, .{
|
||||
.offset = err_union.offset() + err_offset,
|
||||
.alignment = @intCast(Type.anyerror.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(Type.anyerror.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
}
|
||||
try func.addTag(.i32_eqz);
|
||||
@ -7499,7 +7499,7 @@ fn airCmpxchg(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
else => |size| return func.fail("TODO: implement `@cmpxchg` for types with abi size '{d}'", .{size}),
|
||||
}, .{
|
||||
.offset = ptr_operand.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
try func.addLabel(.local_tee, val_local.local.value);
|
||||
_ = try func.cmp(.stack, expected_val, ty, .eq);
|
||||
@ -7561,7 +7561,7 @@ fn airAtomicLoad(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
try func.emitWValue(ptr);
|
||||
try func.addAtomicMemArg(tag, .{
|
||||
.offset = ptr.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
} else {
|
||||
_ = try func.load(ptr, ty, 0);
|
||||
@ -7622,7 +7622,7 @@ fn airAtomicRmw(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
},
|
||||
.{
|
||||
.offset = ptr.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
},
|
||||
);
|
||||
const select_res = try func.allocLocal(ty);
|
||||
@ -7682,7 +7682,7 @@ fn airAtomicRmw(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
};
|
||||
try func.addAtomicMemArg(tag, .{
|
||||
.offset = ptr.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
const result = try WValue.toLocal(.stack, func, ty);
|
||||
return func.finishAir(inst, result, &.{ pl_op.operand, extra.operand });
|
||||
@ -7781,7 +7781,7 @@ fn airAtomicStore(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
|
||||
try func.lowerToStack(operand);
|
||||
try func.addAtomicMemArg(tag, .{
|
||||
.offset = ptr.offset(),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?),
|
||||
.alignment = @intCast(ty.abiAlignment(mod).toByteUnits().?),
|
||||
});
|
||||
} else {
|
||||
try func.store(ptr, operand, ty, 0);
|
||||
|
||||
@ -7920,17 +7920,14 @@ fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32
|
||||
const mod = self.bin_file.comp.module.?;
|
||||
const ptr_field_ty = self.typeOfIndex(inst);
|
||||
const ptr_container_ty = self.typeOf(operand);
|
||||
const ptr_container_ty_info = ptr_container_ty.ptrInfo(mod);
|
||||
const container_ty = ptr_container_ty.childType(mod);
|
||||
|
||||
const field_offset: i32 = if (mod.typeToPackedStruct(container_ty)) |struct_obj|
|
||||
if (ptr_field_ty.ptrInfo(mod).packed_offset.host_size == 0)
|
||||
@divExact(mod.structPackedFieldBitOffset(struct_obj, index) +
|
||||
ptr_container_ty_info.packed_offset.bit_offset, 8)
|
||||
else
|
||||
0
|
||||
else
|
||||
@intCast(container_ty.structFieldOffset(index, mod));
|
||||
const field_off: i32 = switch (container_ty.containerLayout(mod)) {
|
||||
.auto, .@"extern" => @intCast(container_ty.structFieldOffset(index, mod)),
|
||||
.@"packed" => @divExact(@as(i32, ptr_container_ty.ptrInfo(mod).packed_offset.bit_offset) +
|
||||
(if (mod.typeToStruct(container_ty)) |struct_obj| mod.structPackedFieldBitOffset(struct_obj, index) else 0) -
|
||||
ptr_field_ty.ptrInfo(mod).packed_offset.bit_offset, 8),
|
||||
};
|
||||
|
||||
const src_mcv = try self.resolveInst(operand);
|
||||
const dst_mcv = if (switch (src_mcv) {
|
||||
@ -7938,7 +7935,7 @@ fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32
|
||||
.register, .register_offset => self.reuseOperand(inst, operand, 0, src_mcv),
|
||||
else => false,
|
||||
}) src_mcv else try self.copyToRegisterWithInstTracking(inst, ptr_field_ty, src_mcv);
|
||||
return dst_mcv.offset(field_offset);
|
||||
return dst_mcv.offset(field_off);
|
||||
}
|
||||
|
||||
fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
@ -7958,11 +7955,8 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
|
||||
|
||||
const src_mcv = try self.resolveInst(operand);
|
||||
const field_off: u32 = switch (container_ty.containerLayout(mod)) {
|
||||
.auto, .@"extern" => @intCast(container_ty.structFieldOffset(index, mod) * 8),
|
||||
.@"packed" => if (mod.typeToStruct(container_ty)) |struct_type|
|
||||
mod.structPackedFieldBitOffset(struct_type, index)
|
||||
else
|
||||
0,
|
||||
.auto, .@"extern" => @intCast(container_ty.structFieldOffset(extra.field_index, mod) * 8),
|
||||
.@"packed" => if (mod.typeToStruct(container_ty)) |struct_obj| mod.structPackedFieldBitOffset(struct_obj, extra.field_index) else 0,
|
||||
};
|
||||
|
||||
switch (src_mcv) {
|
||||
@ -8239,7 +8233,12 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
|
||||
const inst_ty = self.typeOfIndex(inst);
|
||||
const parent_ty = inst_ty.childType(mod);
|
||||
const field_offset: i32 = @intCast(parent_ty.structFieldOffset(extra.field_index, mod));
|
||||
const field_off: i32 = switch (parent_ty.containerLayout(mod)) {
|
||||
.auto, .@"extern" => @intCast(parent_ty.structFieldOffset(extra.field_index, mod)),
|
||||
.@"packed" => @divExact(@as(i32, inst_ty.ptrInfo(mod).packed_offset.bit_offset) +
|
||||
(if (mod.typeToStruct(parent_ty)) |struct_obj| mod.structPackedFieldBitOffset(struct_obj, extra.field_index) else 0) -
|
||||
self.typeOf(extra.field_ptr).ptrInfo(mod).packed_offset.bit_offset, 8),
|
||||
};
|
||||
|
||||
const src_mcv = try self.resolveInst(extra.field_ptr);
|
||||
const dst_mcv = if (src_mcv.isRegisterOffset() and
|
||||
@ -8247,7 +8246,7 @@ fn airFieldParentPtr(self: *Self, inst: Air.Inst.Index) !void {
|
||||
src_mcv
|
||||
else
|
||||
try self.copyToRegisterWithInstTracking(inst, inst_ty, src_mcv);
|
||||
const result = dst_mcv.offset(-field_offset);
|
||||
const result = dst_mcv.offset(-field_off);
|
||||
return self.finishAir(inst, result, .{ extra.field_ptr, .none, .none });
|
||||
}
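For packed containers, the backend changes above recover a byte offset from three bit offsets: the container pointer's packed bit offset, plus the field's bit offset within the container, minus the field pointer's packed bit offset, divided exactly by 8. A standalone sketch of that arithmetic with made-up offsets:

const std = @import("std");

fn packedFieldByteOffset(parent_ptr_bit_off: i32, field_bit_off: i32, field_ptr_bit_off: i32) i32 {
    // Mirrors the @divExact computation used above: the bit-offset
    // difference is required to be a whole number of bytes.
    return @divExact(parent_ptr_bit_off + field_bit_off - field_ptr_bit_off, 8);
}

test "packed field byte offset" {
    // A field starting 24 bits into the packed parent, with both pointers
    // using a bit offset of 0 within their host integers.
    try std.testing.expectEqual(@as(i32, 3), packedFieldByteOffset(0, 24, 0));
}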
|
||||
|
||||
@ -17950,7 +17949,7 @@ fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
|
||||
.Struct => {
|
||||
const frame_index = try self.allocFrameIndex(FrameAlloc.initSpill(result_ty, mod));
|
||||
if (result_ty.containerLayout(mod) == .@"packed") {
|
||||
const struct_type = mod.typeToStruct(result_ty).?;
|
||||
const struct_obj = mod.typeToStruct(result_ty).?;
|
||||
try self.genInlineMemset(
|
||||
.{ .lea_frame = .{ .index = frame_index } },
|
||||
.{ .immediate = 0 },
|
||||
@ -17971,7 +17970,7 @@ fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
|
||||
}
|
||||
const elem_abi_size: u32 = @intCast(elem_ty.abiSize(mod));
|
||||
const elem_abi_bits = elem_abi_size * 8;
|
||||
const elem_off = mod.structPackedFieldBitOffset(struct_type, elem_i);
|
||||
const elem_off = mod.structPackedFieldBitOffset(struct_obj, elem_i);
|
||||
const elem_byte_off: i32 = @intCast(elem_off / elem_abi_bits * elem_abi_size);
|
||||
const elem_bit_off = elem_off % elem_abi_bits;
|
||||
const elem_mcv = try self.resolveInst(elem);
|
||||
@ -18959,7 +18958,7 @@ fn resolveCallingConventionValues(
|
||||
|
||||
const param_size: u31 = @intCast(ty.abiSize(mod));
|
||||
const param_align: u31 =
|
||||
@intCast(@max(ty.abiAlignment(mod).toByteUnitsOptional().?, 8));
|
||||
@intCast(@max(ty.abiAlignment(mod).toByteUnits().?, 8));
|
||||
result.stack_byte_count =
|
||||
mem.alignForward(u31, result.stack_byte_count, param_align);
|
||||
arg.* = .{ .load_frame = .{
|
||||
@ -19003,7 +19002,7 @@ fn resolveCallingConventionValues(
|
||||
continue;
|
||||
}
|
||||
const param_size: u31 = @intCast(ty.abiSize(mod));
|
||||
const param_align: u31 = @intCast(ty.abiAlignment(mod).toByteUnitsOptional().?);
|
||||
const param_align: u31 = @intCast(ty.abiAlignment(mod).toByteUnits().?);
|
||||
result.stack_byte_count =
|
||||
mem.alignForward(u31, result.stack_byte_count, param_align);
|
||||
arg.* = .{ .load_frame = .{
|
||||
@ -19096,7 +19095,7 @@ fn splitType(self: *Self, ty: Type) ![2]Type {
|
||||
.integer => switch (part_i) {
|
||||
0 => Type.u64,
|
||||
1 => part: {
|
||||
const elem_size = ty.abiAlignment(mod).minStrict(.@"8").toByteUnitsOptional().?;
|
||||
const elem_size = ty.abiAlignment(mod).minStrict(.@"8").toByteUnits().?;
|
||||
const elem_ty = try mod.intType(.unsigned, @intCast(elem_size * 8));
|
||||
break :part switch (@divExact(ty.abiSize(mod) - 8, elem_size)) {
|
||||
1 => elem_ty,
|
||||
|
||||
@ -848,9 +848,8 @@ const mnemonic_to_encodings_map = init: {
|
||||
const final_storage = data_storage;
|
||||
var final_map: [mnemonic_count][]const Data = .{&.{}} ** mnemonic_count;
|
||||
storage_i = 0;
|
||||
for (&final_map, mnemonic_map) |*value, wip_value| {
|
||||
value.ptr = final_storage[storage_i..].ptr;
|
||||
value.len = wip_value.len;
|
||||
for (&final_map, mnemonic_map) |*final_value, value| {
|
||||
final_value.* = final_storage[storage_i..][0..value.len];
|
||||
storage_i += value.len;
|
||||
}
|
||||
break :init final_map;
|
||||
|
||||
@ -548,7 +548,7 @@ pub fn generateSymbol(
|
||||
}
|
||||
|
||||
const size = struct_type.size(ip).*;
|
||||
const alignment = struct_type.flagsPtr(ip).alignment.toByteUnitsOptional().?;
|
||||
const alignment = struct_type.flagsPtr(ip).alignment.toByteUnits().?;
|
||||
|
||||
const padding = math.cast(
|
||||
usize,
|
||||
@ -893,12 +893,12 @@ fn genDeclRef(
|
||||
// TODO this feels clunky. Perhaps we should check for it in `genTypedValue`?
|
||||
if (ty.castPtrToFn(zcu)) |fn_ty| {
|
||||
if (zcu.typeToFunc(fn_ty).?.is_generic) {
|
||||
return GenResult.mcv(.{ .immediate = fn_ty.abiAlignment(zcu).toByteUnitsOptional().? });
|
||||
return GenResult.mcv(.{ .immediate = fn_ty.abiAlignment(zcu).toByteUnits().? });
|
||||
}
|
||||
} else if (ty.zigTypeTag(zcu) == .Pointer) {
|
||||
const elem_ty = ty.elemType2(zcu);
|
||||
if (!elem_ty.hasRuntimeBits(zcu)) {
|
||||
return GenResult.mcv(.{ .immediate = elem_ty.abiAlignment(zcu).toByteUnitsOptional().? });
|
||||
return GenResult.mcv(.{ .immediate = elem_ty.abiAlignment(zcu).toByteUnits().? });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
3787
src/codegen/c.zig
File diff suppressed because it is too large
2491
src/codegen/c/Type.zig
Normal file
File diff suppressed because it is too large
@ -2033,7 +2033,7 @@ pub const Object = struct {
|
||||
owner_decl.src_node + 1, // Line
|
||||
try o.lowerDebugType(int_ty),
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(enumerators),
|
||||
);
|
||||
|
||||
@ -2120,7 +2120,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(ptr_ty),
|
||||
ptr_size * 8,
|
||||
ptr_align.toByteUnits(0) * 8,
|
||||
(ptr_align.toByteUnits() orelse 0) * 8,
|
||||
0, // Offset
|
||||
);
|
||||
|
||||
@ -2131,7 +2131,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(len_ty),
|
||||
len_size * 8,
|
||||
len_align.toByteUnits(0) * 8,
|
||||
(len_align.toByteUnits() orelse 0) * 8,
|
||||
len_offset * 8,
|
||||
);
|
||||
|
||||
@ -2142,7 +2142,7 @@ pub const Object = struct {
|
||||
line,
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(&.{
|
||||
debug_ptr_type,
|
||||
debug_len_type,
|
||||
@ -2170,7 +2170,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
debug_elem_ty,
|
||||
target.ptrBitWidth(),
|
||||
ty.ptrAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.ptrAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
0, // Offset
|
||||
);
|
||||
|
||||
@ -2217,7 +2217,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(ty.childType(mod)),
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(&.{
|
||||
try o.builder.debugSubrange(
|
||||
try o.builder.debugConstant(try o.builder.intConst(.i64, 0)),
|
||||
@ -2260,7 +2260,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
debug_elem_type,
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(&.{
|
||||
try o.builder.debugSubrange(
|
||||
try o.builder.debugConstant(try o.builder.intConst(.i64, 0)),
|
||||
@ -2316,7 +2316,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(child_ty),
|
||||
payload_size * 8,
|
||||
payload_align.toByteUnits(0) * 8,
|
||||
(payload_align.toByteUnits() orelse 0) * 8,
|
||||
0, // Offset
|
||||
);
|
||||
|
||||
@ -2327,7 +2327,7 @@ pub const Object = struct {
|
||||
0,
|
||||
try o.lowerDebugType(non_null_ty),
|
||||
non_null_size * 8,
|
||||
non_null_align.toByteUnits(0) * 8,
|
||||
(non_null_align.toByteUnits() orelse 0) * 8,
|
||||
non_null_offset * 8,
|
||||
);
|
||||
|
||||
@ -2338,7 +2338,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(&.{
|
||||
debug_data_type,
|
||||
debug_some_type,
|
||||
@ -2396,7 +2396,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(Type.anyerror),
|
||||
error_size * 8,
|
||||
error_align.toByteUnits(0) * 8,
|
||||
(error_align.toByteUnits() orelse 0) * 8,
|
||||
error_offset * 8,
|
||||
);
|
||||
fields[payload_index] = try o.builder.debugMemberType(
|
||||
@ -2406,7 +2406,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(payload_ty),
|
||||
payload_size * 8,
|
||||
payload_align.toByteUnits(0) * 8,
|
||||
(payload_align.toByteUnits() orelse 0) * 8,
|
||||
payload_offset * 8,
|
||||
);
|
||||
|
||||
@ -2417,7 +2417,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(&fields),
|
||||
);
|
||||
|
||||
@ -2485,7 +2485,7 @@ pub const Object = struct {
|
||||
0,
|
||||
try o.lowerDebugType(Type.fromInterned(field_ty)),
|
||||
field_size * 8,
|
||||
field_align.toByteUnits(0) * 8,
|
||||
(field_align.toByteUnits() orelse 0) * 8,
|
||||
field_offset * 8,
|
||||
));
|
||||
}
|
||||
@ -2497,7 +2497,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(fields.items),
|
||||
);
|
||||
|
||||
@ -2566,7 +2566,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(field_ty),
|
||||
field_size * 8,
|
||||
field_align.toByteUnits(0) * 8,
|
||||
(field_align.toByteUnits() orelse 0) * 8,
|
||||
field_offset * 8,
|
||||
));
|
||||
}
|
||||
@ -2578,7 +2578,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(fields.items),
|
||||
);
|
||||
|
||||
@ -2621,7 +2621,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(
|
||||
&.{try o.lowerDebugType(Type.fromInterned(union_type.enum_tag_ty))},
|
||||
),
|
||||
@ -2661,7 +2661,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(Type.fromInterned(field_ty)),
|
||||
field_size * 8,
|
||||
field_align.toByteUnits(0) * 8,
|
||||
(field_align.toByteUnits() orelse 0) * 8,
|
||||
0, // Offset
|
||||
));
|
||||
}
|
||||
@ -2680,7 +2680,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(fields.items),
|
||||
);
|
||||
|
||||
@ -2711,7 +2711,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
try o.lowerDebugType(Type.fromInterned(union_type.enum_tag_ty)),
|
||||
layout.tag_size * 8,
|
||||
layout.tag_align.toByteUnits(0) * 8,
|
||||
(layout.tag_align.toByteUnits() orelse 0) * 8,
|
||||
tag_offset * 8,
|
||||
);
|
||||
|
||||
@ -2722,7 +2722,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
debug_union_type,
|
||||
layout.payload_size * 8,
|
||||
layout.payload_align.toByteUnits(0) * 8,
|
||||
(layout.payload_align.toByteUnits() orelse 0) * 8,
|
||||
payload_offset * 8,
|
||||
);
|
||||
|
||||
@ -2739,7 +2739,7 @@ pub const Object = struct {
|
||||
0, // Line
|
||||
.none, // Underlying type
|
||||
ty.abiSize(mod) * 8,
|
||||
ty.abiAlignment(mod).toByteUnits(0) * 8,
|
||||
(ty.abiAlignment(mod).toByteUnits() orelse 0) * 8,
|
||||
try o.builder.debugTuple(&full_fields),
|
||||
);
|
||||
|
||||
@ -4473,7 +4473,7 @@ pub const Object = struct {
|
||||
// The value cannot be undefined, because we use the `nonnull` annotation
|
||||
// for non-optional pointers. We also need to respect the alignment, even though
|
||||
// the address will never be dereferenced.
|
||||
const int: u64 = ptr_ty.ptrInfo(mod).flags.alignment.toByteUnitsOptional() orelse
|
||||
const int: u64 = ptr_ty.ptrInfo(mod).flags.alignment.toByteUnits() orelse
|
||||
// Note that these 0xaa values are appropriate even in release-optimized builds
|
||||
// because we need a well-defined value that is not null, and LLVM does not
|
||||
// have an "undef_but_not_null" attribute. As an example, if this `alloc` AIR
|
||||
|
||||
@ -172,7 +172,7 @@ pub fn attachSegfaultHandler() void {
|
||||
};
|
||||
}
|
||||
|
||||
fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*const anyopaque) callconv(.C) noreturn {
|
||||
fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*anyopaque) callconv(.C) noreturn {
|
||||
// TODO: use alarm() here to prevent infinite loops
|
||||
PanicSwitch.preDispatch();
|
||||
|
||||
|
||||
95
src/link.zig
@ -188,15 +188,10 @@ pub const File = struct {
|
||||
emit: Compilation.Emit,
|
||||
options: OpenOptions,
|
||||
) !*File {
|
||||
const tag = Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt);
|
||||
switch (tag) {
|
||||
.c => {
|
||||
const ptr = try C.open(arena, comp, emit, options);
|
||||
return &ptr.base;
|
||||
},
|
||||
inline else => |t| {
|
||||
if (build_options.only_c) unreachable;
|
||||
const ptr = try t.Type().open(arena, comp, emit, options);
|
||||
switch (Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt)) {
|
||||
inline else => |tag| {
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
const ptr = try tag.Type().open(arena, comp, emit, options);
|
||||
return &ptr.base;
|
||||
},
|
||||
}
|
||||
@ -208,25 +203,17 @@ pub const File = struct {
|
||||
emit: Compilation.Emit,
|
||||
options: OpenOptions,
|
||||
) !*File {
|
||||
const tag = Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt);
|
||||
switch (tag) {
|
||||
.c => {
|
||||
const ptr = try C.createEmpty(arena, comp, emit, options);
|
||||
return &ptr.base;
|
||||
},
|
||||
inline else => |t| {
|
||||
if (build_options.only_c) unreachable;
|
||||
const ptr = try t.Type().createEmpty(arena, comp, emit, options);
|
||||
switch (Tag.fromObjectFormat(comp.root_mod.resolved_target.result.ofmt)) {
|
||||
inline else => |tag| {
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
const ptr = try tag.Type().createEmpty(arena, comp, emit, options);
|
||||
return &ptr.base;
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cast(base: *File, comptime T: type) ?*T {
|
||||
if (base.tag != T.base_tag)
|
||||
return null;
|
||||
|
||||
return @fieldParentPtr(T, "base", base);
|
||||
return if (base.tag == T.base_tag) @fieldParentPtr("base", base) else null;
|
||||
}
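Because the builtin no longer takes the parent type as an argument, the call sites in this file that previously wrote `@fieldParentPtr(T, "base", base)` now supply the result type with `@as` where no other result location exists. A small sketch of that downcast pattern, using hypothetical File and Elf stand-ins rather than the real linker types:

const std = @import("std");

const File = struct {
    tag: enum { elf, wasm },
};

const Elf = struct {
    base: File,
    entry_addr: u64 = 0,
};

fn asElf(base: *File) ?*Elf {
    // The result type is supplied with @as because there is no other
    // result location for the builtin to infer it from.
    return if (base.tag == .elf) @as(*Elf, @fieldParentPtr("base", base)) else null;
}

test "downcast through the embedded base field" {
    var elf: Elf = .{ .base = .{ .tag = .elf } };
    elf.entry_addr = 0x1000;
    try std.testing.expect(asElf(&elf.base).? == &elf);
}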
|
||||
|
||||
pub fn makeWritable(base: *File) !void {
|
||||
@ -383,7 +370,7 @@ pub const File = struct {
|
||||
.c => unreachable,
|
||||
.nvptx => unreachable,
|
||||
inline else => |t| {
|
||||
return @fieldParentPtr(t.Type(), "base", base).lowerUnnamedConst(val, decl_index);
|
||||
return @as(*t.Type(), @fieldParentPtr("base", base)).lowerUnnamedConst(val, decl_index);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -402,7 +389,7 @@ pub const File = struct {
|
||||
.c => unreachable,
|
||||
.nvptx => unreachable,
|
||||
inline else => |t| {
|
||||
return @fieldParentPtr(t.Type(), "base", base).getGlobalSymbol(name, lib_name);
|
||||
return @as(*t.Type(), @fieldParentPtr("base", base)).getGlobalSymbol(name, lib_name);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -412,12 +399,9 @@ pub const File = struct {
|
||||
const decl = module.declPtr(decl_index);
|
||||
assert(decl.has_tv);
|
||||
switch (base.tag) {
|
||||
.c => {
|
||||
return @fieldParentPtr(C, "base", base).updateDecl(module, decl_index);
|
||||
},
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
return @fieldParentPtr(tag.Type(), "base", base).updateDecl(module, decl_index);
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateDecl(module, decl_index);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -431,12 +415,9 @@ pub const File = struct {
|
||||
liveness: Liveness,
|
||||
) UpdateDeclError!void {
|
||||
switch (base.tag) {
|
||||
.c => {
|
||||
return @fieldParentPtr(C, "base", base).updateFunc(module, func_index, air, liveness);
|
||||
},
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
return @fieldParentPtr(tag.Type(), "base", base).updateFunc(module, func_index, air, liveness);
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateFunc(module, func_index, air, liveness);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -446,12 +427,9 @@ pub const File = struct {
|
||||
assert(decl.has_tv);
|
||||
switch (base.tag) {
|
||||
.spirv, .nvptx => {},
|
||||
.c => {
|
||||
return @fieldParentPtr(C, "base", base).updateDeclLineNumber(module, decl_index);
|
||||
},
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
return @fieldParentPtr(tag.Type(), "base", base).updateDeclLineNumber(module, decl_index);
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateDeclLineNumber(module, decl_index);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -473,11 +451,9 @@ pub const File = struct {
|
||||
base.releaseLock();
|
||||
if (base.file) |f| f.close();
|
||||
switch (base.tag) {
|
||||
.c => @fieldParentPtr(C, "base", base).deinit(),
|
||||
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
@fieldParentPtr(tag.Type(), "base", base).deinit();
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
@as(*tag.Type(), @fieldParentPtr("base", base)).deinit();
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -560,7 +536,7 @@ pub const File = struct {
|
||||
pub fn flush(base: *File, arena: Allocator, prog_node: *std.Progress.Node) FlushError!void {
|
||||
if (build_options.only_c) {
|
||||
assert(base.tag == .c);
|
||||
return @fieldParentPtr(C, "base", base).flush(arena, prog_node);
|
||||
return @as(*C, @fieldParentPtr("base", base)).flush(arena, prog_node);
|
||||
}
|
||||
const comp = base.comp;
|
||||
if (comp.clang_preprocessor_mode == .yes or comp.clang_preprocessor_mode == .pch) {
|
||||
@ -587,7 +563,7 @@ pub const File = struct {
|
||||
}
|
||||
switch (base.tag) {
|
||||
inline else => |tag| {
|
||||
return @fieldParentPtr(tag.Type(), "base", base).flush(arena, prog_node);
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).flush(arena, prog_node);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -596,12 +572,9 @@ pub const File = struct {
|
||||
/// rather than final output mode.
|
||||
pub fn flushModule(base: *File, arena: Allocator, prog_node: *std.Progress.Node) FlushError!void {
|
||||
switch (base.tag) {
|
||||
.c => {
|
||||
return @fieldParentPtr(C, "base", base).flushModule(arena, prog_node);
|
||||
},
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
return @fieldParentPtr(tag.Type(), "base", base).flushModule(arena, prog_node);
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).flushModule(arena, prog_node);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -609,12 +582,9 @@ pub const File = struct {
|
||||
/// Called when a Decl is deleted from the Module.
|
||||
pub fn freeDecl(base: *File, decl_index: InternPool.DeclIndex) void {
|
||||
switch (base.tag) {
|
||||
.c => {
|
||||
@fieldParentPtr(C, "base", base).freeDecl(decl_index);
|
||||
},
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
@fieldParentPtr(tag.Type(), "base", base).freeDecl(decl_index);
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
@as(*tag.Type(), @fieldParentPtr("base", base)).freeDecl(decl_index);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -635,12 +605,9 @@ pub const File = struct {
|
||||
exports: []const *Module.Export,
|
||||
) UpdateExportsError!void {
|
||||
switch (base.tag) {
|
||||
.c => {
|
||||
return @fieldParentPtr(C, "base", base).updateExports(module, exported, exports);
|
||||
},
|
||||
inline else => |tag| {
|
||||
if (build_options.only_c) unreachable;
|
||||
return @fieldParentPtr(tag.Type(), "base", base).updateExports(module, exported, exports);
|
||||
if (tag != .c and build_options.only_c) unreachable;
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).updateExports(module, exported, exports);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -664,7 +631,7 @@ pub const File = struct {
|
||||
.spirv => unreachable,
|
||||
.nvptx => unreachable,
|
||||
inline else => |tag| {
|
||||
return @fieldParentPtr(tag.Type(), "base", base).getDeclVAddr(decl_index, reloc_info);
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).getDeclVAddr(decl_index, reloc_info);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -683,7 +650,7 @@ pub const File = struct {
|
||||
.spirv => unreachable,
|
||||
.nvptx => unreachable,
|
||||
inline else => |tag| {
|
||||
return @fieldParentPtr(tag.Type(), "base", base).lowerAnonDecl(decl_val, decl_align, src_loc);
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).lowerAnonDecl(decl_val, decl_align, src_loc);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -695,7 +662,7 @@ pub const File = struct {
|
||||
.spirv => unreachable,
|
||||
.nvptx => unreachable,
|
||||
inline else => |tag| {
|
||||
return @fieldParentPtr(tag.Type(), "base", base).getAnonDeclVAddr(decl_val, reloc_info);
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).getAnonDeclVAddr(decl_val, reloc_info);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -714,7 +681,7 @@ pub const File = struct {
|
||||
=> {},
|
||||
|
||||
inline else => |tag| {
|
||||
return @fieldParentPtr(tag.Type(), "base", base).deleteDeclExport(decl_index, name);
|
||||
return @as(*tag.Type(), @fieldParentPtr("base", base)).deleteDeclExport(decl_index, name);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
321
src/link/C.zig
@ -6,7 +6,8 @@ const fs = std.fs;
|
||||
|
||||
const C = @This();
|
||||
const build_options = @import("build_options");
|
||||
const Module = @import("../Module.zig");
|
||||
const Zcu = @import("../Module.zig");
|
||||
const Module = @import("../Package/Module.zig");
|
||||
const InternPool = @import("../InternPool.zig");
|
||||
const Alignment = InternPool.Alignment;
|
||||
const Compilation = @import("../Compilation.zig");
|
||||
@ -68,13 +69,13 @@ pub const DeclBlock = struct {
|
||||
fwd_decl: String = String.empty,
|
||||
/// Each `Decl` stores a set of used `CType`s. In `flush()`, we iterate
|
||||
/// over each `Decl` and generate the definition for each used `CType` once.
|
||||
ctypes: codegen.CType.Store = .{},
|
||||
/// Key and Value storage use the ctype arena.
|
||||
ctype_pool: codegen.CType.Pool = codegen.CType.Pool.empty,
|
||||
/// May contain string references to ctype_pool
|
||||
lazy_fns: codegen.LazyFnMap = .{},
|
||||
|
||||
fn deinit(db: *DeclBlock, gpa: Allocator) void {
|
||||
db.lazy_fns.deinit(gpa);
|
||||
db.ctypes.deinit(gpa);
|
||||
db.ctype_pool.deinit(gpa);
|
||||
db.* = undefined;
|
||||
}
|
||||
};
|
||||
@ -177,23 +178,24 @@ pub fn freeDecl(self: *C, decl_index: InternPool.DeclIndex) void {
|
||||
|
||||
pub fn updateFunc(
|
||||
self: *C,
|
||||
module: *Module,
|
||||
zcu: *Zcu,
|
||||
func_index: InternPool.Index,
|
||||
air: Air,
|
||||
liveness: Liveness,
|
||||
) !void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const func = module.funcInfo(func_index);
|
||||
const func = zcu.funcInfo(func_index);
|
||||
const decl_index = func.owner_decl;
|
||||
const decl = module.declPtr(decl_index);
|
||||
const decl = zcu.declPtr(decl_index);
|
||||
const gop = try self.decl_table.getOrPut(gpa, decl_index);
|
||||
if (!gop.found_existing) gop.value_ptr.* = .{};
|
||||
const ctypes = &gop.value_ptr.ctypes;
|
||||
const ctype_pool = &gop.value_ptr.ctype_pool;
|
||||
const lazy_fns = &gop.value_ptr.lazy_fns;
|
||||
const fwd_decl = &self.fwd_decl_buf;
|
||||
const code = &self.code_buf;
|
||||
ctypes.clearRetainingCapacity(gpa);
|
||||
try ctype_pool.init(gpa);
|
||||
ctype_pool.clearRetainingCapacity();
|
||||
lazy_fns.clearRetainingCapacity();
|
||||
fwd_decl.clearRetainingCapacity();
|
||||
code.clearRetainingCapacity();
|
||||
@ -206,12 +208,14 @@ pub fn updateFunc(
|
||||
.object = .{
|
||||
.dg = .{
|
||||
.gpa = gpa,
|
||||
.module = module,
|
||||
.zcu = zcu,
|
||||
.mod = zcu.namespacePtr(decl.src_namespace).file_scope.mod,
|
||||
.error_msg = null,
|
||||
.pass = .{ .decl = decl_index },
|
||||
.is_naked_fn = decl.typeOf(module).fnCallingConvention(module) == .Naked,
|
||||
.is_naked_fn = decl.typeOf(zcu).fnCallingConvention(zcu) == .Naked,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctypes = ctypes.*,
|
||||
.ctype_pool = ctype_pool.*,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = self.anon_decls,
|
||||
.aligned_anon_decls = self.aligned_anon_decls,
|
||||
},
|
||||
@ -220,36 +224,32 @@ pub fn updateFunc(
|
||||
},
|
||||
.lazy_fns = lazy_fns.*,
|
||||
};
|
||||
|
||||
function.object.indent_writer = .{ .underlying_writer = function.object.code.writer() };
|
||||
defer {
|
||||
self.anon_decls = function.object.dg.anon_decl_deps;
|
||||
self.aligned_anon_decls = function.object.dg.aligned_anon_decls;
|
||||
fwd_decl.* = function.object.dg.fwd_decl.moveToUnmanaged();
|
||||
ctype_pool.* = function.object.dg.ctype_pool.move();
|
||||
ctype_pool.freeUnusedCapacity(gpa);
|
||||
function.object.dg.scratch.deinit(gpa);
|
||||
lazy_fns.* = function.lazy_fns.move();
|
||||
lazy_fns.shrinkAndFree(gpa, lazy_fns.count());
|
||||
code.* = function.object.code.moveToUnmanaged();
|
||||
function.deinit();
|
||||
}
|
||||
|
||||
codegen.genFunc(&function) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
try module.failed_decls.put(gpa, decl_index, function.object.dg.error_msg.?);
|
||||
try zcu.failed_decls.put(gpa, decl_index, function.object.dg.error_msg.?);
|
||||
return;
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
ctypes.* = function.object.dg.ctypes.move();
|
||||
lazy_fns.* = function.lazy_fns.move();
|
||||
|
||||
// Free excess allocated memory for this Decl.
|
||||
ctypes.shrinkAndFree(gpa, ctypes.count());
|
||||
lazy_fns.shrinkAndFree(gpa, lazy_fns.count());
|
||||
|
||||
gop.value_ptr.code = try self.addString(function.object.code.items);
|
||||
gop.value_ptr.fwd_decl = try self.addString(function.object.dg.fwd_decl.items);
|
||||
gop.value_ptr.code = try self.addString(function.object.code.items);
|
||||
}
|
||||
|
||||
fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
|
||||
fn updateAnonDecl(self: *C, zcu: *Zcu, i: usize) !void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
const anon_decl = self.anon_decls.keys()[i];
|
||||
|
||||
@ -261,12 +261,14 @@ fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
|
||||
var object: codegen.Object = .{
|
||||
.dg = .{
|
||||
.gpa = gpa,
|
||||
.module = module,
|
||||
.zcu = zcu,
|
||||
.mod = zcu.root_mod,
|
||||
.error_msg = null,
|
||||
.pass = .{ .anon = anon_decl },
|
||||
.is_naked_fn = false,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctypes = .{},
|
||||
.ctype_pool = codegen.CType.Pool.empty,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = self.anon_decls,
|
||||
.aligned_anon_decls = self.aligned_anon_decls,
|
||||
},
|
||||
@ -274,62 +276,64 @@ fn updateAnonDecl(self: *C, module: *Module, i: usize) !void {
|
||||
.indent_writer = undefined, // set later so we can get a pointer to object.code
|
||||
};
|
||||
object.indent_writer = .{ .underlying_writer = object.code.writer() };
|
||||
|
||||
defer {
|
||||
self.anon_decls = object.dg.anon_decl_deps;
|
||||
self.aligned_anon_decls = object.dg.aligned_anon_decls;
|
||||
object.dg.ctypes.deinit(object.dg.gpa);
|
||||
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
|
||||
object.dg.ctype_pool.deinit(object.dg.gpa);
|
||||
object.dg.scratch.deinit(gpa);
|
||||
code.* = object.code.moveToUnmanaged();
|
||||
}
|
||||
try object.dg.ctype_pool.init(gpa);
|
||||
|
||||
const c_value: codegen.CValue = .{ .constant = anon_decl };
|
||||
const c_value: codegen.CValue = .{ .constant = Value.fromInterned(anon_decl) };
|
||||
const alignment: Alignment = self.aligned_anon_decls.get(anon_decl) orelse .none;
|
||||
codegen.genDeclValue(&object, Value.fromInterned(anon_decl), false, c_value, alignment, .none) catch |err| switch (err) {
|
||||
codegen.genDeclValue(&object, c_value.constant, false, c_value, alignment, .none) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
@panic("TODO: C backend AnalysisFail on anonymous decl");
|
||||
//try module.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
|
||||
//try zcu.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
|
||||
//return;
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
// Free excess allocated memory for this Decl.
|
||||
object.dg.ctypes.shrinkAndFree(gpa, object.dg.ctypes.count());
|
||||
|
||||
object.dg.ctype_pool.freeUnusedCapacity(gpa);
|
||||
object.dg.anon_decl_deps.values()[i] = .{
|
||||
.code = try self.addString(object.code.items),
|
||||
.fwd_decl = try self.addString(object.dg.fwd_decl.items),
|
||||
.ctypes = object.dg.ctypes.move(),
|
||||
.ctype_pool = object.dg.ctype_pool.move(),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn updateDecl(self: *C, module: *Module, decl_index: InternPool.DeclIndex) !void {
|
||||
pub fn updateDecl(self: *C, zcu: *Zcu, decl_index: InternPool.DeclIndex) !void {
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const decl = zcu.declPtr(decl_index);
|
||||
const gop = try self.decl_table.getOrPut(gpa, decl_index);
|
||||
if (!gop.found_existing) {
|
||||
gop.value_ptr.* = .{};
|
||||
}
|
||||
const ctypes = &gop.value_ptr.ctypes;
|
||||
errdefer _ = self.decl_table.pop();
|
||||
if (!gop.found_existing) gop.value_ptr.* = .{};
|
||||
const ctype_pool = &gop.value_ptr.ctype_pool;
|
||||
const fwd_decl = &self.fwd_decl_buf;
|
||||
const code = &self.code_buf;
|
||||
ctypes.clearRetainingCapacity(gpa);
|
||||
try ctype_pool.init(gpa);
|
||||
ctype_pool.clearRetainingCapacity();
|
||||
fwd_decl.clearRetainingCapacity();
|
||||
code.clearRetainingCapacity();
|
||||
|
||||
var object: codegen.Object = .{
|
||||
.dg = .{
|
||||
.gpa = gpa,
|
||||
.module = module,
|
||||
.zcu = zcu,
|
||||
.mod = zcu.namespacePtr(decl.src_namespace).file_scope.mod,
|
||||
.error_msg = null,
|
||||
.pass = .{ .decl = decl_index },
|
||||
.is_naked_fn = false,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctypes = ctypes.*,
|
||||
.ctype_pool = ctype_pool.*,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = self.anon_decls,
|
||||
.aligned_anon_decls = self.aligned_anon_decls,
|
||||
},
|
||||
@ -340,33 +344,29 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: InternPool.DeclIndex) !
|
||||
defer {
|
||||
self.anon_decls = object.dg.anon_decl_deps;
|
||||
self.aligned_anon_decls = object.dg.aligned_anon_decls;
|
||||
object.dg.ctypes.deinit(object.dg.gpa);
|
||||
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
|
||||
ctype_pool.* = object.dg.ctype_pool.move();
|
||||
ctype_pool.freeUnusedCapacity(gpa);
|
||||
object.dg.scratch.deinit(gpa);
|
||||
code.* = object.code.moveToUnmanaged();
|
||||
}
|
||||
|
||||
codegen.genDecl(&object) catch |err| switch (err) {
|
||||
error.AnalysisFail => {
|
||||
try module.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
|
||||
try zcu.failed_decls.put(gpa, decl_index, object.dg.error_msg.?);
|
||||
return;
|
||||
},
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
ctypes.* = object.dg.ctypes.move();
|
||||
|
||||
// Free excess allocated memory for this Decl.
|
||||
ctypes.shrinkAndFree(gpa, ctypes.count());
|
||||
|
||||
gop.value_ptr.code = try self.addString(object.code.items);
|
||||
gop.value_ptr.fwd_decl = try self.addString(object.dg.fwd_decl.items);
|
||||
}
|
||||
|
||||
pub fn updateDeclLineNumber(self: *C, module: *Module, decl_index: InternPool.DeclIndex) !void {
|
||||
pub fn updateDeclLineNumber(self: *C, zcu: *Zcu, decl_index: InternPool.DeclIndex) !void {
|
||||
// The C backend does not have the ability to fix line numbers without re-generating
|
||||
// the entire Decl.
|
||||
_ = self;
|
||||
_ = module;
|
||||
_ = zcu;
|
||||
_ = decl_index;
|
||||
}
|
||||
|
||||
@ -399,22 +399,25 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: *std.Progress.Node) !v
|
||||
|
||||
const comp = self.base.comp;
|
||||
const gpa = comp.gpa;
|
||||
const module = self.base.comp.module.?;
|
||||
const zcu = self.base.comp.module.?;
|
||||
|
||||
{
|
||||
var i: usize = 0;
|
||||
while (i < self.anon_decls.count()) : (i += 1) {
|
||||
try updateAnonDecl(self, module, i);
|
||||
try updateAnonDecl(self, zcu, i);
|
||||
}
|
||||
}
|
||||
|
||||
// This code path happens exclusively with -ofmt=c. The flush logic for
|
||||
// emit-h is in `flushEmitH` below.
|
||||
|
||||
var f: Flush = .{};
|
||||
var f: Flush = .{
|
||||
.ctype_pool = codegen.CType.Pool.empty,
|
||||
.lazy_ctype_pool = codegen.CType.Pool.empty,
|
||||
};
|
||||
defer f.deinit(gpa);
|
||||
|
||||
const abi_defines = try self.abiDefines(module.getTarget());
|
||||
const abi_defines = try self.abiDefines(zcu.getTarget());
|
||||
defer abi_defines.deinit();
|
||||
|
||||
// Covers defines, zig.h, ctypes, asm, lazy fwd.
|
||||
@ -429,7 +432,7 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: *std.Progress.Node) !v
|
||||
{
|
||||
var asm_buf = f.asm_buf.toManaged(gpa);
|
||||
defer f.asm_buf = asm_buf.moveToUnmanaged();
|
||||
try codegen.genGlobalAsm(module, asm_buf.writer());
|
||||
try codegen.genGlobalAsm(zcu, asm_buf.writer());
|
||||
f.appendBufAssumeCapacity(asm_buf.items);
|
||||
}
|
||||
|
||||
@ -438,7 +441,8 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: *std.Progress.Node) !v
|
||||
|
||||
self.lazy_fwd_decl_buf.clearRetainingCapacity();
|
||||
self.lazy_code_buf.clearRetainingCapacity();
|
||||
try self.flushErrDecls(&f.lazy_ctypes);
|
||||
try f.lazy_ctype_pool.init(gpa);
|
||||
try self.flushErrDecls(zcu, &f.lazy_ctype_pool);
|
||||
|
||||
// Unlike other backends, the .c code we are emitting has order-dependent decls.
|
||||
// `CType`s, forward decls, and non-functions first.
|
||||
@ -446,34 +450,35 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: *std.Progress.Node) !v
|
||||
{
|
||||
var export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void) = .{};
|
||||
defer export_names.deinit(gpa);
|
||||
try export_names.ensureTotalCapacity(gpa, @intCast(module.decl_exports.entries.len));
|
||||
for (module.decl_exports.values()) |exports| for (exports.items) |@"export"|
|
||||
try export_names.ensureTotalCapacity(gpa, @intCast(zcu.decl_exports.entries.len));
|
||||
for (zcu.decl_exports.values()) |exports| for (exports.items) |@"export"|
|
||||
try export_names.put(gpa, @"export".opts.name, {});
|
||||
|
||||
for (self.anon_decls.values()) |*decl_block| {
|
||||
try self.flushDeclBlock(&f, decl_block, export_names, .none);
|
||||
try self.flushDeclBlock(zcu, zcu.root_mod, &f, decl_block, export_names, .none);
|
||||
}
|
||||
|
||||
for (self.decl_table.keys(), self.decl_table.values()) |decl_index, *decl_block| {
|
||||
assert(module.declPtr(decl_index).has_tv);
|
||||
const decl = module.declPtr(decl_index);
|
||||
const extern_symbol_name = if (decl.isExtern(module)) decl.name.toOptional() else .none;
|
||||
try self.flushDeclBlock(&f, decl_block, export_names, extern_symbol_name);
|
||||
const decl = zcu.declPtr(decl_index);
|
||||
assert(decl.has_tv);
|
||||
const extern_symbol_name = if (decl.isExtern(zcu)) decl.name.toOptional() else .none;
|
||||
const mod = zcu.namespacePtr(decl.src_namespace).file_scope.mod;
|
||||
try self.flushDeclBlock(zcu, mod, &f, decl_block, export_names, extern_symbol_name);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
// We need to flush lazy ctypes after flushing all decls but before flushing any decl ctypes.
|
||||
// This ensures that every lazy CType.Index exactly matches the global CType.Index.
|
||||
assert(f.ctypes.count() == 0);
|
||||
try self.flushCTypes(&f, .flush, f.lazy_ctypes);
|
||||
try f.ctype_pool.init(gpa);
|
||||
try self.flushCTypes(zcu, &f, .flush, &f.lazy_ctype_pool);
|
||||
|
||||
for (self.anon_decls.keys(), self.anon_decls.values()) |anon_decl, decl_block| {
|
||||
try self.flushCTypes(&f, .{ .anon = anon_decl }, decl_block.ctypes);
|
||||
try self.flushCTypes(zcu, &f, .{ .anon = anon_decl }, &decl_block.ctype_pool);
|
||||
}
|
||||
|
||||
for (self.decl_table.keys(), self.decl_table.values()) |decl_index, decl_block| {
|
||||
try self.flushCTypes(&f, .{ .decl = decl_index }, decl_block.ctypes);
|
||||
try self.flushCTypes(zcu, &f, .{ .decl = decl_index }, &decl_block.ctype_pool);
|
||||
}
|
||||
}
|
||||
|
||||
@ -504,11 +509,11 @@ pub fn flushModule(self: *C, arena: Allocator, prog_node: *std.Progress.Node) !v
|
||||
}
|
||||
|
||||
const Flush = struct {
|
||||
ctypes: codegen.CType.Store = .{},
|
||||
ctypes_map: std.ArrayListUnmanaged(codegen.CType.Index) = .{},
|
||||
ctype_pool: codegen.CType.Pool,
|
||||
ctype_global_from_decl_map: std.ArrayListUnmanaged(codegen.CType) = .{},
|
||||
ctypes_buf: std.ArrayListUnmanaged(u8) = .{},
|
||||
|
||||
lazy_ctypes: codegen.CType.Store = .{},
|
||||
lazy_ctype_pool: codegen.CType.Pool,
|
||||
lazy_fns: LazyFns = .{},
|
||||
|
||||
asm_buf: std.ArrayListUnmanaged(u8) = .{},
|
||||
@ -530,10 +535,11 @@ const Flush = struct {
|
||||
f.all_buffers.deinit(gpa);
|
||||
f.asm_buf.deinit(gpa);
|
||||
f.lazy_fns.deinit(gpa);
|
||||
f.lazy_ctypes.deinit(gpa);
|
||||
f.lazy_ctype_pool.deinit(gpa);
|
||||
f.ctypes_buf.deinit(gpa);
|
||||
f.ctypes_map.deinit(gpa);
|
||||
f.ctypes.deinit(gpa);
|
||||
assert(f.ctype_global_from_decl_map.items.len == 0);
|
||||
f.ctype_global_from_decl_map.deinit(gpa);
|
||||
f.ctype_pool.deinit(gpa);
|
||||
}
|
||||
};
|
||||
|
||||
@ -543,91 +549,62 @@ const FlushDeclError = error{
|
||||
|
||||
fn flushCTypes(
|
||||
self: *C,
|
||||
zcu: *Zcu,
|
||||
f: *Flush,
|
||||
pass: codegen.DeclGen.Pass,
|
||||
decl_ctypes: codegen.CType.Store,
|
||||
decl_ctype_pool: *const codegen.CType.Pool,
|
||||
) FlushDeclError!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
const mod = self.base.comp.module.?;
|
||||
const global_ctype_pool = &f.ctype_pool;
|
||||
|
||||
const decl_ctypes_len = decl_ctypes.count();
|
||||
f.ctypes_map.clearRetainingCapacity();
|
||||
try f.ctypes_map.ensureTotalCapacity(gpa, decl_ctypes_len);
|
||||
|
||||
var global_ctypes = f.ctypes.promote(gpa);
|
||||
defer f.ctypes.demote(global_ctypes);
|
||||
const global_from_decl_map = &f.ctype_global_from_decl_map;
|
||||
assert(global_from_decl_map.items.len == 0);
|
||||
try global_from_decl_map.ensureTotalCapacity(gpa, decl_ctype_pool.items.len);
|
||||
defer global_from_decl_map.clearRetainingCapacity();
|
||||
|
||||
var ctypes_buf = f.ctypes_buf.toManaged(gpa);
|
||||
defer f.ctypes_buf = ctypes_buf.moveToUnmanaged();
|
||||
const writer = ctypes_buf.writer();
|
||||
|
||||
const slice = decl_ctypes.set.map.entries.slice();
|
||||
for (slice.items(.key), 0..) |decl_cty, decl_i| {
|
||||
const Context = struct {
|
||||
arena: Allocator,
|
||||
ctypes_map: []codegen.CType.Index,
|
||||
cached_hash: codegen.CType.Store.Set.Map.Hash,
|
||||
idx: codegen.CType.Index,
|
||||
|
||||
pub fn hash(ctx: @This(), _: codegen.CType) codegen.CType.Store.Set.Map.Hash {
|
||||
return ctx.cached_hash;
|
||||
for (0..decl_ctype_pool.items.len) |decl_ctype_pool_index| {
|
||||
const PoolAdapter = struct {
|
||||
global_from_decl_map: []const codegen.CType,
|
||||
pub fn eql(pool_adapter: @This(), decl_ctype: codegen.CType, global_ctype: codegen.CType) bool {
|
||||
return if (decl_ctype.toPoolIndex()) |decl_pool_index|
|
||||
decl_pool_index < pool_adapter.global_from_decl_map.len and
|
||||
pool_adapter.global_from_decl_map[decl_pool_index].eql(global_ctype)
|
||||
else
|
||||
decl_ctype.index == global_ctype.index;
|
||||
}
|
||||
pub fn eql(ctx: @This(), lhs: codegen.CType, rhs: codegen.CType, _: usize) bool {
|
||||
return lhs.eqlContext(rhs, ctx);
|
||||
}
|
||||
pub fn eqlIndex(
|
||||
ctx: @This(),
|
||||
lhs_idx: codegen.CType.Index,
|
||||
rhs_idx: codegen.CType.Index,
|
||||
) bool {
|
||||
if (lhs_idx < codegen.CType.Tag.no_payload_count or
|
||||
rhs_idx < codegen.CType.Tag.no_payload_count) return lhs_idx == rhs_idx;
|
||||
const lhs_i = lhs_idx - codegen.CType.Tag.no_payload_count;
|
||||
if (lhs_i >= ctx.ctypes_map.len) return false;
|
||||
return ctx.ctypes_map[lhs_i] == rhs_idx;
|
||||
}
|
||||
pub fn copyIndex(ctx: @This(), idx: codegen.CType.Index) codegen.CType.Index {
|
||||
if (idx < codegen.CType.Tag.no_payload_count) return idx;
|
||||
return ctx.ctypes_map[idx - codegen.CType.Tag.no_payload_count];
|
||||
pub fn copy(pool_adapter: @This(), decl_ctype: codegen.CType) codegen.CType {
|
||||
return if (decl_ctype.toPoolIndex()) |decl_pool_index|
|
||||
pool_adapter.global_from_decl_map[decl_pool_index]
|
||||
else
|
||||
decl_ctype;
|
||||
}
|
||||
};
|
||||
const decl_idx = @as(codegen.CType.Index, @intCast(codegen.CType.Tag.no_payload_count + decl_i));
|
||||
const ctx = Context{
|
||||
.arena = global_ctypes.arena.allocator(),
|
||||
.ctypes_map = f.ctypes_map.items,
|
||||
.cached_hash = decl_ctypes.indexToHash(decl_idx),
|
||||
.idx = decl_idx,
|
||||
};
|
||||
const gop = try global_ctypes.set.map.getOrPutContextAdapted(gpa, decl_cty, ctx, .{
|
||||
.store = &global_ctypes.set,
|
||||
});
|
||||
const global_idx =
|
||||
@as(codegen.CType.Index, @intCast(codegen.CType.Tag.no_payload_count + gop.index));
|
||||
f.ctypes_map.appendAssumeCapacity(global_idx);
|
||||
if (!gop.found_existing) {
|
||||
errdefer _ = global_ctypes.set.map.pop();
|
||||
gop.key_ptr.* = try decl_cty.copyContext(ctx);
|
||||
}
|
||||
if (std.debug.runtime_safety) {
|
||||
const global_cty = &global_ctypes.set.map.entries.items(.key)[gop.index];
|
||||
assert(global_cty == gop.key_ptr);
|
||||
assert(decl_cty.eqlContext(global_cty.*, ctx));
|
||||
assert(decl_cty.hash(decl_ctypes.set) == global_cty.hash(global_ctypes.set));
|
||||
}
|
||||
const decl_ctype = codegen.CType.fromPoolIndex(decl_ctype_pool_index);
|
||||
const global_ctype, const found_existing = try global_ctype_pool.getOrPutAdapted(
|
||||
gpa,
|
||||
decl_ctype_pool,
|
||||
decl_ctype,
|
||||
PoolAdapter{ .global_from_decl_map = global_from_decl_map.items },
|
||||
);
|
||||
global_from_decl_map.appendAssumeCapacity(global_ctype);
|
||||
try codegen.genTypeDecl(
|
||||
mod,
|
||||
zcu,
|
||||
writer,
|
||||
global_ctypes.set,
|
||||
global_idx,
|
||||
global_ctype_pool,
|
||||
global_ctype,
|
||||
pass,
|
||||
decl_ctypes.set,
|
||||
decl_idx,
|
||||
gop.found_existing,
|
||||
decl_ctype_pool,
|
||||
decl_ctype,
|
||||
found_existing,
|
||||
);
|
||||
}
|
||||
}
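The rewritten loop folds each per-decl ctype pool into the global pool while recording, for every per-decl entry, the global ctype it landed on, so later entries that reference earlier ones can be translated through `global_from_decl_map`. The real code goes through `CType.Pool.getOrPutAdapted` with the `PoolAdapter` above because ctypes reference each other; the sketch below flattens that to plain integers and a standard hash map just to show the dedup-and-remap shape (all names here are illustrative, not the actual `CType.Pool` API).

const std = @import("std");

/// Fold one declaration's ctypes into the global set, recording where each
/// per-decl index ended up so cross-references can be rewritten later.
fn mergeIntoGlobal(
    gpa: std.mem.Allocator,
    global: *std.AutoArrayHashMapUnmanaged(u64, void),
    decl_ctypes: []const u64,
    decl_to_global: []u32,
) !void {
    for (decl_ctypes, 0..) |ctype, i| {
        const gop = try global.getOrPut(gpa, ctype);
        // Whether it was newly inserted or already present, the per-decl
        // slot now points at the single global copy.
        decl_to_global[i] = @intCast(gop.index);
    }
}

test "duplicate ctypes collapse to one global entry" {
    const gpa = std.testing.allocator;
    var global: std.AutoArrayHashMapUnmanaged(u64, void) = .{};
    defer global.deinit(gpa);

    var map_a: [3]u32 = undefined;
    var map_b: [2]u32 = undefined;
    try mergeIntoGlobal(gpa, &global, &.{ 10, 20, 30 }, &map_a);
    try mergeIntoGlobal(gpa, &global, &.{ 20, 40 }, &map_b);

    try std.testing.expectEqual(@as(usize, 4), global.count());
    try std.testing.expectEqual(map_a[1], map_b[0]); // both "20" map to the same slot
}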
|
||||
|
||||
fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
|
||||
fn flushErrDecls(self: *C, zcu: *Zcu, ctype_pool: *codegen.CType.Pool) FlushDeclError!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
|
||||
const fwd_decl = &self.lazy_fwd_decl_buf;
|
||||
@ -636,12 +613,14 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
|
||||
var object = codegen.Object{
|
||||
.dg = .{
|
||||
.gpa = gpa,
|
||||
.module = self.base.comp.module.?,
|
||||
.zcu = zcu,
|
||||
.mod = zcu.root_mod,
|
||||
.error_msg = null,
|
||||
.pass = .flush,
|
||||
.is_naked_fn = false,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctypes = ctypes.*,
|
||||
.ctype_pool = ctype_pool.*,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = self.anon_decls,
|
||||
.aligned_anon_decls = self.aligned_anon_decls,
|
||||
},
|
||||
@ -652,8 +631,10 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
|
||||
defer {
|
||||
self.anon_decls = object.dg.anon_decl_deps;
|
||||
self.aligned_anon_decls = object.dg.aligned_anon_decls;
|
||||
object.dg.ctypes.deinit(gpa);
|
||||
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
|
||||
ctype_pool.* = object.dg.ctype_pool.move();
|
||||
ctype_pool.freeUnusedCapacity(gpa);
|
||||
object.dg.scratch.deinit(gpa);
|
||||
code.* = object.code.moveToUnmanaged();
|
||||
}
|
||||
|
||||
@ -661,13 +642,14 @@ fn flushErrDecls(self: *C, ctypes: *codegen.CType.Store) FlushDeclError!void {
|
||||
error.AnalysisFail => unreachable,
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
ctypes.* = object.dg.ctypes.move();
|
||||
}
|
||||
|
||||
fn flushLazyFn(
|
||||
self: *C,
|
||||
ctypes: *codegen.CType.Store,
|
||||
zcu: *Zcu,
|
||||
mod: *Module,
|
||||
ctype_pool: *codegen.CType.Pool,
|
||||
lazy_ctype_pool: *const codegen.CType.Pool,
|
||||
lazy_fn: codegen.LazyFnMap.Entry,
|
||||
) FlushDeclError!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
@ -678,12 +660,14 @@ fn flushLazyFn(
|
||||
var object = codegen.Object{
|
||||
.dg = .{
|
||||
.gpa = gpa,
|
||||
.module = self.base.comp.module.?,
|
||||
.zcu = zcu,
|
||||
.mod = mod,
|
||||
.error_msg = null,
|
||||
.pass = .flush,
|
||||
.is_naked_fn = false,
|
||||
.fwd_decl = fwd_decl.toManaged(gpa),
|
||||
.ctypes = ctypes.*,
|
||||
.ctype_pool = ctype_pool.*,
|
||||
.scratch = .{},
|
||||
.anon_decl_deps = .{},
|
||||
.aligned_anon_decls = .{},
|
||||
},
|
||||
@ -696,20 +680,27 @@ fn flushLazyFn(
|
||||
// `updateFunc()` does.
|
||||
assert(object.dg.anon_decl_deps.count() == 0);
|
||||
assert(object.dg.aligned_anon_decls.count() == 0);
|
||||
object.dg.ctypes.deinit(gpa);
|
||||
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
|
||||
ctype_pool.* = object.dg.ctype_pool.move();
|
||||
ctype_pool.freeUnusedCapacity(gpa);
|
||||
object.dg.scratch.deinit(gpa);
|
||||
code.* = object.code.moveToUnmanaged();
|
||||
}
|
||||
|
||||
codegen.genLazyFn(&object, lazy_fn) catch |err| switch (err) {
|
||||
codegen.genLazyFn(&object, lazy_ctype_pool, lazy_fn) catch |err| switch (err) {
|
||||
error.AnalysisFail => unreachable,
|
||||
else => |e| return e,
|
||||
};
|
||||
|
||||
ctypes.* = object.dg.ctypes.move();
|
||||
}
|
||||
|
||||
fn flushLazyFns(self: *C, f: *Flush, lazy_fns: codegen.LazyFnMap) FlushDeclError!void {
|
||||
fn flushLazyFns(
|
||||
self: *C,
|
||||
zcu: *Zcu,
|
||||
mod: *Module,
|
||||
f: *Flush,
|
||||
lazy_ctype_pool: *const codegen.CType.Pool,
|
||||
lazy_fns: codegen.LazyFnMap,
|
||||
) FlushDeclError!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
try f.lazy_fns.ensureUnusedCapacity(gpa, @intCast(lazy_fns.count()));
|
||||
|
||||
@ -718,19 +709,21 @@ fn flushLazyFns(self: *C, f: *Flush, lazy_fns: codegen.LazyFnMap) FlushDeclError
|
||||
const gop = f.lazy_fns.getOrPutAssumeCapacity(entry.key_ptr.*);
|
||||
if (gop.found_existing) continue;
|
||||
gop.value_ptr.* = {};
|
||||
try self.flushLazyFn(&f.lazy_ctypes, entry);
|
||||
try self.flushLazyFn(zcu, mod, &f.lazy_ctype_pool, lazy_ctype_pool, entry);
|
||||
}
|
||||
}
|
||||
|
||||
fn flushDeclBlock(
|
||||
self: *C,
|
||||
zcu: *Zcu,
|
||||
mod: *Module,
|
||||
f: *Flush,
|
||||
decl_block: *DeclBlock,
|
||||
export_names: std.AutoHashMapUnmanaged(InternPool.NullTerminatedString, void),
|
||||
extern_symbol_name: InternPool.OptionalNullTerminatedString,
|
||||
) FlushDeclError!void {
|
||||
const gpa = self.base.comp.gpa;
|
||||
try self.flushLazyFns(f, decl_block.lazy_fns);
|
||||
try self.flushLazyFns(zcu, mod, f, &decl_block.ctype_pool, decl_block.lazy_fns);
|
||||
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
|
||||
fwd_decl: {
|
||||
if (extern_symbol_name.unwrap()) |name| {
|
||||
@ -740,15 +733,15 @@ fn flushDeclBlock(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn flushEmitH(module: *Module) !void {
|
||||
pub fn flushEmitH(zcu: *Zcu) !void {
|
||||
const tracy = trace(@src());
|
||||
defer tracy.end();
|
||||
|
||||
const emit_h = module.emit_h orelse return;
|
||||
const emit_h = zcu.emit_h orelse return;
|
||||
|
||||
// We collect a list of buffers to write, and write them all at once with pwritev 😎
|
||||
const num_buffers = emit_h.decl_table.count() + 1;
|
||||
var all_buffers = try std.ArrayList(std.posix.iovec_const).initCapacity(module.gpa, num_buffers);
|
||||
var all_buffers = try std.ArrayList(std.posix.iovec_const).initCapacity(zcu.gpa, num_buffers);
|
||||
defer all_buffers.deinit();
|
||||
|
||||
var file_size: u64 = zig_h.len;
|
||||
@ -771,7 +764,7 @@ pub fn flushEmitH(module: *Module) !void {
|
||||
}
|
||||
}
|
||||
|
||||
const directory = emit_h.loc.directory orelse module.comp.local_cache_directory;
|
||||
const directory = emit_h.loc.directory orelse zcu.comp.local_cache_directory;
|
||||
const file = try directory.handle.createFile(emit_h.loc.basename, .{
|
||||
// We set the end position explicitly below; by not truncating the file, we possibly
|
||||
// make it easier on the file system by doing 1 reallocation instead of two.
|
||||
@ -785,12 +778,12 @@ pub fn flushEmitH(module: *Module) !void {
|
||||
|
||||
pub fn updateExports(
|
||||
self: *C,
|
||||
module: *Module,
|
||||
exported: Module.Exported,
|
||||
exports: []const *Module.Export,
|
||||
zcu: *Zcu,
|
||||
exported: Zcu.Exported,
|
||||
exports: []const *Zcu.Export,
|
||||
) !void {
|
||||
_ = exports;
|
||||
_ = exported;
|
||||
_ = module;
|
||||
_ = zcu;
|
||||
_ = self;
|
||||
}
|
||||
|
||||
@ -1223,7 +1223,7 @@ fn lowerConst(self: *Coff, name: []const u8, val: Value, required_alignment: Int
|
||||
atom.getSymbolPtr(self).value = try self.allocateAtom(
|
||||
atom_index,
|
||||
atom.size,
|
||||
@intCast(required_alignment.toByteUnitsOptional().?),
|
||||
@intCast(required_alignment.toByteUnits().?),
|
||||
);
|
||||
errdefer self.freeAtom(atom_index);
|
||||
|
||||
@ -1344,7 +1344,7 @@ fn updateLazySymbolAtom(
|
||||
symbol.section_number = @as(coff.SectionNumber, @enumFromInt(section_index + 1));
|
||||
symbol.type = .{ .complex_type = .NULL, .base_type = .NULL };
|
||||
|
||||
const vaddr = try self.allocateAtom(atom_index, code_len, @intCast(required_alignment.toByteUnits(0)));
|
||||
const vaddr = try self.allocateAtom(atom_index, code_len, @intCast(required_alignment.toByteUnits() orelse 0));
|
||||
errdefer self.freeAtom(atom_index);
|
||||
|
||||
log.debug("allocated atom for {s} at 0x{x}", .{ name, vaddr });
|
||||
@ -1428,7 +1428,7 @@ fn updateDeclCode(self: *Coff, decl_index: InternPool.DeclIndex, code: []u8, com
|
||||
const decl_name = mod.intern_pool.stringToSlice(try decl.fullyQualifiedName(mod));
|
||||
|
||||
log.debug("updateDeclCode {s}{*}", .{ decl_name, decl });
|
||||
const required_alignment: u32 = @intCast(decl.getAlignment(mod).toByteUnits(0));
|
||||
const required_alignment: u32 = @intCast(decl.getAlignment(mod).toByteUnits() orelse 0);
|
||||
|
||||
const decl_metadata = self.decls.get(decl_index).?;
|
||||
const atom_index = decl_metadata.atom;
|
||||
|
||||
@ -4051,7 +4051,7 @@ fn updateSectionSizes(self: *Elf) !void {
|
||||
const padding = offset - shdr.sh_size;
|
||||
atom_ptr.value = offset;
|
||||
shdr.sh_size += padding + atom_ptr.size;
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, atom_ptr.alignment.toByteUnits(1));
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, atom_ptr.alignment.toByteUnits() orelse 1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -208,7 +208,7 @@ pub fn allocate(self: *Atom, elf_file: *Elf) !void {
|
||||
zig_object.debug_aranges_section_dirty = true;
|
||||
}
|
||||
}
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, self.alignment.toByteUnitsOptional().?);
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, self.alignment.toByteUnits().?);
|
||||
|
||||
// This function can also reallocate an atom.
|
||||
// In this case we need to "unplug" it from its previous location before
|
||||
|
||||
@ -313,7 +313,7 @@ pub fn inputShdr(self: ZigObject, atom_index: Atom.Index, elf_file: *Elf) elf.El
|
||||
shdr.sh_addr = 0;
|
||||
shdr.sh_offset = 0;
|
||||
shdr.sh_size = atom.size;
|
||||
shdr.sh_addralign = atom.alignment.toByteUnits(1);
|
||||
shdr.sh_addralign = atom.alignment.toByteUnits() orelse 1;
|
||||
return shdr;
|
||||
}
|
||||
|
||||
|
||||
@ -330,7 +330,7 @@ fn updateSectionSizes(elf_file: *Elf) !void {
|
||||
const padding = offset - shdr.sh_size;
|
||||
atom_ptr.value = offset;
|
||||
shdr.sh_size += padding + atom_ptr.size;
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, atom_ptr.alignment.toByteUnits(1));
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, atom_ptr.alignment.toByteUnits() orelse 1);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -63,7 +63,7 @@ fn advance(shdr: *elf.Elf64_Shdr, size: u64, alignment: Atom.Alignment) !u64 {
|
||||
const offset = alignment.forward(shdr.sh_size);
|
||||
const padding = offset - shdr.sh_size;
|
||||
shdr.sh_size += padding + size;
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, alignment.toByteUnits(1));
|
||||
shdr.sh_addralign = @max(shdr.sh_addralign, alignment.toByteUnits() orelse 1);
|
||||
return offset;
|
||||
}
|
||||
|
||||
|
||||
@ -2060,7 +2060,7 @@ fn calcSectionSizes(self: *MachO) !void {
|
||||
|
||||
for (atoms.items) |atom_index| {
|
||||
const atom = self.getAtom(atom_index).?;
|
||||
const atom_alignment = atom.alignment.toByteUnits(1);
|
||||
const atom_alignment = atom.alignment.toByteUnits() orelse 1;
|
||||
const offset = mem.alignForward(u64, header.size, atom_alignment);
|
||||
const padding = offset - header.size;
|
||||
atom.value = offset;
|
||||
|
||||
@ -380,7 +380,7 @@ fn calcSectionSizes(macho_file: *MachO) !void {
|
||||
if (atoms.items.len == 0) continue;
|
||||
for (atoms.items) |atom_index| {
|
||||
const atom = macho_file.getAtom(atom_index).?;
|
||||
const atom_alignment = atom.alignment.toByteUnits(1);
|
||||
const atom_alignment = atom.alignment.toByteUnits() orelse 1;
|
||||
const offset = mem.alignForward(u64, header.size, atom_alignment);
|
||||
const padding = offset - header.size;
|
||||
atom.value = offset;
|
||||
|
||||
@ -2263,7 +2263,7 @@ fn setupMemory(wasm: *Wasm) !void {
|
||||
}
|
||||
if (wasm.findGlobalSymbol("__tls_align")) |loc| {
|
||||
const sym = loc.getSymbol(wasm);
|
||||
wasm.wasm_globals.items[sym.index - wasm.imported_globals_count].init.i32_const = @intCast(segment.alignment.toByteUnitsOptional().?);
|
||||
wasm.wasm_globals.items[sym.index - wasm.imported_globals_count].init.i32_const = @intCast(segment.alignment.toByteUnits().?);
|
||||
}
|
||||
if (wasm.findGlobalSymbol("__tls_base")) |loc| {
|
||||
const sym = loc.getSymbol(wasm);
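The recurring change in the Coff, Elf, MachO, and Wasm hunks above is an API migration on `Alignment`: the old `toByteUnits(default)` and `toByteUnitsOptional()` collapse into a single `toByteUnits()` returning an optional, so each call site now spells its own fallback (`orelse 0`, `orelse 1`, or `.?` where `.none` is impossible). A rough stand-in for the shape of that API; the enum below is written from scratch for illustration and may differ in detail from the real `InternPool.Alignment`.

const std = @import("std");

/// Log2-encoded alignment, with a sentinel for "no alignment specified".
const Alignment = enum(u6) {
    none = std.math.maxInt(u6),
    _,

    pub fn toByteUnits(a: Alignment) ?u64 {
        return switch (a) {
            .none => null,
            else => @as(u64, 1) << @intFromEnum(a),
        };
    }
};

test "call sites choose their own fallback" {
    const unspecified: Alignment = .none;
    const four: Alignment = @enumFromInt(2); // log2(4)
    try std.testing.expectEqual(@as(u64, 1), unspecified.toByteUnits() orelse 1);
    try std.testing.expectEqual(@as(u64, 4), four.toByteUnits().?);
}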
|
||||
|
||||
@ -29,34 +29,28 @@ pub const Node = struct {
|
||||
map,
|
||||
list,
|
||||
value,
|
||||
|
||||
pub fn Type(comptime tag: Tag) type {
|
||||
return switch (tag) {
|
||||
.doc => Doc,
|
||||
.map => Map,
|
||||
.list => List,
|
||||
.value => Value,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub fn cast(self: *const Node, comptime T: type) ?*const T {
|
||||
if (self.tag != T.base_tag) {
|
||||
return null;
|
||||
}
|
||||
return @fieldParentPtr(T, "base", self);
|
||||
return @fieldParentPtr("base", self);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Node, allocator: Allocator) void {
|
||||
switch (self.tag) {
|
||||
.doc => {
|
||||
const parent = @fieldParentPtr(Node.Doc, "base", self);
|
||||
parent.deinit(allocator);
|
||||
allocator.destroy(parent);
|
||||
},
|
||||
.map => {
|
||||
const parent = @fieldParentPtr(Node.Map, "base", self);
|
||||
parent.deinit(allocator);
|
||||
allocator.destroy(parent);
|
||||
},
|
||||
.list => {
|
||||
const parent = @fieldParentPtr(Node.List, "base", self);
|
||||
parent.deinit(allocator);
|
||||
allocator.destroy(parent);
|
||||
},
|
||||
.value => {
|
||||
const parent = @fieldParentPtr(Node.Value, "base", self);
|
||||
inline else => |tag| {
|
||||
const parent: *tag.Type() = @fieldParentPtr("base", self);
|
||||
parent.deinit(allocator);
|
||||
allocator.destroy(parent);
|
||||
},
|
||||
@ -69,12 +63,9 @@ pub const Node = struct {
|
||||
options: std.fmt.FormatOptions,
|
||||
writer: anytype,
|
||||
) !void {
|
||||
return switch (self.tag) {
|
||||
.doc => @fieldParentPtr(Node.Doc, "base", self).format(fmt, options, writer),
|
||||
.map => @fieldParentPtr(Node.Map, "base", self).format(fmt, options, writer),
|
||||
.list => @fieldParentPtr(Node.List, "base", self).format(fmt, options, writer),
|
||||
.value => @fieldParentPtr(Node.Value, "base", self).format(fmt, options, writer),
|
||||
};
|
||||
switch (self.tag) {
|
||||
inline else => |tag| return @as(*tag.Type(), @fieldParentPtr("base", self)).format(fmt, options, writer),
|
||||
}
|
||||
}
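In `cast`, the parent pointer type no longer appears in the builtin call at all: the function's return type is the result location, and `@fieldParentPtr` picks its result type up from there. A tiny sketch of that shape, with a made-up `Wrapper` type:

const std = @import("std");

const Wrapper = struct { base: i32, extra: i32 };

fn parentOf(base: *const i32) *const Wrapper {
    // No @as needed: the declared return type is the result location.
    return @fieldParentPtr("base", base);
}

test "the return type supplies the parent pointer type" {
    const w: Wrapper = .{ .base = 7, .extra = 0 };
    try std.testing.expectEqual(&w, parentOf(&w.base));
}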
|
||||
|
||||
pub const Doc = struct {
|
||||
|
||||
@ -3544,11 +3544,7 @@ fn createModule(
|
||||
// If the target is not overridden, use the parent's target. Of course,
|
||||
// if this is the root module then we need to proceed to resolve the
|
||||
// target.
|
||||
if (cli_mod.target_arch_os_abi == null and
|
||||
cli_mod.target_mcpu == null and
|
||||
create_module.dynamic_linker == null and
|
||||
create_module.object_format == null)
|
||||
{
|
||||
if (cli_mod.target_arch_os_abi == null and cli_mod.target_mcpu == null) {
|
||||
if (parent) |p| break :t p.resolved_target;
|
||||
}
|
||||
|
||||
|
||||
@ -80,7 +80,7 @@ pub fn print(
|
||||
inline .u64, .i64, .big_int => |x| try writer.print("{}", .{x}),
|
||||
.lazy_align => |ty| if (opt_sema) |sema| {
|
||||
const a = (try Type.fromInterned(ty).abiAlignmentAdvanced(mod, .{ .sema = sema })).scalar;
|
||||
try writer.print("{}", .{a.toByteUnits(0)});
|
||||
try writer.print("{}", .{a.toByteUnits() orelse 0});
|
||||
} else try writer.print("@alignOf({})", .{Type.fromInterned(ty).fmt(mod)}),
|
||||
.lazy_size => |ty| if (opt_sema) |sema| {
|
||||
const s = (try Type.fromInterned(ty).abiSizeAdvanced(mod, .{ .sema = sema })).scalar;
|
||||
|
||||
@ -355,7 +355,6 @@ const Writer = struct {
|
||||
.atomic_rmw => try self.writeAtomicRmw(stream, inst),
|
||||
.shuffle => try self.writeShuffle(stream, inst),
|
||||
.mul_add => try self.writeMulAdd(stream, inst),
|
||||
.field_parent_ptr => try self.writeFieldParentPtr(stream, inst),
|
||||
.builtin_call => try self.writeBuiltinCall(stream, inst),
|
||||
|
||||
.field_type_ref => try self.writeFieldTypeRef(stream, inst),
|
||||
@ -609,6 +608,7 @@ const Writer = struct {
|
||||
|
||||
.restore_err_ret_index => try self.writeRestoreErrRetIndex(stream, extended),
|
||||
.closure_get => try self.writeClosureGet(stream, extended),
|
||||
.field_parent_ptr => try self.writeFieldParentPtr(stream, extended),
|
||||
}
|
||||
}
|
||||
|
||||
@ -901,16 +901,21 @@ const Writer = struct {
|
||||
try self.writeSrc(stream, inst_data.src());
|
||||
}
|
||||
|
||||
fn writeFieldParentPtr(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
|
||||
const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
|
||||
const extra = self.code.extraData(Zir.Inst.FieldParentPtr, inst_data.payload_index).data;
|
||||
try self.writeInstRef(stream, extra.parent_type);
|
||||
fn writeFieldParentPtr(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||
const extra = self.code.extraData(Zir.Inst.FieldParentPtr, extended.operand).data;
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
|
||||
if (flags.align_cast) try stream.writeAll("align_cast, ");
|
||||
if (flags.addrspace_cast) try stream.writeAll("addrspace_cast, ");
|
||||
if (flags.const_cast) try stream.writeAll("const_cast, ");
|
||||
if (flags.volatile_cast) try stream.writeAll("volatile_cast, ");
|
||||
try self.writeInstRef(stream, extra.parent_ptr_type);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_name);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_ptr);
|
||||
try stream.writeAll(") ");
|
||||
try self.writeSrc(stream, inst_data.src());
|
||||
try self.writeSrc(stream, extra.src());
|
||||
}
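`field_parent_ptr` is now an `extended` instruction, so the printer recovers the pointer-cast flags from the 16-bit `small` operand by truncating to the flags' backing integer and bit-casting. A standalone sketch of that round trip; the field layout below mimics `Zir.Inst.FullPtrCastFlags` but is written from scratch for illustration.

const std = @import("std");

const Flags = packed struct(u5) {
    ptr_cast: bool = false,
    align_cast: bool = false,
    addrspace_cast: bool = false,
    const_cast: bool = false,
    volatile_cast: bool = false,
};

test "flags travel through the 16-bit small operand" {
    const FlagsInt = @typeInfo(Flags).Struct.backing_integer.?; // u5
    const small: u16 = @as(FlagsInt, @bitCast(Flags{ .align_cast = true, .const_cast = true }));
    const flags: Flags = @bitCast(@as(FlagsInt, @truncate(small)));
    try std.testing.expect(flags.align_cast and flags.const_cast and !flags.volatile_cast);
}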
|
||||
|
||||
fn writeBuiltinAsyncCall(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||
@ -1069,7 +1074,8 @@ const Writer = struct {
|
||||
}
|
||||
|
||||
fn writePtrCastFull(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||
const flags = @as(Zir.Inst.FullPtrCastFlags, @bitCast(@as(u5, @truncate(extended.small))));
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
|
||||
const extra = self.code.extraData(Zir.Inst.BinNode, extended.operand).data;
|
||||
const src = LazySrcLoc.nodeOffset(extra.node);
|
||||
if (flags.ptr_cast) try stream.writeAll("ptr_cast, ");
|
||||
@ -1085,7 +1091,8 @@ const Writer = struct {
|
||||
}
|
||||
|
||||
fn writePtrCastNoDest(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
|
||||
const flags = @as(Zir.Inst.FullPtrCastFlags, @bitCast(@as(u5, @truncate(extended.small))));
|
||||
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).Struct.backing_integer.?;
|
||||
const flags: Zir.Inst.FullPtrCastFlags = @bitCast(@as(FlagsInt, @truncate(extended.small)));
|
||||
const extra = self.code.extraData(Zir.Inst.UnNode, extended.operand).data;
|
||||
const src = LazySrcLoc.nodeOffset(extra.node);
|
||||
if (flags.const_cast) try stream.writeAll("const_cast, ");
|
||||
|
||||
@ -59,7 +59,7 @@ pub fn RegisterManager(
|
||||
pub const RegisterBitSet = StaticBitSet(tracked_registers.len);
|
||||
|
||||
fn getFunction(self: *Self) *Function {
|
||||
return @fieldParentPtr(Function, "register_manager", self);
|
||||
return @alignCast(@fieldParentPtr("register_manager", self));
|
||||
}
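Because the parent pointer type is now chosen by the caller, `getFunction` also has to assert alignment: the incoming field pointer only guarantees the field's own alignment, which may be lower than what `*Function` requires, hence the added `@alignCast`. A minimal sketch of the same situation with invented types:

const std = @import("std");

const Parent = struct {
    counter: u64 = 0, // forces @alignOf(Parent) == 8
    flag: u8 = 0,
};

fn parentOfFlag(flag: *u8) *Parent {
    // *u8 only guarantees align(1), so the recovered parent pointer must be
    // align-asserted before it can become a *Parent.
    return @alignCast(@fieldParentPtr("flag", flag));
}

test "less-aligned field pointer, more-aligned parent" {
    var p: Parent = .{};
    p.counter += 1;
    try std.testing.expectEqual(&p, parentOfFlag(&p.flag));
}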
|
||||
|
||||
fn excludeRegister(reg: Register, register_class: RegisterBitSet) bool {
|
||||
|
||||
@ -525,7 +525,7 @@ pub fn backendSupportsFeature(
|
||||
.error_return_trace => use_llvm,
|
||||
.is_named_enum_value => use_llvm,
|
||||
.error_set_has_value => use_llvm or cpu_arch.isWasm(),
|
||||
.field_reordering => use_llvm,
|
||||
.field_reordering => ofmt == .c or use_llvm,
|
||||
.safety_checked_instructions => use_llvm,
|
||||
};
|
||||
}
|
||||
|
||||
52 src/type.zig
@ -203,7 +203,7 @@ pub const Type = struct {
|
||||
info.flags.alignment
|
||||
else
|
||||
Type.fromInterned(info.child).abiAlignment(mod);
|
||||
try writer.print("align({d}", .{alignment.toByteUnits(0)});
|
||||
try writer.print("align({d}", .{alignment.toByteUnits() orelse 0});
|
||||
|
||||
if (info.packed_offset.bit_offset != 0 or info.packed_offset.host_size != 0) {
|
||||
try writer.print(":{d}:{d}", .{
|
||||
@ -863,7 +863,7 @@ pub const Type = struct {
|
||||
pub fn lazyAbiAlignment(ty: Type, mod: *Module) !Value {
|
||||
switch (try ty.abiAlignmentAdvanced(mod, .lazy)) {
|
||||
.val => |val| return val,
|
||||
.scalar => |x| return mod.intValue(Type.comptime_int, x.toByteUnits(0)),
|
||||
.scalar => |x| return mod.intValue(Type.comptime_int, x.toByteUnits() orelse 0),
|
||||
}
|
||||
}
|
||||
|
||||
@ -905,7 +905,7 @@ pub const Type = struct {
|
||||
return .{ .scalar = intAbiAlignment(int_type.bits, target) };
|
||||
},
|
||||
.ptr_type, .anyframe_type => {
|
||||
return .{ .scalar = Alignment.fromByteUnits(@divExact(target.ptrBitWidth(), 8)) };
|
||||
return .{ .scalar = ptrAbiAlignment(target) };
|
||||
},
|
||||
.array_type => |array_type| {
|
||||
return Type.fromInterned(array_type.child).abiAlignmentAdvanced(mod, strat);
|
||||
@ -920,6 +920,9 @@ pub const Type = struct {
|
||||
const alignment = std.math.ceilPowerOfTwoAssert(u32, bytes);
|
||||
return .{ .scalar = Alignment.fromByteUnits(alignment) };
|
||||
},
|
||||
.stage2_c => {
|
||||
return Type.fromInterned(vector_type.child).abiAlignmentAdvanced(mod, strat);
|
||||
},
|
||||
.stage2_x86_64 => {
|
||||
if (vector_type.child == .bool_type) {
|
||||
if (vector_type.len > 256 and std.Target.x86.featureSetHas(target.cpu.features, .avx512f)) return .{ .scalar = .@"64" };
|
||||
@ -966,12 +969,12 @@ pub const Type = struct {
|
||||
|
||||
.usize,
|
||||
.isize,
|
||||
=> return .{ .scalar = intAbiAlignment(target.ptrBitWidth(), target) },
|
||||
|
||||
.export_options,
|
||||
.extern_options,
|
||||
.type_info,
|
||||
=> return .{
|
||||
.scalar = Alignment.fromByteUnits(@divExact(target.ptrBitWidth(), 8)),
|
||||
},
|
||||
=> return .{ .scalar = ptrAbiAlignment(target) },
|
||||
|
||||
.c_char => return .{ .scalar = cTypeAlign(target, .char) },
|
||||
.c_short => return .{ .scalar = cTypeAlign(target, .short) },
|
||||
@ -1160,9 +1163,7 @@ pub const Type = struct {
|
||||
const child_type = ty.optionalChild(mod);
|
||||
|
||||
switch (child_type.zigTypeTag(mod)) {
|
||||
.Pointer => return .{
|
||||
.scalar = Alignment.fromByteUnits(@divExact(target.ptrBitWidth(), 8)),
|
||||
},
|
||||
.Pointer => return .{ .scalar = ptrAbiAlignment(target) },
|
||||
.ErrorSet => return abiAlignmentAdvanced(Type.anyerror, mod, strat),
|
||||
.NoReturn => return .{ .scalar = .@"1" },
|
||||
else => {},
|
||||
@ -1274,6 +1275,10 @@ pub const Type = struct {
|
||||
const total_bits = elem_bits * vector_type.len;
|
||||
break :total_bytes (total_bits + 7) / 8;
|
||||
},
|
||||
.stage2_c => total_bytes: {
|
||||
const elem_bytes: u32 = @intCast((try Type.fromInterned(vector_type.child).abiSizeAdvanced(mod, strat)).scalar);
|
||||
break :total_bytes elem_bytes * vector_type.len;
|
||||
},
|
||||
.stage2_x86_64 => total_bytes: {
|
||||
if (vector_type.child == .bool_type) break :total_bytes std.math.divCeil(u32, vector_type.len, 8) catch unreachable;
|
||||
const elem_bytes: u32 = @intCast((try Type.fromInterned(vector_type.child).abiSizeAdvanced(mod, strat)).scalar);
|
||||
@ -1527,15 +1532,19 @@ pub const Type = struct {
|
||||
// guaranteed to be >= that of bool's (1 byte) the added size is exactly equal
|
||||
// to the child type's ABI alignment.
|
||||
return AbiSizeAdvanced{
|
||||
.scalar = child_ty.abiAlignment(mod).toByteUnits(0) + payload_size,
|
||||
.scalar = (child_ty.abiAlignment(mod).toByteUnits() orelse 0) + payload_size,
|
||||
};
|
||||
}
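The optional-size branch reads a little differently now that `toByteUnits` returns an optional, but the formula is unchanged: for a non-packed optional whose child has runtime bits, the ABI size is the payload size plus one child-alignment's worth of bytes for the presence flag. A quick check of that arithmetic, valid on common targets:

const std = @import("std");

test "optional ABI size = payload size + one alignment for the flag" {
    // u32: 4 payload bytes + 4 bytes (its ABI alignment) holding the bool.
    try std.testing.expectEqual(@as(usize, 8), @sizeOf(?u32));
    // u8: 1 payload byte + 1 byte for the bool.
    try std.testing.expectEqual(@as(usize, 2), @sizeOf(?u8));
    // Optional pointers stay pointer-sized (null is encoded in the pointer).
    try std.testing.expectEqual(@sizeOf(*u8), @sizeOf(?*u8));
}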
|
||||
|
||||
fn intAbiSize(bits: u16, target: Target) u64 {
|
||||
pub fn ptrAbiAlignment(target: Target) Alignment {
|
||||
return Alignment.fromNonzeroByteUnits(@divExact(target.ptrBitWidth(), 8));
|
||||
}
|
||||
|
||||
pub fn intAbiSize(bits: u16, target: Target) u64 {
|
||||
return intAbiAlignment(bits, target).forward(@as(u16, @intCast((@as(u17, bits) + 7) / 8)));
|
||||
}
|
||||
|
||||
fn intAbiAlignment(bits: u16, target: Target) Alignment {
|
||||
pub fn intAbiAlignment(bits: u16, target: Target) Alignment {
|
||||
return Alignment.fromByteUnits(@min(
|
||||
std.math.ceilPowerOfTwoPromote(u16, @as(u16, @intCast((@as(u17, bits) + 7) / 8))),
|
||||
target.maxIntAlignment(),
|
||||
@ -1572,7 +1581,7 @@ pub const Type = struct {
|
||||
if (len == 0) return 0;
|
||||
const elem_ty = Type.fromInterned(array_type.child);
|
||||
const elem_size = @max(
|
||||
(try elem_ty.abiAlignmentAdvanced(mod, strat)).scalar.toByteUnits(0),
|
||||
(try elem_ty.abiAlignmentAdvanced(mod, strat)).scalar.toByteUnits() orelse 0,
|
||||
(try elem_ty.abiSizeAdvanced(mod, strat)).scalar,
|
||||
);
|
||||
if (elem_size == 0) return 0;
|
||||
@ -3016,26 +3025,15 @@ pub const Type = struct {
|
||||
}
|
||||
|
||||
/// Returns none in the case of a tuple which uses the integer index as the field name.
|
||||
pub fn structFieldName(ty: Type, field_index: u32, mod: *Module) InternPool.OptionalNullTerminatedString {
|
||||
pub fn structFieldName(ty: Type, index: usize, mod: *Module) InternPool.OptionalNullTerminatedString {
|
||||
const ip = &mod.intern_pool;
|
||||
return switch (ip.indexToKey(ty.toIntern())) {
|
||||
.struct_type => ip.loadStructType(ty.toIntern()).fieldName(ip, field_index),
|
||||
.anon_struct_type => |anon_struct| anon_struct.fieldName(ip, field_index),
|
||||
.struct_type => ip.loadStructType(ty.toIntern()).fieldName(ip, index),
|
||||
.anon_struct_type => |anon_struct| anon_struct.fieldName(ip, index),
|
||||
else => unreachable,
|
||||
};
|
||||
}
|
||||
|
||||
/// When struct types have no field names, the names are implicitly understood to be
|
||||
/// strings corresponding to the field indexes in declaration order. It used to be the
|
||||
/// case that a NullTerminatedString would be stored for each field in this case, however,
|
||||
/// now, callers must handle the possibility that there are no names stored at all.
|
||||
/// Here we fake the previous behavior. Probably something better could be done by examining
|
||||
/// all the callsites of this function.
|
||||
pub fn legacyStructFieldName(ty: Type, i: u32, mod: *Module) InternPool.NullTerminatedString {
|
||||
return ty.structFieldName(i, mod).unwrap() orelse
|
||||
mod.intern_pool.getOrPutStringFmt(mod.gpa, "{d}", .{i}) catch @panic("OOM");
|
||||
}
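`legacyStructFieldName`'s fallback leans on the convention that unnamed (tuple) fields are addressed by their decimal index. That convention is visible from user code as well; a small test of it:

const std = @import("std");

test "tuple fields are named by their indices" {
    const Pair = struct { u32, u8 }; // a tuple type: no field names given
    const fields = @typeInfo(Pair).Struct.fields;
    try std.testing.expectEqualStrings("0", fields[0].name);
    try std.testing.expectEqualStrings("1", fields[1].name);
}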
|
||||
|
||||
pub fn structFieldCount(ty: Type, mod: *Module) u32 {
|
||||
const ip = &mod.intern_pool;
|
||||
return switch (ip.indexToKey(ty.toIntern())) {
|
||||
|
||||
29 stage1/zig.h
@ -130,22 +130,18 @@ typedef char bool;
|
||||
#define zig_restrict
|
||||
#endif
|
||||
|
||||
#if __STDC_VERSION__ >= 201112L
|
||||
#define zig_align(alignment) _Alignas(alignment)
|
||||
#elif zig_has_attribute(aligned)
|
||||
#define zig_align(alignment) __attribute__((aligned(alignment)))
|
||||
#elif _MSC_VER
|
||||
#define zig_align(alignment) __declspec(align(alignment))
|
||||
#else
|
||||
#define zig_align zig_align_unavailable
|
||||
#endif
|
||||
|
||||
#if zig_has_attribute(aligned)
|
||||
#define zig_under_align(alignment) __attribute__((aligned(alignment)))
|
||||
#elif _MSC_VER
|
||||
#define zig_under_align(alignment) zig_align(alignment)
|
||||
#define zig_under_align(alignment) __declspec(align(alignment))
|
||||
#else
|
||||
#define zig_align zig_align_unavailable
|
||||
#define zig_under_align zig_align_unavailable
|
||||
#endif
|
||||
|
||||
#if __STDC_VERSION__ >= 201112L
|
||||
#define zig_align(alignment) _Alignas(alignment)
|
||||
#else
|
||||
#define zig_align(alignment) zig_under_align(alignment)
|
||||
#endif
|
||||
|
||||
#if zig_has_attribute(aligned)
|
||||
@ -165,11 +161,14 @@ typedef char bool;
|
||||
#endif
|
||||
|
||||
#if zig_has_attribute(section)
|
||||
#define zig_linksection(name, def, ...) def __attribute__((section(name)))
|
||||
#define zig_linksection(name) __attribute__((section(name)))
|
||||
#define zig_linksection_fn zig_linksection
|
||||
#elif _MSC_VER
|
||||
#define zig_linksection(name, def, ...) __pragma(section(name, __VA_ARGS__)) __declspec(allocate(name)) def
|
||||
#define zig_linksection(name) __pragma(section(name, read, write)) __declspec(allocate(name))
|
||||
#define zig_linksection_fn(name) __pragma(section(name, read, execute)) __declspec(code_seg(name))
|
||||
#else
|
||||
#define zig_linksection(name, def, ...) zig_linksection_unavailable
|
||||
#define zig_linksection(name) zig_linksection_unavailable
|
||||
#define zig_linksection_fn zig_linksection
|
||||
#endif
|
||||
|
||||
#if zig_has_builtin(unreachable) || defined(zig_gnuc)
|
||||
|
||||
BIN stage1/zig1.wasm (binary file not shown)
@ -624,7 +624,6 @@ test "sub-aligned pointer field access" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
|
||||
|
||||
// Originally reported at https://github.com/ziglang/zig/issues/14904
|
||||
|
||||
@ -694,5 +693,5 @@ test "zero-bit fields in extern struct pad fields appropriately" {
|
||||
try expect(@intFromPtr(&s) % 2 == 0);
|
||||
try expect(@intFromPtr(&s.y) - @intFromPtr(&s.x) == 2);
|
||||
try expect(@intFromPtr(&s.y) == @intFromPtr(&s.a));
|
||||
try expect(@fieldParentPtr(S, "a", &s.a) == &s);
|
||||
try expect(@as(*S, @fieldParentPtr("a", &s.a)) == &s);
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large.
@ -1392,13 +1392,13 @@ test "fieldParentPtr of a zero-bit field" {
|
||||
{
|
||||
const a = A{ .u = 0 };
|
||||
const b_ptr = &a.b;
|
||||
const a_ptr = @fieldParentPtr(A, "b", b_ptr);
|
||||
const a_ptr: *const A = @fieldParentPtr("b", b_ptr);
|
||||
try std.testing.expectEqual(&a, a_ptr);
|
||||
}
|
||||
{
|
||||
var a = A{ .u = 0 };
|
||||
const b_ptr = &a.b;
|
||||
const a_ptr = @fieldParentPtr(A, "b", b_ptr);
|
||||
const a_ptr: *A = @fieldParentPtr("b", b_ptr);
|
||||
try std.testing.expectEqual(&a, a_ptr);
|
||||
}
|
||||
}
|
||||
@ -1406,17 +1406,17 @@ test "fieldParentPtr of a zero-bit field" {
|
||||
{
|
||||
const a = A{ .u = 0 };
|
||||
const c_ptr = &a.b.c;
|
||||
const b_ptr = @fieldParentPtr(@TypeOf(a.b), "c", c_ptr);
|
||||
const b_ptr: @TypeOf(&a.b) = @fieldParentPtr("c", c_ptr);
|
||||
try std.testing.expectEqual(&a.b, b_ptr);
|
||||
const a_ptr = @fieldParentPtr(A, "b", b_ptr);
|
||||
const a_ptr: *const A = @fieldParentPtr("b", b_ptr);
|
||||
try std.testing.expectEqual(&a, a_ptr);
|
||||
}
|
||||
{
|
||||
var a = A{ .u = 0 };
|
||||
const c_ptr = &a.b.c;
|
||||
const b_ptr = @fieldParentPtr(@TypeOf(a.b), "c", c_ptr);
|
||||
const b_ptr: @TypeOf(&a.b) = @fieldParentPtr("c", c_ptr);
|
||||
try std.testing.expectEqual(&a.b, b_ptr);
|
||||
const a_ptr = @fieldParentPtr(A, "b", b_ptr);
|
||||
const a_ptr: *const A = @fieldParentPtr("b", b_ptr);
|
||||
try std.testing.expectEqual(&a, a_ptr);
|
||||
}
|
||||
}
|
||||
|
||||
@ -222,7 +222,7 @@ test "fieldParentPtr of tuple" {
|
||||
var x: u32 = 0;
|
||||
_ = &x;
|
||||
const tuple = .{ x, x };
|
||||
try testing.expect(&tuple == @fieldParentPtr(@TypeOf(tuple), "1", &tuple[1]));
|
||||
try testing.expect(&tuple == @as(@TypeOf(&tuple), @fieldParentPtr("1", &tuple[1])));
|
||||
}
|
||||
|
||||
test "fieldParentPtr of anon struct" {
|
||||
@ -233,7 +233,7 @@ test "fieldParentPtr of anon struct" {
|
||||
var x: u32 = 0;
|
||||
_ = &x;
|
||||
const anon_st = .{ .foo = x, .bar = x };
|
||||
try testing.expect(&anon_st == @fieldParentPtr(@TypeOf(anon_st), "bar", &anon_st.bar));
|
||||
try testing.expect(&anon_st == @as(@TypeOf(&anon_st), @fieldParentPtr("bar", &anon_st.bar)));
|
||||
}
|
||||
|
||||
test "offsetOf tuple" {
|
||||
|
||||
@ -1176,18 +1176,22 @@ test "@shlWithOverflow" {
|
||||
test "alignment of vectors" {
|
||||
try expect(@alignOf(@Vector(2, u8)) == switch (builtin.zig_backend) {
|
||||
else => 2,
|
||||
.stage2_c => @alignOf(u8),
|
||||
.stage2_x86_64 => 16,
|
||||
});
|
||||
try expect(@alignOf(@Vector(2, u1)) == switch (builtin.zig_backend) {
|
||||
else => 1,
|
||||
.stage2_c => @alignOf(u1),
|
||||
.stage2_x86_64 => 16,
|
||||
});
|
||||
try expect(@alignOf(@Vector(1, u1)) == switch (builtin.zig_backend) {
|
||||
else => 1,
|
||||
.stage2_c => @alignOf(u1),
|
||||
.stage2_x86_64 => 16,
|
||||
});
|
||||
try expect(@alignOf(@Vector(2, u16)) == switch (builtin.zig_backend) {
|
||||
else => 4,
|
||||
.stage2_c => @alignOf(u16),
|
||||
.stage2_x86_64 => 16,
|
||||
});
|
||||
}
|
||||
|
||||
@ -2,12 +2,12 @@ const Foo = extern struct {
|
||||
derp: i32,
|
||||
};
|
||||
export fn foo(a: *i32) *Foo {
|
||||
return @fieldParentPtr(Foo, "a", a);
|
||||
return @fieldParentPtr("a", a);
|
||||
}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :5:33: error: no field named 'a' in struct 'tmp.Foo'
|
||||
// :5:28: error: no field named 'a' in struct 'tmp.Foo'
|
||||
// :1:20: note: struct declared here
|
||||
|
||||
@ -9,7 +9,7 @@ const foo = Foo{
|
||||
|
||||
comptime {
|
||||
const field_ptr: *i32 = @ptrFromInt(0x1234);
|
||||
const another_foo_ptr = @fieldParentPtr(Foo, "b", field_ptr);
|
||||
const another_foo_ptr: *const Foo = @fieldParentPtr("b", field_ptr);
|
||||
_ = another_foo_ptr;
|
||||
}
|
||||
|
||||
@ -17,4 +17,4 @@ comptime {
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :12:55: error: pointer value not based on parent struct
|
||||
// :12:62: error: pointer value not based on parent struct
|
||||
|
||||
@ -8,7 +8,7 @@ const foo = Foo{
|
||||
};
|
||||
|
||||
comptime {
|
||||
const another_foo_ptr = @fieldParentPtr(Foo, "b", &foo.a);
|
||||
const another_foo_ptr: *const Foo = @fieldParentPtr("b", &foo.a);
|
||||
_ = another_foo_ptr;
|
||||
}
|
||||
|
||||
@ -16,5 +16,5 @@ comptime {
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :11:29: error: field 'b' has index '1' but pointer value is index '0' of struct 'tmp.Foo'
|
||||
// :11:41: error: field 'b' has index '1' but pointer value is index '0' of struct 'tmp.Foo'
|
||||
// :1:13: note: struct declared here
|
||||
|
||||
@ -1,12 +1,12 @@
|
||||
const Foo = extern struct {
|
||||
a: i32,
|
||||
};
|
||||
export fn foo(a: i32) *Foo {
|
||||
return @fieldParentPtr(Foo, "a", a);
|
||||
export fn foo(a: i32) *const Foo {
|
||||
return @fieldParentPtr("a", a);
|
||||
}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :5:38: error: expected pointer type, found 'i32'
|
||||
// :5:33: error: expected pointer type, found 'i32'
|
||||
|
||||
10 test/cases/compile_errors/fieldParentPtr-non_pointer.zig (new file)
@ -0,0 +1,10 @@
|
||||
const Foo = i32;
|
||||
export fn foo(a: *i32) Foo {
|
||||
return @fieldParentPtr("a", a);
|
||||
}
|
||||
|
||||
// error
|
||||
// backend=llvm
|
||||
// target=native
|
||||
//
|
||||
// :3:12: error: expected pointer type, found 'i32'
|
||||
@ -1,10 +0,0 @@
|
||||
const Foo = i32;
|
||||
export fn foo(a: *i32) *Foo {
|
||||
return @fieldParentPtr(Foo, "a", a);
|
||||
}
|
||||
|
||||
// error
|
||||
// backend=llvm
|
||||
// target=native
|
||||
//
|
||||
// :3:28: error: expected struct or union type, found 'i32'
|
||||
@ -5,7 +5,7 @@ pub export fn entry1() void {
|
||||
@offsetOf(T, "a");
|
||||
}
|
||||
pub export fn entry2() void {
|
||||
@fieldParentPtr(T, "a", undefined);
|
||||
@as(*T, @fieldParentPtr("a", undefined));
|
||||
}
|
||||
|
||||
// error
|
||||
@ -13,4 +13,4 @@ pub export fn entry2() void {
|
||||
// target=native
|
||||
//
|
||||
// :5:5: error: no offset available for comptime field
|
||||
// :8:5: error: cannot get @fieldParentPtr of a comptime field
|
||||
// :8:29: error: cannot get @fieldParentPtr of a comptime field
|
||||
|
||||
@ -8,7 +8,7 @@ export fn entry() u32 {
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :3:23: error: cast increases pointer alignment
|
||||
// :3:23: error: @ptrCast increases pointer alignment
|
||||
// :3:32: note: '*u8' has alignment '1'
|
||||
// :3:23: note: '*u32' has alignment '4'
|
||||
// :3:23: note: use @alignCast to assert pointer alignment
|
||||
|
||||
13 test/cases/compile_errors/invalid_bit_pointer.zig (new file)
@ -0,0 +1,13 @@
|
||||
comptime {
|
||||
_ = *align(1:32:4) u8;
|
||||
}
|
||||
comptime {
|
||||
_ = *align(1:25:4) u8;
|
||||
}
|
||||
|
||||
// error
|
||||
// backend=stage2
|
||||
// target=native
|
||||
//
|
||||
// :2:18: error: packed type 'u8' at bit offset 32 starts 0 bits after the end of a 4 byte host integer
|
||||
// :5:18: error: packed type 'u8' at bit offset 25 ends 1 bits after the end of a 4 byte host integer
|
||||
Some files were not shown because too many files have changed in this diff.