AstGen: implement alignment on locals

Andrew Kelley 2021-04-21 22:43:57 -07:00
parent ea00ddfe37
commit 389020009a
4 changed files with 139 additions and 16 deletions
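
For context, this commit teaches AstGen to lower declarations like the following (a minimal sketch, not a test from the commit itself; Sema support is still stubbed out below, so such code only gets as far as ZIR generation):

test "alignment on locals" {
    var x: u32 align(16) = 0; // mutable local with an explicit alignment
    const p: *align(16) u32 = &x; // the alignment is part of the pointer type
    _ = p;
}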

View File

@@ -1902,9 +1902,6 @@ fn varDecl(
) InnerError!*Scope {
try emitDbgNode(gz, node);
const astgen = gz.astgen;
-if (var_decl.ast.align_node != 0) {
-return astgen.failNode(var_decl.ast.align_node, "TODO implement alignment on locals", .{});
-}
const gpa = astgen.gpa;
const tree = &astgen.file.tree;
const token_tags = tree.tokens.items(.tag);
@@ -1919,12 +1916,12 @@ fn varDecl(
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
if (mem.eql(u8, local_val.name, ident_name)) {
-return astgen.failTokNotes(name_token, "redefinition of '{s}'", .{
+return astgen.failTokNotes(name_token, "redeclaration of '{s}'", .{
ident_name,
}, &[_]u32{
try astgen.errNoteTok(
local_val.token_src,
-"previous definition is here",
+"previous declaration is here",
.{},
),
});
@@ -1934,12 +1931,12 @@ fn varDecl(
.local_ptr => {
const local_ptr = s.cast(Scope.LocalPtr).?;
if (mem.eql(u8, local_ptr.name, ident_name)) {
-return astgen.failTokNotes(name_token, "redefinition of '{s}'", .{
+return astgen.failTokNotes(name_token, "redeclaration of '{s}'", .{
ident_name,
}, &[_]u32{
try astgen.errNoteTok(
local_ptr.token_src,
-"previous definition is here",
+"previous declaration is here",
.{},
),
});
@@ -1957,15 +1954,21 @@ fn varDecl(
return astgen.failNode(node, "variables must be initialized", .{});
}
+const align_inst: Zir.Inst.Ref = if (var_decl.ast.align_node != 0)
+try expr(gz, scope, align_rl, var_decl.ast.align_node)
+else
+.none;
switch (token_tags[var_decl.ast.mut_token]) {
.keyword_const => {
if (var_decl.comptime_token) |comptime_token| {
return astgen.failTok(comptime_token, "'comptime const' is redundant; instead wrap the initialization expression with 'comptime'", .{});
}
// Depending on the type of AST the initialization expression is, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
-if (!nodeMayNeedMemoryLocation(tree, var_decl.ast.init_node)) {
+if (align_inst == .none and !nodeMayNeedMemoryLocation(tree, var_decl.ast.init_node)) {
const result_loc: ResultLoc = if (var_decl.ast.type_node != 0) .{
.ty = try typeExpr(gz, scope, var_decl.ast.type_node),
} else .none;
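
Note the new condition: an explicit alignment now disables the no-alloc fast path even when the initializer is an rvalue, because alignment is a property of a memory location. An illustrative declaration (an assumption, not from the commit):

const x align(16) = @as(u32, 42); // rvalue initializer, but align(16) still forces an alloc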
@@ -1997,10 +2000,29 @@ fn varDecl(
if (var_decl.ast.type_node != 0) {
const type_inst = try typeExpr(gz, &init_scope.base, var_decl.ast.type_node);
opt_type_inst = type_inst;
-init_scope.rl_ptr = try init_scope.addUnNode(.alloc, type_inst, node);
+if (align_inst == .none) {
+init_scope.rl_ptr = try init_scope.addUnNode(.alloc, type_inst, node);
+} else {
+init_scope.rl_ptr = try gz.addAllocExtended(.{
+.node = node,
+.type_inst = type_inst,
+.align_inst = align_inst,
+.is_const = true,
+.is_comptime = false,
+});
+}
init_scope.rl_ty_inst = type_inst;
} else {
-const alloc = try init_scope.addNode(.alloc_inferred, node);
+const alloc = if (align_inst == .none)
+try init_scope.addNode(.alloc_inferred, node)
+else
+try gz.addAllocExtended(.{
+.node = node,
+.type_inst = .none,
+.align_inst = align_inst,
+.is_const = true,
+.is_comptime = false,
+});
resolve_inferred_alloc = alloc;
init_scope.rl_ptr = alloc;
}
@@ -2010,7 +2032,7 @@ fn varDecl(
const zir_datas = astgen.instructions.items(.data);
const parent_zir = &gz.instructions;
-if (init_scope.rvalue_rl_count == 1) {
+if (align_inst == .none and init_scope.rvalue_rl_count == 1) {
// Result location pointer not used. We don't need an alloc for this
// const local, and type inference becomes trivial.
// Move the init_scope instructions into the parent scope, eliding
@@ -2070,12 +2092,36 @@ fn varDecl(
alloc: Zir.Inst.Ref,
} = if (var_decl.ast.type_node != 0) a: {
const type_inst = try typeExpr(gz, scope, var_decl.ast.type_node);
-const tag: Zir.Inst.Tag = if (is_comptime) .alloc_comptime else .alloc_mut;
-const alloc = try gz.addUnNode(tag, type_inst, node);
+const alloc = alloc: {
+if (align_inst == .none) {
+const tag: Zir.Inst.Tag = if (is_comptime) .alloc_comptime else .alloc_mut;
+break :alloc try gz.addUnNode(tag, type_inst, node);
+} else {
+break :alloc try gz.addAllocExtended(.{
+.node = node,
+.type_inst = type_inst,
+.align_inst = align_inst,
+.is_const = false,
+.is_comptime = is_comptime,
+});
+}
+};
break :a .{ .alloc = alloc, .result_loc = .{ .ptr = alloc } };
} else a: {
-const tag: Zir.Inst.Tag = if (is_comptime) .alloc_inferred_comptime else .alloc_inferred_mut;
-const alloc = try gz.addNode(tag, node);
+const alloc = alloc: {
+if (align_inst == .none) {
+const tag: Zir.Inst.Tag = if (is_comptime) .alloc_inferred_comptime else .alloc_inferred_mut;
+break :alloc try gz.addNode(tag, node);
+} else {
+break :alloc try gz.addAllocExtended(.{
+.node = node,
+.type_inst = .none,
+.align_inst = align_inst,
+.is_const = false,
+.is_comptime = is_comptime,
+});
+}
+};
resolve_inferred_alloc = alloc;
break :a .{ .alloc = alloc, .result_loc = .{ .inferred_ptr = alloc } };
};
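
The else-branch passes type_inst = .none, so a mutable local may carry an alignment without an explicit type. A declaration taking that path might look like this (illustrative, not from the commit):

var counter align(64) = @as(u32, 0); // inferred type, explicit alignment -> extended alloc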

View File

@@ -1630,6 +1630,57 @@ pub const Scope = struct {
});
}
+pub fn addAllocExtended(
+gz: *GenZir,
+args: struct {
+/// Absolute node index. This function does the conversion to offset from Decl.
+node: ast.Node.Index,
+type_inst: Zir.Inst.Ref,
+align_inst: Zir.Inst.Ref,
+is_const: bool,
+is_comptime: bool,
+},
+) !Zir.Inst.Ref {
+const astgen = gz.astgen;
+const gpa = astgen.gpa;
+try gz.instructions.ensureUnusedCapacity(gpa, 1);
+try astgen.instructions.ensureUnusedCapacity(gpa, 1);
+try astgen.extra.ensureUnusedCapacity(
+gpa,
+@typeInfo(Zir.Inst.AllocExtended).Struct.fields.len +
+@as(usize, @boolToInt(args.type_inst != .none)) +
+@as(usize, @boolToInt(args.align_inst != .none)),
+);
+const payload_index = gz.astgen.addExtra(Zir.Inst.AllocExtended{
+.src_node = gz.nodeIndexToRelative(args.node),
+}) catch unreachable; // ensureUnusedCapacity above
+if (args.type_inst != .none) {
+astgen.extra.appendAssumeCapacity(@enumToInt(args.type_inst));
+}
+if (args.align_inst != .none) {
+astgen.extra.appendAssumeCapacity(@enumToInt(args.align_inst));
+}
+const has_type: u4 = @boolToInt(args.type_inst != .none);
+const has_align: u4 = @boolToInt(args.align_inst != .none);
+const is_const: u4 = @boolToInt(args.is_const);
+const is_comptime: u4 = @boolToInt(args.is_comptime);
+const small: u16 = has_type | (has_align << 1) | (is_const << 2) | (is_comptime << 3);
+const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
+astgen.instructions.appendAssumeCapacity(.{
+.tag = .extended,
+.data = .{ .extended = .{
+.opcode = .alloc,
+.small = small,
+.operand = payload_index,
+} },
+});
+gz.instructions.appendAssumeCapacity(new_index);
+return gz.indexToRef(new_index);
+}
/// Asserts that `str` is 8 or fewer bytes.
pub fn addSmallStr(
gz: *GenZir,

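The four flags are packed into the instruction's 16-bit `small` field exactly as documented on the new `alloc` opcode further down. Consumers can unpack them with plain bit tests (a sketch mirroring the encoding above, not code from this commit):

const has_type = (small & 0b0001) != 0;
const has_align = (small & 0b0010) != 0;
const is_const = (small & 0b0100) != 0;
const is_comptime = (small & 0b1000) != 0;
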
View File

@@ -516,6 +516,7 @@ fn zirExtended(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerErro
.error_return_trace => return sema.zirErrorReturnTrace(block, extended),
.frame => return sema.zirFrame( block, extended),
.frame_address => return sema.zirFrameAddress( block, extended),
+.alloc => return sema.zirAllocExtended( block, extended),
.c_undef => return sema.zirCUndef( block, extended),
.c_include => return sema.zirCInclude( block, extended),
.c_define => return sema.zirCDefine( block, extended),
@@ -1131,6 +1132,16 @@ fn zirIndexablePtrLen(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) In
return sema.analyzeLoad(block, src, result_ptr, result_ptr.src);
}
+fn zirAllocExtended(
+sema: *Sema,
+block: *Scope.Block,
+extended: Zir.Inst.Extended.InstData,
+) InnerError!*Inst {
+const extra = sema.code.extraData(Zir.Inst.AllocExtended, extended.operand);
+const src: LazySrcLoc = .{ .node_offset = extra.data.src_node };
+return sema.mod.fail(&block.base, src, "TODO implement Sema.zirAllocExtended", .{});
+}
fn zirAllocComptime(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();

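This commit deliberately leaves the Sema handler as a TODO. Given the AllocExtended trailing layout declared below, the eventual decoding of the optional operands will presumably look something like this (a hedged sketch; everything past the `extraData` call is an assumption):

const extra = sema.code.extraData(Zir.Inst.AllocExtended, extended.operand);
var extra_index: usize = extra.end;
const type_ref: Zir.Inst.Ref = if (extended.small & 0b0001 != 0) blk: {
    const ref = @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_index]);
    extra_index += 1;
    break :blk ref;
} else .none;
const align_ref: Zir.Inst.Ref = if (extended.small & 0b0010 != 0)
    @intToEnum(Zir.Inst.Ref, sema.code.extra[extra_index])
else
    .none;
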
View File

@@ -926,7 +926,6 @@ pub const Inst = struct {
/// Allocates stack local memory.
/// Uses the `un_node` union field. The operand is the type of the allocated object.
/// The node source location points to a var decl node.
-/// Indicates the beginning of a new statement in debug info.
alloc,
/// Same as `alloc` except mutable.
alloc_mut,
@@ -1251,6 +1250,14 @@ pub const Inst = struct {
/// Implements the `@frameAddress` builtin.
/// `operand` is `src_node: i32`.
frame_address,
+/// Same as `alloc` from `Tag` but may contain an alignment instruction.
+/// `operand` is payload index to `AllocExtended`.
+/// `small`:
+/// * 0b000X - has type
+/// * 0b00X0 - has alignment
+/// * 0b0X00 - 1=const, 0=var
+/// * 0bX000 - is comptime
+alloc,
/// `operand` is payload index to `UnNode`.
c_undef,
/// `operand` is payload index to `UnNode`.
@@ -2200,6 +2207,13 @@ pub const Inst = struct {
args: Ref,
};
+/// Trailing:
+/// 0. type_inst: Ref, // if small 0b000X is set
+/// 1. align_inst: Ref, // if small 0b00X0 is set
+pub const AllocExtended = struct {
+src_node: i32,
+};
/// Trailing: `CompileErrors.Item` for each `items_len`.
pub const CompileErrors = struct {
items_len: u32,
@@ -2571,6 +2585,7 @@ const Writer = struct {
=> try self.writeExtNode(stream, extended),
.func,
+.alloc,
.c_undef,
.c_include,
.c_define,