stage2: update ZIR for generic functions

ZIR encoding for functions is changed in preparation for generic
function support. As an example:

```zig
const std = @import("std");
const expect = std.testing.expect;

test "example" {
    var x: usize = 0;
    x += checkSize(i32, 1);
    x += checkSize(bool, true);
    try expect(x == 5);
}

fn checkSize(comptime T: type, x: T) usize {
    _ = x;
    return @sizeOf(T);
}
```

Previous ZIR for the `checkSize` function:

```zir
  [165] checkSize line(10) hash(0226f62e189fd0b1c5fca02cf4617562): %55 = block_inline({
    %56 = decl_val("T") token_offset:11:35
    %57 = as_node(@Ref.type_type, %56) node_offset:11:35
    %69 = extended(func([comptime @Ref.type_type, %57], @Ref.usize_type, {
      %58 = arg("T") token_offset:11:23
      %59 = as_node(@Ref.type_type, %58) node_offset:11:35
      %60 = arg("x") token_offset:11:32
      %61 = dbg_stmt(11, 4)
```

ZIR for the `checkSize` function after this commit:

```zir
  [157] checkSize line(10) hash(0226f62e189fd0b1c5fca02cf4617562): %55 = block_inline({
    %56 = param_comptime("T", @Ref.type_type) token_offset:11:23
    %57 = as_node(@Ref.type_type, %56) node_offset:11:35
    %58 = param("x", %57) token_offset:11:32
    %67 = func(@Ref.usize_type, {
      %59 = dbg_stmt(11, 4)
```

Noted differences:
 * Previously, each parameter's type expression was redundantly repeated.
 * Previously, the parameter names were redundantly stored in the ZIR
   extra array.
 * The `arg` ZIR instructions that previously appeared at the start of a
   function body are replaced by param instructions that live outside
   the function body, in the same block as the `func` instruction.
   There are four variants (illustrated after this list):
   - param
   - param_comptime
   - param_anytype
   - param_anytype_comptime
 * The param instructions additionally encode the type.
 * Because of the param instructions, the `func` instruction no longer
   encodes the list of parameter types or the comptime bits.
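
For illustration, a signature exercising all four variants would lower
roughly as follows (a sketch; the tag selection mirrors the
`is_comptime`/`is_anytype` branches in the AstGen diff below):

```zig
fn f(
    a: u32,              // param
    comptime T: type,    // param_comptime
    b: anytype,          // param_anytype
    comptime c: anytype, // param_anytype_comptime
) void {
    _ = a;
    _ = T;
    _ = b;
    _ = c;
}
```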

It's implied that Sema will collect the param instructions so that, when
a `func` instruction is encountered, they are implicitly used to
construct the function's type. This is so that we can satisfy all three
ways of performing semantic analysis on a function (sketched after this
list):

 1. runtime: Sema will insert AIR arg instructions for each parameter,
    and insert into the Sema inst_map ZIR param => AIR arg.

 2. comptime/inline: Sema will insert into the inst_map ZIR param =>
    callsite arguments.

 3. generic: Sema will map *only the comptime* ZIR param instructions to
    the AIR instructions for the comptime arguments at the callsite, and
    then re-run Sema for the function's Decl. This will produce a new
    function which is the monomorphized function.
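
A minimal sketch of those three mappings. `Kind`, `air_args`, and
`callsite_args` are illustrative names, not the actual API; only
`inst_map` and `gpa` come from the Sema.zig diff below:

```zig
const Kind = enum { runtime, comptime_or_inline, generic };

fn mapParams(
    sema: *Sema,
    kind: Kind,
    param_insts: []const Zir.Inst.Index,
    air_args: []const Air.Inst.Ref,
    callsite_args: []const Air.Inst.Ref,
    comptime_params: []const bool,
) !void {
    for (param_insts) |param_inst, i| {
        const mapped = switch (kind) {
            // 1. runtime: ZIR param => AIR arg instruction.
            .runtime => air_args[i],
            // 2. comptime/inline: ZIR param => callsite argument.
            .comptime_or_inline => callsite_args[i],
            // 3. generic: map only the comptime params; afterwards Sema
            //    re-runs on the Decl to produce the monomorphized function.
            .generic => if (comptime_params[i]) callsite_args[i] else continue,
        };
        try sema.inst_map.put(sema.gpa, param_inst, mapped);
    }
}
```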

Additionally:

 * AstGen: Update usage of deprecated `ensureCapacity` to
   `ensureUnusedCapacity` or `ensureTotalCapacity`.
 * Introduce `Type.fnInfo` for getting all the relevant data about a
   function type at once, and use it in `analyzeCall` (usage sketched
   below).
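
For example, condensed from the `analyzeCall` changes in the Sema.zig
diff below:

```zig
const func_ty_info = func_ty.fnInfo();
const cc = func_ty_info.cc;
const fn_params_len = func_ty_info.param_types.len;
if (func_ty_info.is_var_args) {
    assert(cc == .C);
    // at least fn_params_len arguments are required
} else if (fn_params_len != args.len) {
    // error: wrong argument count
}
if (func_ty_info.is_generic) {
    // TODO implement generic fn call (started in this commit)
}
```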

This commit starts a branch to implement generic functions in stage2.
Test regressions have not been addressed yet.
Author: Andrew Kelley
Date:   2021-08-02 20:35:55 -07:00
parent: b465037a65
commit: 1472dc3ddb
6 changed files with 431 additions and 378 deletions

BRANCH_TODO (new file, 9 lines)

@ -0,0 +1,9 @@
* update arg instructions:
- runtime function call inserts AIR arg instructions and Sema map items for them
- comptime/inline function call inserts Sema map items for the args
- generic instantiation inserts Sema map items for the comptime args only, re-runs the
Decl ZIR to get the new Fn.
* generic function call where it makes a new function
* memoize the instantiation in a table
* anytype with next parameter expression using it
* comptime anytype

src/AstGen.zig

@ -42,7 +42,7 @@ const InnerError = error{ OutOfMemory, AnalysisFail };
fn addExtra(astgen: *AstGen, extra: anytype) Allocator.Error!u32 {
const fields = std.meta.fields(@TypeOf(extra));
try astgen.extra.ensureCapacity(astgen.gpa, astgen.extra.items.len + fields.len);
try astgen.extra.ensureUnusedCapacity(astgen.gpa, fields.len);
return addExtraAssumeCapacity(astgen, extra);
}
@ -259,6 +259,7 @@ pub const ResultLoc = union(enum) {
pub const align_rl: ResultLoc = .{ .ty = .u16_type };
pub const bool_rl: ResultLoc = .{ .ty = .bool_type };
pub const type_rl: ResultLoc = .{ .ty = .type_type };
fn typeExpr(gz: *GenZir, scope: *Scope, type_node: ast.Node.Index) InnerError!Zir.Inst.Ref {
const prev_force_comptime = gz.force_comptime;
@ -1036,7 +1037,6 @@ fn fnProtoExpr(
fn_proto: ast.full.FnProto,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
@ -1046,71 +1046,53 @@ fn fnProtoExpr(
};
assert(!is_extern);
// The AST params array does not contain anytype and ... parameters.
// We must iterate to count how many param types to allocate.
const param_count = blk: {
var count: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| {
if (param.anytype_ellipsis3) |token| switch (token_tags[token]) {
.ellipsis3 => break,
.keyword_anytype => {},
else => unreachable,
};
count += 1;
}
break :blk count;
};
const param_types = try gpa.alloc(Zir.Inst.Ref, param_count);
defer gpa.free(param_types);
const bits_per_param = 1;
const params_per_u32 = 32 / bits_per_param;
// We only need this if there are greater than params_per_u32 fields.
var bit_bag = ArrayListUnmanaged(u32){};
defer bit_bag.deinit(gpa);
var cur_bit_bag: u32 = 0;
var is_var_args = false;
{
const is_var_args = is_var_args: {
var param_type_i: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| : (param_type_i += 1) {
if (param_type_i % params_per_u32 == 0 and param_type_i != 0) {
try bit_bag.append(gpa, cur_bit_bag);
cur_bit_bag = 0;
}
const is_comptime = if (param.comptime_noalias) |token|
token_tags[token] == .keyword_comptime
else
false;
cur_bit_bag = (cur_bit_bag >> bits_per_param) |
(@as(u32, @boolToInt(is_comptime)) << 31);
if (param.anytype_ellipsis3) |token| {
const is_anytype = if (param.anytype_ellipsis3) |token| blk: {
switch (token_tags[token]) {
.keyword_anytype => {
param_types[param_type_i] = .none;
continue;
},
.ellipsis3 => {
is_var_args = true;
break;
},
.keyword_anytype => break :blk true,
.ellipsis3 => break :is_var_args true,
else => unreachable,
}
}
const param_type_node = param.type_expr;
assert(param_type_node != 0);
param_types[param_type_i] =
try expr(gz, scope, .{ .ty = .type_type }, param_type_node);
}
assert(param_type_i == param_count);
} else false;
const empty_slot_count = params_per_u32 - (param_type_i % params_per_u32);
if (empty_slot_count < params_per_u32) {
cur_bit_bag >>= @intCast(u5, empty_slot_count * bits_per_param);
const param_name: u32 = if (param.name_token) |name_token| blk: {
if (mem.eql(u8, "_", tree.tokenSlice(name_token)))
break :blk 0;
break :blk try astgen.identAsString(name_token);
} else 0;
if (is_anytype) {
const name_token = param.name_token orelse param.anytype_ellipsis3.?;
const tag: Zir.Inst.Tag = if (is_comptime)
.param_anytype_comptime
else
.param_anytype;
_ = try gz.addStrTok(tag, param_name, name_token);
} else {
const param_type_node = param.type_expr;
assert(param_type_node != 0);
const param_type = try expr(gz, scope, type_rl, param_type_node);
const main_tokens = tree.nodes.items(.main_token);
const name_token = param.name_token orelse main_tokens[param_type_node];
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
_ = try gz.addPlTok(tag, name_token, Zir.Inst.Param{
.name = param_name,
.ty = param_type,
});
}
}
}
break :is_var_args false;
};
const align_inst: Zir.Inst.Ref = if (fn_proto.ast.align_expr == 0) .none else inst: {
break :inst try expr(gz, scope, align_rl, fn_proto.ast.align_expr);
@ -1144,7 +1126,6 @@ fn fnProtoExpr(
const result = try gz.addFunc(.{
.src_node = fn_proto.ast.proto_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.body = &[0]Zir.Inst.Index{},
.cc = cc,
.align_inst = align_inst,
@ -1153,8 +1134,6 @@ fn fnProtoExpr(
.is_inferred_error = false,
.is_test = false,
.is_extern = false,
.cur_bit_bag = cur_bit_bag,
.bit_bag = bit_bag.items,
});
return rvalue(gz, rl, result, fn_proto.ast.proto_node);
}
@ -1447,8 +1426,8 @@ fn structInitExprRlNone(
const init_inst = try gz.addPlNode(tag, node, Zir.Inst.StructInitAnon{
.fields_len = @intCast(u32, fields_list.len),
});
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
fields_list.len * @typeInfo(Zir.Inst.StructInitAnon.Item).Struct.fields.len);
try astgen.extra.ensureUnusedCapacity(gpa, fields_list.len *
@typeInfo(Zir.Inst.StructInitAnon.Item).Struct.fields.len);
for (fields_list) |field| {
_ = gz.astgen.addExtraAssumeCapacity(field);
}
@ -1520,8 +1499,8 @@ fn structInitExprRlTy(
const init_inst = try gz.addPlNode(tag, node, Zir.Inst.StructInit{
.fields_len = @intCast(u32, fields_list.len),
});
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
fields_list.len * @typeInfo(Zir.Inst.StructInit.Item).Struct.fields.len);
try astgen.extra.ensureUnusedCapacity(gpa, fields_list.len *
@typeInfo(Zir.Inst.StructInit.Item).Struct.fields.len);
for (fields_list) |field| {
_ = gz.astgen.addExtraAssumeCapacity(field);
}
@ -1918,7 +1897,10 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
// ZIR instructions that might be a type other than `noreturn` or `void`.
.add,
.addwrap,
.arg,
.param,
.param_comptime,
.param_anytype,
.param_anytype_comptime,
.alloc,
.alloc_mut,
.alloc_comptime,
@ -2488,7 +2470,7 @@ fn varDecl(
// Move the init_scope instructions into the parent scope, swapping
// store_to_block_ptr for store_to_inferred_ptr.
const expected_len = parent_zir.items.len + init_scope.instructions.items.len;
try parent_zir.ensureCapacity(gpa, expected_len);
try parent_zir.ensureTotalCapacity(gpa, expected_len);
for (init_scope.instructions.items) |src_inst| {
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == init_scope.rl_ptr) {
@ -2750,10 +2732,10 @@ fn ptrType(
}
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.PtrType).Struct.fields.len + trailing_count);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.PtrType).Struct.fields.len +
trailing_count);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.PtrType{ .elem_type = elem_type });
if (sentinel_ref != .none) {
@ -2899,6 +2881,16 @@ fn fnDecl(
};
defer decl_gz.instructions.deinit(gpa);
var fn_gz: GenZir = .{
.force_comptime = false,
.in_defer = false,
.decl_node_index = fn_proto.ast.proto_node,
.decl_line = decl_gz.decl_line,
.parent = &decl_gz.base,
.astgen = astgen,
};
defer fn_gz.instructions.deinit(gpa);
// TODO: support noinline
const is_pub = fn_proto.visib_token != null;
const is_export = blk: {
@ -2922,71 +2914,76 @@ fn fnDecl(
try wip_decls.next(gpa, is_pub, is_export, align_inst != .none, section_inst != .none);
// The AST params array does not contain anytype and ... parameters.
// We must iterate to count how many param types to allocate.
const param_count = blk: {
var count: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| {
if (param.anytype_ellipsis3) |token| switch (token_tags[token]) {
.ellipsis3 => break,
.keyword_anytype => {},
else => unreachable,
};
count += 1;
}
break :blk count;
};
const param_types = try gpa.alloc(Zir.Inst.Ref, param_count);
defer gpa.free(param_types);
const bits_per_param = 1;
const params_per_u32 = 32 / bits_per_param;
// We only need this if there are greater than params_per_u32 fields.
var bit_bag = ArrayListUnmanaged(u32){};
defer bit_bag.deinit(gpa);
var cur_bit_bag: u32 = 0;
var is_var_args = false;
{
var params_scope = &fn_gz.base;
const is_var_args = is_var_args: {
var param_type_i: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| : (param_type_i += 1) {
if (param_type_i % params_per_u32 == 0 and param_type_i != 0) {
try bit_bag.append(gpa, cur_bit_bag);
cur_bit_bag = 0;
}
const is_comptime = if (param.comptime_noalias) |token|
token_tags[token] == .keyword_comptime
else
false;
cur_bit_bag = (cur_bit_bag >> bits_per_param) |
(@as(u32, @boolToInt(is_comptime)) << 31);
if (param.anytype_ellipsis3) |token| {
const is_anytype = if (param.anytype_ellipsis3) |token| blk: {
switch (token_tags[token]) {
.keyword_anytype => {
param_types[param_type_i] = .none;
continue;
},
.ellipsis3 => {
is_var_args = true;
break;
},
.keyword_anytype => break :blk true,
.ellipsis3 => break :is_var_args true,
else => unreachable,
}
}
const param_type_node = param.type_expr;
assert(param_type_node != 0);
param_types[param_type_i] =
try expr(&decl_gz, &decl_gz.base, .{ .ty = .type_type }, param_type_node);
}
assert(param_type_i == param_count);
} else false;
const empty_slot_count = params_per_u32 - (param_type_i % params_per_u32);
if (empty_slot_count < params_per_u32) {
cur_bit_bag >>= @intCast(u5, empty_slot_count * bits_per_param);
const param_name: u32 = if (param.name_token) |name_token| blk: {
if (mem.eql(u8, "_", tree.tokenSlice(name_token)))
break :blk 0;
const param_name = try astgen.identAsString(name_token);
if (!is_extern) {
try astgen.detectLocalShadowing(params_scope, param_name, name_token);
}
break :blk param_name;
} else if (!is_extern) {
if (param.anytype_ellipsis3) |tok| {
return astgen.failTok(tok, "missing parameter name", .{});
} else {
return astgen.failNode(param.type_expr, "missing parameter name", .{});
}
} else 0;
const param_inst = if (is_anytype) param: {
const name_token = param.name_token orelse param.anytype_ellipsis3.?;
const tag: Zir.Inst.Tag = if (is_comptime)
.param_anytype_comptime
else
.param_anytype;
break :param try decl_gz.addStrTok(tag, param_name, name_token);
} else param: {
const param_type_node = param.type_expr;
assert(param_type_node != 0);
const param_type = try expr(&decl_gz, params_scope, type_rl, param_type_node);
const main_tokens = tree.nodes.items(.main_token);
const name_token = param.name_token orelse main_tokens[param_type_node];
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
break :param try decl_gz.addPlTok(tag, name_token, Zir.Inst.Param{
.name = param_name,
.ty = param_type,
});
};
if (param_name == 0) continue;
const sub_scope = try astgen.arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &decl_gz,
.name = param_name,
.inst = param_inst,
.token_src = param.name_token.?,
.id_cat = .@"function parameter",
};
params_scope = &sub_scope.base;
}
}
break :is_var_args false;
};
const lib_name: u32 = if (fn_proto.lib_name) |lib_name_token| blk: {
const lib_name_str = try astgen.strLitAsString(lib_name_token);
@ -2998,7 +2995,7 @@ fn fnDecl(
const return_type_inst = try AstGen.expr(
&decl_gz,
&decl_gz.base,
params_scope,
.{ .ty = .type_type },
fn_proto.ast.return_type,
);
@ -3014,7 +3011,7 @@ fn fnDecl(
}
break :blk try AstGen.expr(
&decl_gz,
&decl_gz.base,
params_scope,
.{ .ty = .calling_convention_type },
fn_proto.ast.callconv_expr,
);
@ -3038,7 +3035,6 @@ fn fnDecl(
break :func try decl_gz.addFunc(.{
.src_node = decl_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.body = &[0]Zir.Inst.Index{},
.cc = cc,
.align_inst = .none, // passed in the per-decl data
@ -3047,75 +3043,18 @@ fn fnDecl(
.is_inferred_error = false,
.is_test = false,
.is_extern = true,
.cur_bit_bag = cur_bit_bag,
.bit_bag = bit_bag.items,
});
} else func: {
if (is_var_args) {
return astgen.failTok(fn_proto.ast.fn_token, "non-extern function is variadic", .{});
}
var fn_gz: GenZir = .{
.force_comptime = false,
.in_defer = false,
.decl_node_index = fn_proto.ast.proto_node,
.decl_line = decl_gz.decl_line,
.parent = &decl_gz.base,
.astgen = astgen,
};
defer fn_gz.instructions.deinit(gpa);
const prev_fn_block = astgen.fn_block;
astgen.fn_block = &fn_gz;
defer astgen.fn_block = prev_fn_block;
// Iterate over the parameters. We put the param names as the first N
// items inside `extra` so that debug info later can refer to the parameter names
// even while the respective source code is unloaded.
try astgen.extra.ensureUnusedCapacity(gpa, param_count);
{
var params_scope = &fn_gz.base;
var i: usize = 0;
var it = fn_proto.iterate(tree.*);
while (it.next()) |param| : (i += 1) {
const name_token = param.name_token orelse {
if (param.anytype_ellipsis3) |tok| {
return astgen.failTok(tok, "missing parameter name", .{});
} else {
return astgen.failNode(param.type_expr, "missing parameter name", .{});
}
};
if (param.type_expr != 0)
_ = try typeExpr(&fn_gz, params_scope, param.type_expr);
if (mem.eql(u8, "_", tree.tokenSlice(name_token)))
continue;
const param_name = try astgen.identAsString(name_token);
// Create an arg instruction. This is needed to emit a semantic analysis
// error for shadowing decls.
try astgen.detectLocalShadowing(params_scope, param_name, name_token);
const arg_inst = try fn_gz.addStrTok(.arg, param_name, name_token);
const sub_scope = try astgen.arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &fn_gz,
.name = param_name,
.inst = arg_inst,
.token_src = name_token,
.id_cat = .@"function parameter",
};
params_scope = &sub_scope.base;
// Additionally put the param name into `string_bytes` and reference it with
// `extra` so that we have access to the data in codegen, for debug info.
const str_index = try astgen.identAsString(name_token);
try astgen.extra.append(astgen.gpa, str_index);
}
_ = try typeExpr(&fn_gz, params_scope, fn_proto.ast.return_type);
_ = try expr(&fn_gz, params_scope, .none, body_node);
try checkUsed(gz, &fn_gz.base, params_scope);
}
_ = try expr(&fn_gz, params_scope, .none, body_node);
try checkUsed(gz, &fn_gz.base, params_scope);
const need_implicit_ret = blk: {
if (fn_gz.instructions.items.len == 0)
@ -3133,7 +3072,6 @@ fn fnDecl(
break :func try decl_gz.addFunc(.{
.src_node = decl_node,
.ret_ty = return_type_inst,
.param_types = param_types,
.body = fn_gz.instructions.items,
.cc = cc,
.align_inst = .none, // passed in the per-decl data
@ -3142,8 +3080,6 @@ fn fnDecl(
.is_inferred_error = is_inferred_error,
.is_test = false,
.is_extern = false,
.cur_bit_bag = cur_bit_bag,
.bit_bag = bit_bag.items,
});
};
@ -3480,7 +3416,6 @@ fn testDecl(
const func_inst = try decl_block.addFunc(.{
.src_node = node,
.ret_ty = .void_type,
.param_types = &[0]Zir.Inst.Ref{},
.body = fn_block.instructions.items,
.cc = .none,
.align_inst = .none,
@ -3489,8 +3424,6 @@ fn testDecl(
.is_inferred_error = true,
.is_test = true,
.is_extern = false,
.cur_bit_bag = 0,
.bit_bag = &.{},
});
_ = try decl_block.addBreak(.break_inline, block_inst, func_inst);
@ -4238,7 +4171,7 @@ fn containerDecl(
var fields_data = ArrayListUnmanaged(u32){};
defer fields_data.deinit(gpa);
try fields_data.ensureCapacity(gpa, counts.total_fields + counts.values);
try fields_data.ensureTotalCapacity(gpa, counts.total_fields + counts.values);
// We only need this if there are greater than 32 fields.
var bit_bag = ArrayListUnmanaged(u32){};
@ -5184,8 +5117,7 @@ fn setCondBrPayload(
) !void {
const astgen = then_scope.astgen;
try astgen.extra.ensureCapacity(astgen.gpa, astgen.extra.items.len +
@typeInfo(Zir.Inst.CondBr).Struct.fields.len +
try astgen.extra.ensureUnusedCapacity(astgen.gpa, @typeInfo(Zir.Inst.CondBr).Struct.fields.len +
then_scope.instructions.items.len + else_scope.instructions.items.len);
const zir_datas = astgen.instructions.items(.data);
@ -5839,10 +5771,9 @@ fn switchExpr(
_ = try case_scope.addBreak(.@"break", switch_block, case_result);
}
// Documentation for this: `Zir.Inst.SwitchBlock` and `Zir.Inst.SwitchBlockMulti`.
try scalar_cases_payload.ensureCapacity(gpa, scalar_cases_payload.items.len +
try scalar_cases_payload.ensureUnusedCapacity(gpa, case_scope.instructions.items.len +
3 + // operand, scalar_cases_len, else body len
@boolToInt(multi_cases_len != 0) +
case_scope.instructions.items.len);
@boolToInt(multi_cases_len != 0));
scalar_cases_payload.appendAssumeCapacity(@enumToInt(operand));
scalar_cases_payload.appendAssumeCapacity(scalar_cases_len);
if (multi_cases_len != 0) {
@ -5852,9 +5783,11 @@ fn switchExpr(
scalar_cases_payload.appendSliceAssumeCapacity(case_scope.instructions.items);
} else {
// Documentation for this: `Zir.Inst.SwitchBlock` and `Zir.Inst.SwitchBlockMulti`.
try scalar_cases_payload.ensureCapacity(gpa, scalar_cases_payload.items.len +
2 + // operand, scalar_cases_len
@boolToInt(multi_cases_len != 0));
try scalar_cases_payload.ensureUnusedCapacity(
gpa,
@as(usize, 2) + // operand, scalar_cases_len
@boolToInt(multi_cases_len != 0),
);
scalar_cases_payload.appendAssumeCapacity(@enumToInt(operand));
scalar_cases_payload.appendAssumeCapacity(scalar_cases_len);
if (multi_cases_len != 0) {
@ -5975,8 +5908,8 @@ fn switchExpr(
block_scope.break_count += 1;
_ = try case_scope.addBreak(.@"break", switch_block, case_result);
}
try scalar_cases_payload.ensureCapacity(gpa, scalar_cases_payload.items.len +
2 + case_scope.instructions.items.len);
try scalar_cases_payload.ensureUnusedCapacity(gpa, 2 +
case_scope.instructions.items.len);
scalar_cases_payload.appendAssumeCapacity(@enumToInt(item_inst));
scalar_cases_payload.appendAssumeCapacity(@intCast(u32, case_scope.instructions.items.len));
scalar_cases_payload.appendSliceAssumeCapacity(case_scope.instructions.items);
@ -6012,8 +5945,8 @@ fn switchExpr(
const payload_index = astgen.extra.items.len;
const zir_datas = astgen.instructions.items(.data);
zir_datas[switch_block].pl_node.payload_index = @intCast(u32, payload_index);
try astgen.extra.ensureCapacity(gpa, astgen.extra.items.len +
scalar_cases_payload.items.len + multi_cases_payload.items.len);
try astgen.extra.ensureUnusedCapacity(gpa, scalar_cases_payload.items.len +
multi_cases_payload.items.len);
const strat = rl.strategy(&block_scope);
switch (strat.tag) {
.break_operand => {
@ -8659,7 +8592,7 @@ fn failNodeNotes(
}
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureCapacity(astgen.gpa, notes_start + 1 + notes.len);
try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len));
astgen.extra.appendSliceAssumeCapacity(notes);
break :blk @intCast(u32, notes_start);
@ -8700,7 +8633,7 @@ fn failTokNotes(
}
const notes_index: u32 = if (notes.len != 0) blk: {
const notes_start = astgen.extra.items.len;
try astgen.extra.ensureCapacity(astgen.gpa, notes_start + 1 + notes.len);
try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len));
astgen.extra.appendSliceAssumeCapacity(notes);
break :blk @intCast(u32, notes_start);
@ -8864,7 +8797,7 @@ fn strLitNodeAsString(astgen: *AstGen, node: ast.Node.Index) !IndexSlice {
while (tok_i <= end) : (tok_i += 1) {
const slice = tree.tokenSlice(tok_i);
const line_bytes = slice[2 .. slice.len - 1];
try string_bytes.ensureCapacity(gpa, string_bytes.items.len + line_bytes.len + 1);
try string_bytes.ensureUnusedCapacity(gpa, line_bytes.len + 1);
string_bytes.appendAssumeCapacity('\n');
string_bytes.appendSliceAssumeCapacity(line_bytes);
}
@ -9131,8 +9064,8 @@ const GenZir = struct {
fn setBoolBrBody(gz: GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
gz.instructions.items.len);
const zir_datas = gz.astgen.instructions.items(.data);
zir_datas[inst].bool_br.payload_index = gz.astgen.addExtraAssumeCapacity(
Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
@ -9142,8 +9075,8 @@ const GenZir = struct {
fn setBlockBody(gz: GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
gz.instructions.items.len);
const zir_datas = gz.astgen.instructions.items(.data);
zir_datas[inst].pl_node.payload_index = gz.astgen.addExtraAssumeCapacity(
Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
@ -9155,8 +9088,8 @@ const GenZir = struct {
/// `store_to_block_ptr` instructions with lhs set to .none.
fn setBlockBodyEliding(gz: GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Block).Struct.fields.len + gz.instructions.items.len);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
gz.instructions.items.len);
const zir_datas = gz.astgen.instructions.items(.data);
const zir_tags = gz.astgen.instructions.items(.tag);
const block_pl_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Block{
@ -9177,7 +9110,6 @@ const GenZir = struct {
fn addFunc(gz: *GenZir, args: struct {
src_node: ast.Node.Index,
param_types: []const Zir.Inst.Ref,
body: []const Zir.Inst.Index,
ret_ty: Zir.Inst.Ref,
cc: Zir.Inst.Ref,
@ -9187,8 +9119,6 @@ const GenZir = struct {
is_inferred_error: bool,
is_test: bool,
is_extern: bool,
cur_bit_bag: u32,
bit_bag: []const u32,
}) !Zir.Inst.Ref {
assert(args.src_node != 0);
assert(args.ret_ty != .none);
@ -9226,19 +9156,14 @@ const GenZir = struct {
src_locs = &src_locs_buffer;
}
const any_are_comptime = args.cur_bit_bag != 0 or for (args.bit_bag) |x| {
if (x != 0) break true;
} else false;
if (args.cc != .none or args.lib_name != 0 or
args.is_var_args or args.is_test or args.align_inst != .none or
args.is_extern or any_are_comptime)
args.is_extern)
{
try astgen.extra.ensureUnusedCapacity(
gpa,
@typeInfo(Zir.Inst.ExtendedFunc).Struct.fields.len +
@boolToInt(any_are_comptime) + args.bit_bag.len +
args.param_types.len + args.body.len + src_locs.len +
args.body.len + src_locs.len +
@boolToInt(args.lib_name != 0) +
@boolToInt(args.align_inst != .none) +
@boolToInt(args.cc != .none),
@ -9246,7 +9171,6 @@ const GenZir = struct {
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.ExtendedFunc{
.src_node = gz.nodeIndexToRelative(args.src_node),
.return_type = args.ret_ty,
.param_types_len = @intCast(u32, args.param_types.len),
.body_len = @intCast(u32, args.body.len),
});
if (args.lib_name != 0) {
@ -9258,11 +9182,6 @@ const GenZir = struct {
if (args.align_inst != .none) {
astgen.extra.appendAssumeCapacity(@enumToInt(args.align_inst));
}
if (any_are_comptime) {
astgen.extra.appendSliceAssumeCapacity(args.bit_bag); // Likely empty.
astgen.extra.appendAssumeCapacity(args.cur_bit_bag);
}
astgen.appendRefsAssumeCapacity(args.param_types);
astgen.extra.appendSliceAssumeCapacity(args.body);
astgen.extra.appendSliceAssumeCapacity(src_locs);
@ -9279,7 +9198,6 @@ const GenZir = struct {
.has_align = args.align_inst != .none,
.is_test = args.is_test,
.is_extern = args.is_extern,
.has_comptime_bits = any_are_comptime,
}),
.operand = payload_index,
} },
@ -9290,15 +9208,13 @@ const GenZir = struct {
try gz.astgen.extra.ensureUnusedCapacity(
gpa,
@typeInfo(Zir.Inst.Func).Struct.fields.len +
args.param_types.len + args.body.len + src_locs.len,
args.body.len + src_locs.len,
);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Func{
.return_type = args.ret_ty,
.param_types_len = @intCast(u32, args.param_types.len),
.body_len = @intCast(u32, args.body.len),
});
gz.astgen.appendRefsAssumeCapacity(args.param_types);
gz.astgen.extra.appendSliceAssumeCapacity(args.body);
gz.astgen.extra.appendSliceAssumeCapacity(src_locs);
@ -9380,10 +9296,10 @@ const GenZir = struct {
assert(callee != .none);
assert(src_node != 0);
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.astgen.extra.ensureCapacity(gpa, gz.astgen.extra.items.len +
@typeInfo(Zir.Inst.Call).Struct.fields.len + args.len);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Call).Struct.fields.len +
args.len);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Call{
.callee = callee,
@ -9412,8 +9328,8 @@ const GenZir = struct {
) !Zir.Inst.Index {
assert(lhs != .none);
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
@ -9486,8 +9402,8 @@ const GenZir = struct {
extra: anytype,
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(extra);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
@ -9502,6 +9418,30 @@ const GenZir = struct {
return indexToRef(new_index);
}
fn addPlTok(
gz: *GenZir,
tag: Zir.Inst.Tag,
/// Absolute token index. This function does the conversion to Decl offset.
abs_tok_index: ast.TokenIndex,
extra: anytype,
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(extra);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
.tag = tag,
.data = .{ .pl_tok = .{
.src_tok = gz.tokenIndexToRelative(abs_tok_index),
.payload_index = payload_index,
} },
});
gz.instructions.appendAssumeCapacity(new_index);
return indexToRef(new_index);
}
fn addExtendedPayload(
gz: *GenZir,
opcode: Zir.Inst.Extended,
@ -9509,8 +9449,8 @@ const GenZir = struct {
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(extra);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
@ -9566,8 +9506,8 @@ const GenZir = struct {
elem_type: Zir.Inst.Ref,
) !Zir.Inst.Ref {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.astgen.instructions.ensureCapacity(gpa, gz.astgen.instructions.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
const payload_index = try gz.astgen.addExtra(Zir.Inst.ArrayTypeSentinel{
.sentinel = sentinel,
@ -9822,7 +9762,7 @@ const GenZir = struct {
/// Leaves the `payload_index` field undefined.
fn addCondBr(gz: *GenZir, tag: Zir.Inst.Tag, node: ast.Node.Index) !Zir.Inst.Index {
const gpa = gz.astgen.gpa;
try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
try gz.instructions.ensureUnusedCapacity(gpa, 1);
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
try gz.astgen.instructions.append(gpa, .{
.tag = tag,

src/Module.zig

@ -3714,7 +3714,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
decl.analysis = .outdated;
}
fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node.Index) !*Decl {
pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node.Index) !*Decl {
// If we have emit-h then we must allocate a bigger structure to store the emit-h state.
const new_decl: *Decl = if (mod.emit_h != null) blk: {
const parent_struct = try mod.gpa.create(DeclPlusEmitH);

src/Sema.zig

@ -44,6 +44,7 @@ branch_count: u32 = 0,
/// contain a mapped source location.
src: LazySrcLoc = .{ .token_offset = 0 },
next_arg_index: usize = 0,
params: std.ArrayListUnmanaged(Param) = .{},
decl_val_table: std.AutoHashMapUnmanaged(*Decl, Air.Inst.Ref) = .{},
const std = @import("std");
@ -68,6 +69,13 @@ const LazySrcLoc = Module.LazySrcLoc;
const RangeSet = @import("RangeSet.zig");
const target_util = @import("target.zig");
const Param = struct {
name: [:0]const u8,
/// `none` means `anytype`.
ty: Air.Inst.Ref,
is_comptime: bool,
};
pub const InstMap = std.AutoHashMapUnmanaged(Zir.Inst.Index, Air.Inst.Ref);
pub fn deinit(sema: *Sema) void {
@ -91,8 +99,7 @@ pub fn analyzeFnBody(
.func, .func_inferred => blk: {
const inst_data = datas[fn_body_inst].pl_node;
const extra = sema.code.extraData(Zir.Inst.Func, inst_data.payload_index);
const param_types_len = extra.data.param_types_len;
const body = sema.code.extra[extra.end + param_types_len ..][0..extra.data.body_len];
const body = sema.code.extra[extra.end..][0..extra.data.body_len];
break :blk body;
},
.extended => blk: {
@ -104,10 +111,6 @@ pub fn analyzeFnBody(
extra_index += @boolToInt(small.has_lib_name);
extra_index += @boolToInt(small.has_cc);
extra_index += @boolToInt(small.has_align);
if (small.has_comptime_bits) {
extra_index += (extra.data.param_types_len + 31) / 32;
}
extra_index += extra.data.param_types_len;
const body = sema.code.extra[extra_index..][0..extra.data.body_len];
break :blk body;
},
@ -162,7 +165,6 @@ pub fn analyzeBody(
const inst = body[i];
const air_inst: Air.Inst.Ref = switch (tags[inst]) {
// zig fmt: off
.arg => try sema.zirArg(block, inst),
.alloc => try sema.zirAlloc(block, inst),
.alloc_inferred => try sema.zirAllocInferred(block, inst, Type.initTag(.inferred_alloc_const)),
.alloc_inferred_mut => try sema.zirAllocInferred(block, inst, Type.initTag(.inferred_alloc_mut)),
@ -404,6 +406,26 @@ pub fn analyzeBody(
// continue the loop.
// We also know that they cannot be referenced later, so we avoid
// putting them into the map.
.param => {
try sema.zirParam(inst, false);
i += 1;
continue;
},
.param_comptime => {
try sema.zirParam(inst, true);
i += 1;
continue;
},
.param_anytype => {
try sema.zirParamAnytype(inst, false);
i += 1;
continue;
},
.param_anytype_comptime => {
try sema.zirParamAnytype(inst, true);
i += 1;
continue;
},
.breakpoint => {
try sema.zirBreakpoint(block, inst);
i += 1;
@ -1358,23 +1380,34 @@ fn zirIndexablePtrLen(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Co
return sema.analyzeLoad(block, src, result_ptr, result_ptr_src);
}
fn zirArg(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
fn zirParam(sema: *Sema, inst: Zir.Inst.Index, is_comptime: bool) CompileError!void {
const inst_data = sema.code.instructions.items(.data)[inst].pl_tok;
const extra = sema.code.extraData(Zir.Inst.Param, inst_data.payload_index).data;
const param_name = sema.code.nullTerminatedString(extra.name);
// TODO check if param_name shadows a Decl. This only needs to be done if
// usingnamespace is implemented.
const param_ty = sema.resolveInst(extra.ty);
try sema.params.append(sema.gpa, .{
.name = param_name,
.ty = param_ty,
.is_comptime = is_comptime,
});
}
fn zirParamAnytype(sema: *Sema, inst: Zir.Inst.Index, is_comptime: bool) CompileError!void {
const inst_data = sema.code.instructions.items(.data)[inst].str_tok;
const arg_name = inst_data.get(sema.code);
const arg_index = sema.next_arg_index;
sema.next_arg_index += 1;
const param_name = inst_data.get(sema.code);
// TODO check if arg_name shadows a Decl
_ = arg_name;
// TODO check if param_name shadows a Decl. This only needs to be done if
// usingnamespace is implemented.
if (block.inlining) |_| {
return sema.param_inst_list[arg_index];
}
// Set the name of the Air.Arg instruction for use by codegen debug info.
const air_arg = sema.param_inst_list[arg_index];
sema.air_instructions.items(.data)[Air.refToIndex(air_arg).?].ty_str.str = inst_data.start;
return air_arg;
try sema.params.append(sema.gpa, .{
.name = param_name,
.ty = .none,
.is_comptime = is_comptime,
});
}
fn zirAllocExtended(
@ -2395,26 +2428,29 @@ fn analyzeCall(
ensure_result_used: bool,
args: []const Air.Inst.Ref,
) CompileError!Air.Inst.Ref {
const mod = sema.mod;
const func_ty = sema.typeOf(func);
if (func_ty.zigTypeTag() != .Fn)
return sema.mod.fail(&block.base, func_src, "type '{}' not a function", .{func_ty});
return mod.fail(&block.base, func_src, "type '{}' not a function", .{func_ty});
const cc = func_ty.fnCallingConvention();
const func_ty_info = func_ty.fnInfo();
const cc = func_ty_info.cc;
if (cc == .Naked) {
// TODO add error note: declared here
return sema.mod.fail(
return mod.fail(
&block.base,
func_src,
"unable to call function with naked calling convention",
.{},
);
}
const fn_params_len = func_ty.fnParamLen();
if (func_ty.fnIsVarArgs()) {
const fn_params_len = func_ty_info.param_types.len;
if (func_ty_info.is_var_args) {
assert(cc == .C);
if (args.len < fn_params_len) {
// TODO add error note: declared here
return sema.mod.fail(
return mod.fail(
&block.base,
func_src,
"expected at least {d} argument(s), found {d}",
@ -2423,7 +2459,7 @@ fn analyzeCall(
}
} else if (fn_params_len != args.len) {
// TODO add error note: declared here
return sema.mod.fail(
return mod.fail(
&block.base,
func_src,
"expected {d} argument(s), found {d}",
@ -2442,7 +2478,7 @@ fn analyzeCall(
.never_inline,
.no_async,
.always_tail,
=> return sema.mod.fail(&block.base, call_src, "TODO implement call with modifier {}", .{
=> return mod.fail(&block.base, call_src, "TODO implement call with modifier {}", .{
modifier,
}),
}
@ -2451,12 +2487,12 @@ fn analyzeCall(
const is_comptime_call = block.is_comptime or modifier == .compile_time;
const is_inline_call = is_comptime_call or modifier == .always_inline or
func_ty.fnCallingConvention() == .Inline;
func_ty_info.cc == .Inline;
const result: Air.Inst.Ref = if (is_inline_call) res: {
const func_val = try sema.resolveConstValue(block, func_src, func);
const module_fn = switch (func_val.tag()) {
.function => func_val.castTag(.function).?.data,
.extern_fn => return sema.mod.fail(&block.base, call_src, "{s} call of extern function", .{
.extern_fn => return mod.fail(&block.base, call_src, "{s} call of extern function", .{
@as([]const u8, if (is_comptime_call) "comptime" else "inline"),
}),
else => unreachable,
@ -2535,10 +2571,46 @@ fn analyzeCall(
const result = try sema.analyzeBlockBody(block, call_src, &child_block, merges);
break :res result;
} else if (func_ty_info.is_generic) {
const func_val = try sema.resolveConstValue(block, func_src, func);
const module_fn = func_val.castTag(.function).?.data;
// Check the Module's generic function map with an adapted context, so that we
// can match against `args` rather than doing the work below to create a generic Scope
// only to junk it if it matches an existing instantiation.
// TODO
// Create a Decl for the new function.
const generic_namespace = try sema.arena.create(Module.Scope.Namespace);
generic_namespace.* = .{
.parent = block.src_decl.namespace,
.file_scope = block.src_decl.namespace.file_scope,
.ty = func_ty,
};
const new_decl = try mod.allocateNewDecl(generic_namespace, module_fn.owner_decl.src_node);
_ = new_decl;
// Iterate over the parameters that are comptime, evaluating their type expressions
// inside a Scope which contains the previous parameters.
//for (args) |arg, arg_i| {
//}
// Create a new Fn with only the runtime-known parameters.
// TODO
// Populate the Decl ty/val with the function and its type.
// TODO
// Queue up a `codegen_func` work item for the new Fn, making sure it will have
// `analyzeFnBody` called with the Scope which contains the comptime parameters.
// TODO
// Save it into the Module's generic function map.
// TODO
// Call it the same as a runtime function.
// TODO
return mod.fail(&block.base, func_src, "TODO implement generic fn call", .{});
} else res: {
if (func_ty.fnIsGeneric()) {
return sema.mod.fail(&block.base, func_src, "TODO implement generic fn call", .{});
}
try sema.requireRuntimeBlock(block, call_src);
try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len +
args.len);
@ -3186,13 +3258,12 @@ fn zirFunc(
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const extra = sema.code.extraData(Zir.Inst.Func, inst_data.payload_index);
const param_types = sema.code.refSlice(extra.end, extra.data.param_types_len);
var body_inst: Zir.Inst.Index = 0;
var src_locs: Zir.Inst.Func.SrcLocs = undefined;
if (extra.data.body_len != 0) {
body_inst = inst;
const extra_index = extra.end + extra.data.param_types_len + extra.data.body_len;
const extra_index = extra.end + extra.data.body_len;
src_locs = sema.code.extraData(Zir.Inst.Func.SrcLocs, extra_index).data;
}
@ -3204,7 +3275,6 @@ fn zirFunc(
return sema.funcCommon(
block,
inst_data.src_node,
param_types,
body_inst,
extra.data.return_type,
cc,
@ -3214,7 +3284,6 @@ fn zirFunc(
false,
src_locs,
null,
&.{},
);
}
@ -3222,7 +3291,6 @@ fn funcCommon(
sema: *Sema,
block: *Scope.Block,
src_node_offset: i32,
zir_param_types: []const Zir.Inst.Ref,
body_inst: Zir.Inst.Index,
zir_return_type: Zir.Inst.Ref,
cc: std.builtin.CallingConvention,
@ -3232,7 +3300,6 @@ fn funcCommon(
is_extern: bool,
src_locs: Zir.Inst.Func.SrcLocs,
opt_lib_name: ?[]const u8,
comptime_bits: []const u32,
) CompileError!Air.Inst.Ref {
const src: LazySrcLoc = .{ .node_offset = src_node_offset };
const ret_ty_src: LazySrcLoc = .{ .node_offset_fn_type_ret_ty = src_node_offset };
@ -3245,7 +3312,7 @@ fn funcCommon(
const fn_ty: Type = fn_ty: {
// Hot path for some common function types.
if (zir_param_types.len == 0 and !var_args and align_val.tag() == .null_value and
if (sema.params.items.len == 0 and !var_args and align_val.tag() == .null_value and
!inferred_error_set)
{
if (bare_return_type.zigTypeTag() == .NoReturn and cc == .Unspecified) {
@ -3266,22 +3333,21 @@ fn funcCommon(
}
var any_are_comptime = false;
const param_types = try sema.arena.alloc(Type, zir_param_types.len);
for (zir_param_types) |param_type, i| {
// TODO make a compile error from `resolveType` report the source location
// of the specific parameter. Will need to take a similar strategy as
// `resolveSwitchItemVal` to avoid resolving the source location unless
// we actually need to report an error.
const param_src = src;
param_types[i] = try sema.resolveType(block, param_src, param_type);
any_are_comptime = any_are_comptime or blk: {
if (comptime_bits.len == 0)
break :blk false;
const bag = comptime_bits[i / 32];
const is_comptime = @truncate(u1, bag >> @intCast(u5, i % 32)) != 0;
break :blk is_comptime;
};
const param_types = try sema.arena.alloc(Type, sema.params.items.len);
const comptime_params = try sema.arena.alloc(bool, sema.params.items.len);
for (sema.params.items) |param, i| {
if (param.ty == .none) {
param_types[i] = Type.initTag(.noreturn); // indicates anytype
} else {
// TODO make a compile error from `resolveType` report the source location
// of the specific parameter. Will need to take a similar strategy as
// `resolveSwitchItemVal` to avoid resolving the source location unless
// we actually need to report an error.
const param_src = src;
param_types[i] = try sema.resolveType(block, param_src, param.ty);
}
comptime_params[i] = param.is_comptime;
any_are_comptime = any_are_comptime or param.is_comptime;
}
if (align_val.tag() != .null_value) {
@ -3301,6 +3367,7 @@ fn funcCommon(
break :fn_ty try Type.Tag.function.create(sema.arena, .{
.param_types = param_types,
.comptime_params = comptime_params.ptr,
.return_type = return_type,
.cc = cc,
.is_var_args = var_args,
@ -6545,16 +6612,6 @@ fn zirFuncExtended(
break :blk align_tv.val;
} else Value.initTag(.null_value);
const comptime_bits: []const u32 = if (!small.has_comptime_bits) &.{} else blk: {
const amt = (extra.data.param_types_len + 31) / 32;
const bit_bags = sema.code.extra[extra_index..][0..amt];
extra_index += amt;
break :blk bit_bags;
};
const param_types = sema.code.refSlice(extra_index, extra.data.param_types_len);
extra_index += param_types.len;
var body_inst: Zir.Inst.Index = 0;
var src_locs: Zir.Inst.Func.SrcLocs = undefined;
if (extra.data.body_len != 0) {
@ -6570,7 +6627,6 @@ fn zirFuncExtended(
return sema.funcCommon(
block,
extra.data.src_node,
param_types,
body_inst,
extra.data.return_type,
cc,
@ -6580,7 +6636,6 @@ fn zirFuncExtended(
is_extern,
src_locs,
lib_name,
comptime_bits,
);
}

src/Zir.zig

@ -173,11 +173,22 @@ pub const Inst = struct {
/// Twos complement wrapping integer addition.
/// Uses the `pl_node` union field. Payload is `Bin`.
addwrap,
/// Declares a parameter of the current function. Used for debug info and
/// for checking shadowing against declarations in the current namespace.
/// Uses the `str_tok` field. Token is the parameter name, string is the
/// parameter name.
arg,
/// Declares a parameter of the current function. Used for:
/// * debug info
/// * checking shadowing against declarations in the current namespace
/// * parameter type expressions referencing other parameters
/// These occur in the block outside a function body (the same block as
/// contains the func instruction).
/// Uses the `pl_tok` field. Token is the parameter name, payload is a `Param`.
param,
/// Same as `param` except the parameter is marked comptime.
param_comptime,
/// Same as `param` except the parameter is marked anytype.
/// Uses the `str_tok` field. Token is the parameter name. String is the parameter name.
param_anytype,
/// Same as `param` except the parameter is marked both comptime and anytype.
/// Uses the `str_tok` field. Token is the parameter name. String is the parameter name.
param_anytype_comptime,
/// Array concatenation. `a ++ b`
/// Uses the `pl_node` union field. Payload is `Bin`.
array_cat,
@ -971,7 +982,10 @@ pub const Inst = struct {
/// Function calls do not count.
pub fn isNoReturn(tag: Tag) bool {
return switch (tag) {
.arg,
.param,
.param_comptime,
.param_anytype,
.param_anytype_comptime,
.add,
.addwrap,
.alloc,
@ -1233,7 +1247,10 @@ pub const Inst = struct {
break :list std.enums.directEnumArray(Tag, Data.FieldEnum, 0, .{
.add = .pl_node,
.addwrap = .pl_node,
.arg = .str_tok,
.param = .pl_tok,
.param_comptime = .pl_tok,
.param_anytype = .str_tok,
.param_anytype_comptime = .str_tok,
.array_cat = .pl_node,
.array_mul = .pl_node,
.array_type = .bin,
@ -2047,6 +2064,17 @@ pub const Inst = struct {
return .{ .node_offset = self.src_node };
}
},
pl_tok: struct {
/// Offset from Decl AST token index.
src_tok: ast.TokenIndex,
/// index into extra.
/// `Tag` determines what lives there.
payload_index: u32,
pub fn src(self: @This()) LazySrcLoc {
return .{ .token_offset = self.src_tok };
}
},
bin: Bin,
/// For strings which may contain null bytes.
str: struct {
@ -2170,6 +2198,7 @@ pub const Inst = struct {
un_node,
un_tok,
pl_node,
pl_tok,
bin,
str,
str_tok,
@ -2226,17 +2255,11 @@ pub const Inst = struct {
/// 0. lib_name: u32, // null terminated string index, if has_lib_name is set
/// 1. cc: Ref, // if has_cc is set
/// 2. align: Ref, // if has_align is set
/// 3. comptime_bits: u32 // for every 32 parameters, if has_comptime_bits is set
/// - sets of 1 bit:
/// 0bX: whether corresponding parameter is comptime
/// 4. param_type: Ref // for each param_types_len
/// - `none` indicates that the param type is `anytype`.
/// 5. body: Index // for each body_len
/// 6. src_locs: Func.SrcLocs // if body_len != 0
/// 3. body: Index // for each body_len
/// 4. src_locs: Func.SrcLocs // if body_len != 0
pub const ExtendedFunc = struct {
src_node: i32,
return_type: Ref,
param_types_len: u32,
body_len: u32,
pub const Small = packed struct {
@ -2247,8 +2270,7 @@ pub const Inst = struct {
has_align: bool,
is_test: bool,
is_extern: bool,
has_comptime_bits: bool,
_: u8 = undefined,
_: u9 = undefined,
};
};
@ -2271,13 +2293,10 @@ pub const Inst = struct {
};
/// Trailing:
/// 0. param_type: Ref // for each param_types_len
/// - `none` indicates that the param type is `anytype`.
/// 1. body: Index // for each body_len
/// 2. src_locs: SrcLocs // if body_len != 0
/// 0. body: Index // for each body_len
/// 1. src_locs: SrcLocs // if body_len != 0
pub const Func = struct {
return_type: Ref,
param_types_len: u32,
body_len: u32,
pub const SrcLocs = struct {
@ -2764,6 +2783,12 @@ pub const Inst = struct {
args: Ref,
};
pub const Param = struct {
/// Null-terminated string index.
name: u32,
ty: Ref,
};
/// Trailing:
/// 0. type_inst: Ref, // if small 0b000X is set
/// 1. align_inst: Ref, // if small 0b00X0 is set
@ -3108,11 +3133,14 @@ const Writer = struct {
.decl_ref,
.decl_val,
.import,
.arg,
.ret_err_value,
.ret_err_value_code,
.param_anytype,
.param_anytype_comptime,
=> try self.writeStrTok(stream, inst),
.param, .param_comptime => try self.writeParam(stream, inst),
.func => try self.writeFunc(stream, inst, false),
.func_inferred => try self.writeFunc(stream, inst, true),
@ -3314,6 +3342,17 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
fn writeParam(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_tok;
const extra = self.code.extraData(Inst.Param, inst_data.payload_index).data;
try stream.print("\"{}\", ", .{
std.zig.fmtEscapes(self.code.nullTerminatedString(extra.name)),
});
try self.writeInstRef(stream, extra.ty);
try stream.writeAll(") ");
try self.writeSrc(stream, inst_data.src());
}
fn writePlNodeBin(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Inst.Bin, inst_data.payload_index).data;
@ -4277,16 +4316,14 @@ const Writer = struct {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
const extra = self.code.extraData(Inst.Func, inst_data.payload_index);
const param_types = self.code.refSlice(extra.end, extra.data.param_types_len);
const body = self.code.extra[extra.end + param_types.len ..][0..extra.data.body_len];
const body = self.code.extra[extra.end..][0..extra.data.body_len];
var src_locs: Zir.Inst.Func.SrcLocs = undefined;
if (body.len != 0) {
const extra_index = extra.end + param_types.len + body.len;
const extra_index = extra.end + body.len;
src_locs = self.code.extraData(Zir.Inst.Func.SrcLocs, extra_index).data;
}
return self.writeFuncCommon(
stream,
param_types,
extra.data.return_type,
inferred_error_set,
false,
@ -4296,7 +4333,6 @@ const Writer = struct {
body,
src,
src_locs,
&.{},
);
}
@ -4323,16 +4359,6 @@ const Writer = struct {
break :blk align_inst;
};
const comptime_bits: []const u32 = if (!small.has_comptime_bits) &.{} else blk: {
const amt = (extra.data.param_types_len + 31) / 32;
const bit_bags = self.code.extra[extra_index..][0..amt];
extra_index += amt;
break :blk bit_bags;
};
const param_types = self.code.refSlice(extra_index, extra.data.param_types_len);
extra_index += param_types.len;
const body = self.code.extra[extra_index..][0..extra.data.body_len];
extra_index += body.len;
@ -4342,7 +4368,6 @@ const Writer = struct {
}
return self.writeFuncCommon(
stream,
param_types,
extra.data.return_type,
small.is_inferred_error,
small.is_var_args,
@ -4352,7 +4377,6 @@ const Writer = struct {
body,
src,
src_locs,
comptime_bits,
);
}
@ -4426,7 +4450,6 @@ const Writer = struct {
fn writeFuncCommon(
self: *Writer,
stream: anytype,
param_types: []const Inst.Ref,
ret_ty: Inst.Ref,
inferred_error_set: bool,
var_args: bool,
@ -4436,19 +4459,7 @@ const Writer = struct {
body: []const Inst.Index,
src: LazySrcLoc,
src_locs: Zir.Inst.Func.SrcLocs,
comptime_bits: []const u32,
) !void {
try stream.writeAll("[");
for (param_types) |param_type, i| {
if (i != 0) try stream.writeAll(", ");
if (comptime_bits.len != 0) {
const bag = comptime_bits[i / 32];
const is_comptime = @truncate(u1, bag >> @intCast(u5, i % 32)) != 0;
try self.writeFlag(stream, "comptime ", is_comptime);
}
try self.writeInstRef(stream, param_type);
}
try stream.writeAll("], ");
try self.writeInstRef(stream, ret_ty);
try self.writeOptionalInstRef(stream, ", cc=", cc);
try self.writeOptionalInstRef(stream, ", align=", align_inst);
@ -4714,8 +4725,7 @@ fn findDeclsInner(
const inst_data = datas[inst].pl_node;
const extra = zir.extraData(Inst.Func, inst_data.payload_index);
const param_types_len = extra.data.param_types_len;
const body = zir.extra[extra.end + param_types_len ..][0..extra.data.body_len];
const body = zir.extra[extra.end..][0..extra.data.body_len];
return zir.findDeclsBody(list, body);
},
.extended => {
@ -4730,7 +4740,6 @@ fn findDeclsInner(
extra_index += @boolToInt(small.has_lib_name);
extra_index += @boolToInt(small.has_cc);
extra_index += @boolToInt(small.has_align);
extra_index += extra.data.param_types_len;
const body = zir.extra[extra_index..][0..extra.data.body_len];
return zir.findDeclsBody(list, body);
},

src/type.zig

@ -759,12 +759,15 @@ pub const Type = extern union {
for (payload.param_types) |param_type, i| {
param_types[i] = try param_type.copy(allocator);
}
const other_comptime_params = payload.comptime_params[0..payload.param_types.len];
const comptime_params = try allocator.dupe(bool, other_comptime_params);
return Tag.function.create(allocator, .{
.return_type = try payload.return_type.copy(allocator),
.param_types = param_types,
.cc = payload.cc,
.is_var_args = payload.is_var_args,
.is_generic = payload.is_generic,
.comptime_params = comptime_params.ptr,
});
},
.pointer => {
@ -2408,14 +2411,41 @@ pub const Type = extern union {
};
}
/// Asserts the type is a function.
pub fn fnIsGeneric(self: Type) bool {
return switch (self.tag()) {
.fn_noreturn_no_args => false,
.fn_void_no_args => false,
.fn_naked_noreturn_no_args => false,
.fn_ccc_void_no_args => false,
.function => self.castTag(.function).?.data.is_generic,
pub fn fnInfo(ty: Type) Payload.Function.Data {
return switch (ty.tag()) {
.fn_noreturn_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.noreturn),
.cc = .Unspecified,
.is_var_args = false,
.is_generic = false,
},
.fn_void_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.void),
.cc = .Unspecified,
.is_var_args = false,
.is_generic = false,
},
.fn_naked_noreturn_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.noreturn),
.cc = .Naked,
.is_var_args = false,
.is_generic = false,
},
.fn_ccc_void_no_args => .{
.param_types = &.{},
.comptime_params = undefined,
.return_type = initTag(.void),
.cc = .C,
.is_var_args = false,
.is_generic = false,
},
.function => ty.castTag(.function).?.data,
else => unreachable,
};
@ -3223,13 +3253,23 @@ pub const Type = extern union {
pub const base_tag = Tag.function;
base: Payload = Payload{ .tag = base_tag },
data: struct {
data: Data,
// TODO look into optimizing this memory to take fewer bytes
const Data = struct {
param_types: []Type,
comptime_params: [*]bool,
return_type: Type,
cc: std.builtin.CallingConvention,
is_var_args: bool,
is_generic: bool,
},
fn paramIsComptime(self: @This(), i: usize) bool {
if (!self.is_generic) return false;
assert(i < self.param_types.len);
return self.comptime_params[i];
}
};
};
pub const ErrorSet = struct {