IR: support var type args and fix phi peer type resolution

Andrew Kelley 2016-12-18 00:09:43 -05:00
parent e73faf9a9e
commit 85b6d14637
3 changed files with 260 additions and 190 deletions
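
For context, a `var` parameter lets the argument's type vary per call site, with a separate instantiation of the function generated for each distinct argument type; the second part of the commit fixes how the analyzer resolves a common ("peer") type for the incoming values of a phi instruction. As a rough analogy only (plain C++, not code from this repository), a `var` parameter behaves much like a function template parameter:

#include <cstdio>

// Rough analogy: like a `var` parameter, the parameter type here is deduced
// at each call site, and a distinct instantiation is generated per type.
template <typename T>
T addOne(T x) {
    return x + 1;
}

int main() {
    // int and double instantiations of the same generic function
    printf("%d %f\n", addOne(41), addOne(2.5));
    return 0;
}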

View File

@@ -723,6 +723,8 @@ TypeTableEntry *get_fn_type(CodeGen *g, FnTypeId *fn_type_id) {
fn_type->data.fn.calling_convention = LLVMFastCallConv;
}
bool skip_debug_info = false;
// populate the name of the type
buf_resize(&fn_type->name, 0);
const char *extern_str = fn_type_id->is_extern ? "extern " : "";
@@ -736,6 +738,8 @@ TypeTableEntry *get_fn_type(CodeGen *g, FnTypeId *fn_type_id) {
const char *comma = (i == 0) ? "" : ", ";
const char *noalias_str = param_info->is_noalias ? "noalias " : "";
buf_appendf(&fn_type->name, "%s%s%s", comma, noalias_str, buf_ptr(&param_type->name));
skip_debug_info = skip_debug_info || !param_type->di_type;
}
if (fn_type_id->is_var_args) {
@@ -746,67 +750,70 @@ TypeTableEntry *get_fn_type(CodeGen *g, FnTypeId *fn_type_id) {
if (fn_type_id->return_type->id != TypeTableEntryIdVoid) {
buf_appendf(&fn_type->name, " -> %s", buf_ptr(&fn_type_id->return_type->name));
}
skip_debug_info = skip_debug_info || !fn_type_id->return_type->di_type;
// next, loop over the parameters again and compute debug information
// and codegen information
bool first_arg_return = !fn_type_id->is_extern && handle_is_ptr(fn_type_id->return_type);
// +1 for maybe making the first argument the return value
LLVMTypeRef *gen_param_types = allocate<LLVMTypeRef>(1 + fn_type_id->param_count);
// +1 because 0 is the return type and +1 for maybe making first arg ret val
ZigLLVMDIType **param_di_types = allocate<ZigLLVMDIType*>(2 + fn_type_id->param_count);
param_di_types[0] = fn_type_id->return_type->di_type;
size_t gen_param_index = 0;
TypeTableEntry *gen_return_type;
if (!type_has_bits(fn_type_id->return_type)) {
gen_return_type = g->builtin_types.entry_void;
} else if (first_arg_return) {
TypeTableEntry *gen_type = get_pointer_to_type(g, fn_type_id->return_type, false);
gen_param_types[gen_param_index] = gen_type->type_ref;
gen_param_index += 1;
// after the gen_param_index += 1 because 0 is the return type
param_di_types[gen_param_index] = gen_type->di_type;
gen_return_type = g->builtin_types.entry_void;
} else {
gen_return_type = fn_type_id->return_type;
}
fn_type->data.fn.gen_return_type = gen_return_type;
fn_type->data.fn.gen_param_info = allocate<FnGenParamInfo>(fn_type_id->param_count);
for (size_t i = 0; i < fn_type_id->param_count; i += 1) {
FnTypeParamInfo *src_param_info = &fn_type->data.fn.fn_type_id.param_info[i];
TypeTableEntry *type_entry = src_param_info->type;
FnGenParamInfo *gen_param_info = &fn_type->data.fn.gen_param_info[i];
gen_param_info->src_index = i;
gen_param_info->gen_index = SIZE_MAX;
assert(type_is_complete(type_entry));
if (type_has_bits(type_entry)) {
TypeTableEntry *gen_type;
if (handle_is_ptr(type_entry)) {
gen_type = get_pointer_to_type(g, type_entry, true);
gen_param_info->is_byval = true;
} else {
gen_type = type_entry;
}
if (!skip_debug_info) {
bool first_arg_return = !fn_type_id->is_extern && handle_is_ptr(fn_type_id->return_type);
// +1 for maybe making the first argument the return value
LLVMTypeRef *gen_param_types = allocate<LLVMTypeRef>(1 + fn_type_id->param_count);
// +1 because 0 is the return type and +1 for maybe making first arg ret val
ZigLLVMDIType **param_di_types = allocate<ZigLLVMDIType*>(2 + fn_type_id->param_count);
param_di_types[0] = fn_type_id->return_type->di_type;
size_t gen_param_index = 0;
TypeTableEntry *gen_return_type;
if (!type_has_bits(fn_type_id->return_type)) {
gen_return_type = g->builtin_types.entry_void;
} else if (first_arg_return) {
TypeTableEntry *gen_type = get_pointer_to_type(g, fn_type_id->return_type, false);
gen_param_types[gen_param_index] = gen_type->type_ref;
gen_param_info->gen_index = gen_param_index;
gen_param_info->type = gen_type;
gen_param_index += 1;
// after the gen_param_index += 1 because 0 is the return type
param_di_types[gen_param_index] = gen_type->di_type;
gen_return_type = g->builtin_types.entry_void;
} else {
gen_return_type = fn_type_id->return_type;
}
fn_type->data.fn.gen_return_type = gen_return_type;
fn_type->data.fn.gen_param_info = allocate<FnGenParamInfo>(fn_type_id->param_count);
for (size_t i = 0; i < fn_type_id->param_count; i += 1) {
FnTypeParamInfo *src_param_info = &fn_type->data.fn.fn_type_id.param_info[i];
TypeTableEntry *type_entry = src_param_info->type;
FnGenParamInfo *gen_param_info = &fn_type->data.fn.gen_param_info[i];
gen_param_info->src_index = i;
gen_param_info->gen_index = SIZE_MAX;
assert(type_is_complete(type_entry));
if (type_has_bits(type_entry)) {
TypeTableEntry *gen_type;
if (handle_is_ptr(type_entry)) {
gen_type = get_pointer_to_type(g, type_entry, true);
gen_param_info->is_byval = true;
} else {
gen_type = type_entry;
}
gen_param_types[gen_param_index] = gen_type->type_ref;
gen_param_info->gen_index = gen_param_index;
gen_param_info->type = gen_type;
gen_param_index += 1;
// after the gen_param_index += 1 because 0 is the return type
param_di_types[gen_param_index] = gen_type->di_type;
}
}
fn_type->data.fn.gen_param_count = gen_param_index;
fn_type->data.fn.raw_type_ref = LLVMFunctionType(gen_return_type->type_ref,
gen_param_types, gen_param_index, fn_type_id->is_var_args);
fn_type->type_ref = LLVMPointerType(fn_type->data.fn.raw_type_ref, 0);
fn_type->di_type = ZigLLVMCreateSubroutineType(g->dbuilder, param_di_types, gen_param_index + 1, 0);
}
fn_type->data.fn.gen_param_count = gen_param_index;
fn_type->data.fn.raw_type_ref = LLVMFunctionType(gen_return_type->type_ref,
gen_param_types, gen_param_index, fn_type_id->is_var_args);
fn_type->type_ref = LLVMPointerType(fn_type->data.fn.raw_type_ref, 0);
fn_type->di_type = ZigLLVMCreateSubroutineType(g->dbuilder, param_di_types, gen_param_index + 1, 0);
g->fn_type_table.put(&fn_type->data.fn.fn_type_id, fn_type);
return fn_type;
@@ -2765,6 +2772,40 @@ static TypeTableEntry *type_of_first_thing_in_memory(TypeTableEntry *type_entry)
zig_unreachable();
}
bool type_requires_comptime(TypeTableEntry *type_entry) {
switch (type_entry->id) {
case TypeTableEntryIdInvalid:
case TypeTableEntryIdUnreachable:
case TypeTableEntryIdVar:
zig_unreachable();
case TypeTableEntryIdNumLitFloat:
case TypeTableEntryIdNumLitInt:
case TypeTableEntryIdUndefLit:
case TypeTableEntryIdNullLit:
case TypeTableEntryIdMetaType:
case TypeTableEntryIdVoid:
case TypeTableEntryIdNamespace:
case TypeTableEntryIdBlock:
case TypeTableEntryIdBoundFn:
return true;
case TypeTableEntryIdArray:
case TypeTableEntryIdStruct:
case TypeTableEntryIdUnion:
case TypeTableEntryIdMaybe:
case TypeTableEntryIdErrorUnion:
case TypeTableEntryIdTypeDecl:
case TypeTableEntryIdEnum:
case TypeTableEntryIdPureError:
case TypeTableEntryIdFn:
case TypeTableEntryIdBool:
case TypeTableEntryIdInt:
case TypeTableEntryIdFloat:
case TypeTableEntryIdPointer:
return false;
}
zig_unreachable();
}
uint64_t get_memcpy_align(CodeGen *g, TypeTableEntry *type_entry) {
TypeTableEntry *first_type_in_mem = type_of_first_thing_in_memory(type_entry);
return LLVMABISizeOfType(g->target_data_ref, first_type_in_mem->type_ref);
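
The newly added type_requires_comptime classifies which type categories exist only at compile time and therefore can never become runtime parameters of a generic instantiation; it is used for exactly that check in the ir.cpp changes further down. A minimal, self-contained sketch of the same classification idea, with hypothetical names (TypeKind, requiresComptime) standing in for the compiler's TypeTableEntryId values:

#include <cassert>
#include <cstdio>

// Hypothetical stand-in for a few of the compiler's TypeTableEntryId values.
enum class TypeKind {
    MetaType,   // `type` itself -- compile-time only
    NumLitInt,  // untyped integer literal -- compile-time only
    Int,        // sized integer -- has a runtime representation
    Float,
    Pointer,
};

// Mirrors the shape of type_requires_comptime: compile-time-only kinds
// return true, kinds with a runtime representation return false.
static bool requiresComptime(TypeKind kind) {
    switch (kind) {
        case TypeKind::MetaType:
        case TypeKind::NumLitInt:
            return true;
        case TypeKind::Int:
        case TypeKind::Float:
        case TypeKind::Pointer:
            return false;
    }
    return false; // unreachable with a valid kind
}

int main() {
    // A generic instantiation rejects a runtime parameter whose resolved
    // type is compile-time only, which is what the ir.cpp change enforces.
    assert(requiresComptime(TypeKind::MetaType));
    assert(!requiresComptime(TypeKind::Int));
    printf("classification ok\n");
    return 0;
}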

View File

@@ -75,6 +75,7 @@ FnTableEntry *create_fn_raw(FnInline inline_value, bool internal_linkage);
void init_fn_type_id(FnTypeId *fn_type_id, AstNode *proto_node);
AstNode *get_param_decl_node(FnTableEntry *fn_entry, size_t index);
FnTableEntry *scope_get_fn_if_root(Scope *scope);
bool type_requires_comptime(TypeTableEntry *type_entry);
ScopeBlock *create_block_scope(AstNode *node, Scope *parent);
ScopeDefer *create_defer_scope(AstNode *node, Scope *parent);

View File

@@ -2307,6 +2307,12 @@ static IrInstruction *ir_gen_null_literal(IrBuilder *irb, Scope *scope, AstNode
return ir_build_const_null(irb, scope, node);
}
static IrInstruction *ir_gen_var_literal(IrBuilder *irb, Scope *scope, AstNode *node) {
assert(node->type == NodeTypeVarLiteral);
return ir_build_const_type(irb, scope, node, irb->codegen->builtin_types.entry_var);
}
static IrInstruction *ir_gen_decl_ref(IrBuilder *irb, AstNode *source_node, Tld *tld,
LValPurpose lval, Scope *scope)
{
@@ -3927,6 +3933,8 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
return ir_lval_wrap(irb, scope, ir_gen_asm_expr(irb, scope, node), lval);
case NodeTypeNullLiteral:
return ir_lval_wrap(irb, scope, ir_gen_null_literal(irb, scope, node), lval);
case NodeTypeVarLiteral:
return ir_lval_wrap(irb, scope, ir_gen_var_literal(irb, scope, node), lval);
case NodeTypeIfVarExpr:
return ir_lval_wrap(irb, scope, ir_gen_if_var_expr(irb, scope, node), lval);
case NodeTypeSwitchExpr:
@@ -3949,8 +3957,6 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
return ir_lval_wrap(irb, scope, ir_gen_slice(irb, scope, node), lval);
case NodeTypeUnwrapErrorExpr:
return ir_lval_wrap(irb, scope, ir_gen_err_ok_or(irb, scope, node), lval);
case NodeTypeZeroesLiteral:
case NodeTypeVarLiteral:
case NodeTypeFnProto:
case NodeTypeFnDef:
case NodeTypeFnDecl:
@@ -3959,6 +3965,8 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
case NodeTypeErrorValueDecl:
case NodeTypeTypeDecl:
zig_panic("TODO more IR gen for node types");
case NodeTypeZeroesLiteral:
zig_panic("TODO zeroes is deprecated");
}
zig_unreachable();
}
@@ -4065,17 +4073,18 @@ static IrInstruction *ir_exec_const_result(IrExecutable *exec) {
return nullptr;
IrBasicBlock *bb = exec->basic_block_list.at(0);
if (bb->instruction_list.length != 1)
return nullptr;
IrInstruction *only_inst = bb->instruction_list.at(0);
if (only_inst->id != IrInstructionIdReturn)
return nullptr;
IrInstructionReturn *ret_inst = (IrInstructionReturn *)only_inst;
IrInstruction *value = ret_inst->value;
assert(value->static_value.special != ConstValSpecialRuntime);
return value;
for (size_t i = 0; i < bb->instruction_list.length; i += 1) {
IrInstruction *instruction = bb->instruction_list.at(i);
if (instruction->id == IrInstructionIdReturn) {
IrInstructionReturn *ret_inst = (IrInstructionReturn *)instruction;
IrInstruction *value = ret_inst->value;
assert(value->static_value.special != ConstValSpecialRuntime);
return value;
} else if (ir_has_side_effects(instruction)) {
return nullptr;
}
}
return nullptr;
}
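
The ir_exec_const_result change relaxes the old requirement that the executable consist of exactly one return instruction: the block is now scanned for the return, and the scan gives up only if a side-effecting instruction is seen first. A toy sketch of that scan, with a hypothetical ToyInstr standing in for IrInstruction:

#include <cstddef>
#include <cstdio>
#include <vector>

struct ToyInstr {
    bool is_return;
    bool has_side_effects;
    long const_value; // only meaningful when is_return is true
};

// Walk the first basic block: return the compile-time value carried by the
// return instruction, but bail as soon as a side-effecting instruction appears.
static const long *constResult(const std::vector<ToyInstr> &block) {
    for (size_t i = 0; i < block.size(); i += 1) {
        const ToyInstr &inst = block[i];
        if (inst.is_return)
            return &inst.const_value;
        if (inst.has_side_effects)
            return nullptr;
    }
    return nullptr;
}

int main() {
    std::vector<ToyInstr> block = {
        {false, false, 0},  // e.g. a pure cast: allowed before the return
        {true, false, 42},  // the return carrying a comptime value
    };
    const long *result = constResult(block);
    printf("%ld\n", result ? *result : -1); // prints 42
    return 0;
}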
static bool ir_emit_global_runtime_side_effect(IrAnalyze *ira, IrInstruction *source_instruction) {
@@ -4123,9 +4132,109 @@ static bool ir_num_lit_fits_in_other_type(IrAnalyze *ira, IrInstruction *instruc
return false;
}
static TypeTableEntry *ir_determine_peer_types(IrAnalyze *ira, AstNode *source_node,
IrInstruction **instructions, size_t instruction_count)
enum ImplicitCastMatchResult {
ImplicitCastMatchResultNo,
ImplicitCastMatchResultYes,
ImplicitCastMatchResultReportedError,
};
static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira, TypeTableEntry *expected_type,
TypeTableEntry *actual_type, IrInstruction *value)
{
if (types_match_const_cast_only(expected_type, actual_type)) {
return ImplicitCastMatchResultYes;
}
// implicit conversion from anything to var
if (expected_type->id == TypeTableEntryIdVar) {
return ImplicitCastMatchResultYes;
}
// implicit conversion from non maybe type to maybe type
if (expected_type->id == TypeTableEntryIdMaybe &&
ir_types_match_with_implicit_cast(ira, expected_type->data.maybe.child_type, actual_type, value))
{
return ImplicitCastMatchResultYes;
}
// implicit conversion from null literal to maybe type
if (expected_type->id == TypeTableEntryIdMaybe &&
actual_type->id == TypeTableEntryIdNullLit)
{
return ImplicitCastMatchResultYes;
}
// implicit conversion from error child type to error type
if (expected_type->id == TypeTableEntryIdErrorUnion &&
ir_types_match_with_implicit_cast(ira, expected_type->data.error.child_type, actual_type, value))
{
return ImplicitCastMatchResultYes;
}
// implicit conversion from pure error to error union type
if (expected_type->id == TypeTableEntryIdErrorUnion &&
actual_type->id == TypeTableEntryIdPureError)
{
return ImplicitCastMatchResultYes;
}
// implicit widening conversion
if (expected_type->id == TypeTableEntryIdInt &&
actual_type->id == TypeTableEntryIdInt &&
expected_type->data.integral.is_signed == actual_type->data.integral.is_signed &&
expected_type->data.integral.bit_count >= actual_type->data.integral.bit_count)
{
return ImplicitCastMatchResultYes;
}
// small enough unsigned ints can get casted to large enough signed ints
if (expected_type->id == TypeTableEntryIdInt && expected_type->data.integral.is_signed &&
actual_type->id == TypeTableEntryIdInt && !actual_type->data.integral.is_signed &&
expected_type->data.integral.bit_count > actual_type->data.integral.bit_count)
{
return ImplicitCastMatchResultYes;
}
// implicit float widening conversion
if (expected_type->id == TypeTableEntryIdFloat &&
actual_type->id == TypeTableEntryIdFloat &&
expected_type->data.floating.bit_count >= actual_type->data.floating.bit_count)
{
return ImplicitCastMatchResultYes;
}
// implicit array to slice conversion
if (expected_type->id == TypeTableEntryIdStruct &&
expected_type->data.structure.is_slice &&
actual_type->id == TypeTableEntryIdArray &&
types_match_const_cast_only(
expected_type->data.structure.fields[0].type_entry->data.pointer.child_type,
actual_type->data.array.child_type))
{
return ImplicitCastMatchResultYes;
}
// implicit number literal to typed number
if ((actual_type->id == TypeTableEntryIdNumLitFloat ||
actual_type->id == TypeTableEntryIdNumLitInt))
{
if (ir_num_lit_fits_in_other_type(ira, value, expected_type)) {
return ImplicitCastMatchResultYes;
} else {
return ImplicitCastMatchResultReportedError;
}
}
// implicit undefined literal to anything
if (actual_type->id == TypeTableEntryIdUndefLit) {
return ImplicitCastMatchResultYes;
}
return ImplicitCastMatchResultNo;
}
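
The block above is the matcher moved up in the file, with one new rule: anything implicitly converts to var, the generic placeholder type. A self-contained sketch of the matching idea, with a hypothetical ToyType in place of TypeTableEntry, covering just the wildcard rule and integer widening:

#include <cassert>

struct ToyType {
    bool is_var;      // generic placeholder: accepts anything
    bool is_int;
    bool is_signed;
    unsigned bits;
};

static bool implicitMatch(const ToyType &expected, const ToyType &actual) {
    if (expected.is_var)
        return true; // the rule this commit adds: anything converts to var
    if (expected.is_int && actual.is_int &&
        expected.is_signed == actual.is_signed &&
        expected.bits >= actual.bits)
        return true; // implicit widening conversion
    return false;
}

int main() {
    ToyType var_t = {true, false, false, 0};
    ToyType i32_t = {false, true, true, 32};
    ToyType i16_t = {false, true, true, 16};
    assert(implicitMatch(var_t, i32_t));  // anything converts to var
    assert(implicitMatch(i32_t, i16_t));  // widening allowed
    assert(!implicitMatch(i16_t, i32_t)); // narrowing rejected
    return 0;
}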
static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_node, IrInstruction **instructions, size_t instruction_count) {
assert(instruction_count >= 1);
IrInstruction *prev_inst = instructions[0];
if (prev_inst->type_entry->id == TypeTableEntryIdInvalid) {
@@ -4241,109 +4350,6 @@ static TypeTableEntry *ir_determine_peer_types(IrAnalyze *ira, AstNode *source_n
}
}
enum ImplicitCastMatchResult {
ImplicitCastMatchResultNo,
ImplicitCastMatchResultYes,
ImplicitCastMatchResultReportedError,
};
static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira, TypeTableEntry *expected_type,
TypeTableEntry *actual_type, IrInstruction *value)
{
if (types_match_const_cast_only(expected_type, actual_type)) {
return ImplicitCastMatchResultYes;
}
// implicit conversion from non maybe type to maybe type
if (expected_type->id == TypeTableEntryIdMaybe &&
ir_types_match_with_implicit_cast(ira, expected_type->data.maybe.child_type, actual_type, value))
{
return ImplicitCastMatchResultYes;
}
// implicit conversion from null literal to maybe type
if (expected_type->id == TypeTableEntryIdMaybe &&
actual_type->id == TypeTableEntryIdNullLit)
{
return ImplicitCastMatchResultYes;
}
// implicit conversion from error child type to error type
if (expected_type->id == TypeTableEntryIdErrorUnion &&
ir_types_match_with_implicit_cast(ira, expected_type->data.error.child_type, actual_type, value))
{
return ImplicitCastMatchResultYes;
}
// implicit conversion from pure error to error union type
if (expected_type->id == TypeTableEntryIdErrorUnion &&
actual_type->id == TypeTableEntryIdPureError)
{
return ImplicitCastMatchResultYes;
}
// implicit widening conversion
if (expected_type->id == TypeTableEntryIdInt &&
actual_type->id == TypeTableEntryIdInt &&
expected_type->data.integral.is_signed == actual_type->data.integral.is_signed &&
expected_type->data.integral.bit_count >= actual_type->data.integral.bit_count)
{
return ImplicitCastMatchResultYes;
}
// small enough unsigned ints can get casted to large enough signed ints
if (expected_type->id == TypeTableEntryIdInt && expected_type->data.integral.is_signed &&
actual_type->id == TypeTableEntryIdInt && !actual_type->data.integral.is_signed &&
expected_type->data.integral.bit_count > actual_type->data.integral.bit_count)
{
return ImplicitCastMatchResultYes;
}
// implicit float widening conversion
if (expected_type->id == TypeTableEntryIdFloat &&
actual_type->id == TypeTableEntryIdFloat &&
expected_type->data.floating.bit_count >= actual_type->data.floating.bit_count)
{
return ImplicitCastMatchResultYes;
}
// implicit array to slice conversion
if (expected_type->id == TypeTableEntryIdStruct &&
expected_type->data.structure.is_slice &&
actual_type->id == TypeTableEntryIdArray &&
types_match_const_cast_only(
expected_type->data.structure.fields[0].type_entry->data.pointer.child_type,
actual_type->data.array.child_type))
{
return ImplicitCastMatchResultYes;
}
// implicit number literal to typed number
if ((actual_type->id == TypeTableEntryIdNumLitFloat ||
actual_type->id == TypeTableEntryIdNumLitInt))
{
if (ir_num_lit_fits_in_other_type(ira, value, expected_type)) {
return ImplicitCastMatchResultYes;
} else {
return ImplicitCastMatchResultReportedError;
}
}
// implicit undefined literal to anything
if (actual_type->id == TypeTableEntryIdUndefLit) {
return ImplicitCastMatchResultYes;
}
return ImplicitCastMatchResultNo;
}
static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_node,
IrInstruction **instructions, size_t instruction_count)
{
return ir_determine_peer_types(ira, source_node, instructions, instruction_count);
}
static void ir_add_alloca(IrAnalyze *ira, IrInstruction *instruction, TypeTableEntry *type_entry) {
if (type_has_bits(type_entry) && handle_is_ptr(type_entry)) {
FnTableEntry *fn_entry = exec_fn_entry(ira->new_irb.exec);
@@ -4746,6 +4752,9 @@ static IrInstruction *ir_analyze_cast(IrAnalyze *ira, IrInstruction *source_inst
return ira->codegen->invalid_instruction;
}
if (wanted_type->id == TypeTableEntryIdVar)
return value;
// explicit match or non-const to const
if (types_match_const_cast_only(wanted_type, actual_type)) {
return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpNoop, false);
@@ -5833,7 +5842,7 @@ static bool ir_analyze_fn_call_inline_arg(IrAnalyze *ira, AstNode *fn_proto_node
Buf *param_name = param_decl_node->data.param_decl.name;
VariableTableEntry *var = add_variable(ira->codegen, param_decl_node,
*exec_scope, param_name, param_type, true, first_arg_val);
*exec_scope, param_name, casted_arg->type_entry, true, first_arg_val);
*exec_scope = var->child_scope;
*next_proto_i += 1;
@@ -5852,38 +5861,49 @@ static bool ir_analyze_fn_call_generic_arg(IrAnalyze *ira, AstNode *fn_proto_nod
if (param_type->id == TypeTableEntryIdInvalid)
return false;
bool is_var_type = (param_type->id == TypeTableEntryIdVar);
IrInstruction *casted_arg;
if (is_var_type) {
casted_arg = arg;
} else {
casted_arg = ir_implicit_cast(ira, arg, param_type);
if (casted_arg->type_entry->id == TypeTableEntryIdInvalid)
return false;
}
IrInstruction *casted_arg = ir_implicit_cast(ira, arg, param_type);
if (casted_arg->type_entry->id == TypeTableEntryIdInvalid)
return false;
bool inline_arg = param_decl_node->data.param_decl.is_inline;
if (inline_arg || is_var_type) {
ConstExprValue *arg_val = ir_resolve_const(ira, casted_arg, UndefBad);
bool is_var_type = (param_type->id == TypeTableEntryIdVar);
ConstExprValue *arg_val;
if (inline_arg) {
arg_val = ir_resolve_const(ira, casted_arg, UndefBad);
if (!arg_val)
return false;
Buf *param_name = param_decl_node->data.param_decl.name;
VariableTableEntry *var = add_variable(ira->codegen, param_decl_node,
*child_scope, param_name, param_type, true, arg_val);
*child_scope = var->child_scope;
// This generic function instance could be called with anything, so when this variable is read it
// needs to know that it depends on compile time variable data.
var->value->depends_on_compile_var = true;
arg_val->depends_on_compile_var = true;
} else {
arg_val = nullptr;
}
Buf *param_name = param_decl_node->data.param_decl.name;
VariableTableEntry *var = add_variable(ira->codegen, param_decl_node,
*child_scope, param_name, casted_arg->type_entry, true, arg_val);
*child_scope = var->child_scope;
if (inline_arg || is_var_type) {
GenericParamValue *generic_param = &generic_id->params[generic_id->param_count];
generic_param->type = casted_arg->type_entry;
generic_param->value = arg_val;
generic_id->param_count += 1;
} else {
}
if (!inline_arg) {
if (type_requires_comptime(var->type)) {
ir_add_error(ira, arg,
buf_sprintf("parameter of type '%s' not allowed", buf_ptr(&var->type->name)));
return false;
}
var->shadowable = true;
casted_args[fn_type_id->param_count] = casted_arg;
FnTypeParamInfo *param_info = &fn_type_id->param_info[fn_type_id->param_count];
param_info->type = param_type;
param_info->type = casted_arg->type_entry;
param_info->is_noalias = param_decl_node->data.param_decl.is_noalias;
impl_fn->param_source_nodes[fn_type_id->param_count] = param_decl_node;
fn_type_id->param_count += 1;
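
In this hunk, generic_id appears to collect the types (and, for inline parameters, the comptime values) that identify a particular generic instantiation, and a var argument now contributes the argument's resolved type rather than the placeholder var type. A hypothetical sketch of that identification idea, with invented names (GenericKey, getOrCreateInstance) rather than the compiler's real tables:

#include <cstdio>
#include <map>
#include <string>
#include <utility>
#include <vector>

// Hypothetical key: the (type name, comptime value) pairs of the
// comptime-known parameters of one generic call.
using GenericKey = std::vector<std::pair<std::string, long>>;

static std::map<GenericKey, int> instance_table; // key -> instance id
static int next_instance_id = 0;

// Repeated calls with the same key reuse the same instantiation; a new key
// triggers analysis of a fresh instance.
static int getOrCreateInstance(const GenericKey &key) {
    auto it = instance_table.find(key);
    if (it != instance_table.end())
        return it->second;
    int id = next_instance_id;
    next_instance_id += 1;
    instance_table.emplace(key, id);
    return id;
}

int main() {
    GenericKey call_a = {{"i32", 0}};
    GenericKey call_b = {{"f64", 0}};
    int a1 = getOrCreateInstance(call_a); // 0: first time this key is seen
    int b1 = getOrCreateInstance(call_b); // 1: new key, new instance
    int a2 = getOrCreateInstance(call_a); // 0: same key, instance reused
    printf("%d %d %d\n", a1, b1, a2);
    return 0;
}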
@@ -6475,12 +6495,20 @@ static TypeTableEntry *ir_analyze_instruction_phi(IrAnalyze *ira, IrInstructionP
return ira->codegen->builtin_types.entry_invalid;
}
// cast all literal values to the resolved type
// cast all values to the resolved type. however we can't put cast instructions in front of the phi instruction.
// so we go back and insert the casts as the last instruction in the corresponding predecessor blocks, and
// then make sure the branch instruction is preserved.
IrBasicBlock *cur_bb = ira->new_irb.current_basic_block;
for (size_t i = 0; i < new_incoming_values.length; i += 1) {
IrInstruction *new_value = new_incoming_values.at(i);
IrBasicBlock *predecessor = new_incoming_blocks.at(i);
IrInstruction *branch_instruction = predecessor->instruction_list.pop();
ir_set_cursor_at_end(&ira->new_irb, predecessor);
IrInstruction *casted_value = ir_implicit_cast(ira, new_value, resolved_type);
new_incoming_values.items[i] = casted_value;
predecessor->instruction_list.append(branch_instruction);
}
ir_set_cursor_at_end(&ira->new_irb, cur_bb);
ir_build_phi_from(&ira->new_irb, &phi_instruction->base, new_incoming_blocks.length,
new_incoming_blocks.items, new_incoming_values.items);
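
The comment in the hunk above explains the core of the phi fix: a cast for an incoming value has to live in the predecessor block, before that block's terminating branch, not next to the phi itself. A minimal stand-alone sketch of that pop-the-branch, append-the-cast, restore-the-branch pattern, using a hypothetical ToyBlock instead of IrBasicBlock:

#include <cstddef>
#include <cstdio>
#include <string>
#include <vector>

struct ToyBlock {
    std::vector<std::string> instrs; // last element is the terminating branch
};

static void insertCastBeforeTerminator(ToyBlock &pred, const std::string &cast) {
    std::string branch = pred.instrs.back(); // temporarily pop the terminator
    pred.instrs.pop_back();
    pred.instrs.push_back(cast);             // the cast now dominates the edge
    pred.instrs.push_back(branch);           // restore the branch last
}

int main() {
    ToyBlock pred;
    pred.instrs = {"%1 = add ...", "br %merge"};
    insertCastBeforeTerminator(pred, "%2 = widen %1");
    for (size_t i = 0; i < pred.instrs.size(); i += 1)
        printf("%s\n", pred.instrs[i].c_str()); // add, widen, br
    return 0;
}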