From 0148f39df929cc00c1b2231acce41c22f74f9969 Mon Sep 17 00:00:00 2001
From: Andrew Kelley
Date: Thu, 16 Feb 2017 19:35:42 -0500
Subject: [PATCH] pointers with bit offset contain length

adds compile error when passing pointer that is byte-aligned at the
beginning but not the end to a function expecting a fully byte aligned
pointer

closes #261
---
 src/all_types.hpp  |  3 +++
 src/analyze.cpp    | 34 +++++++++++++++++++++-------------
 src/analyze.hpp    |  2 +-
 src/codegen.cpp    | 49 ++++++++++++++++++-------------------------------
 src/ir.cpp         | 24 ++++++++++++------------
 test/run_tests.cpp | 19 +++++++++++++++++++
 6 files changed, 74 insertions(+), 57 deletions(-)

diff --git a/src/all_types.hpp b/src/all_types.hpp
index 78800ae6fb..785ae98163 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -853,6 +853,7 @@ struct TypeTableEntryPointer {
     bool is_const;
     bool is_volatile;
     uint32_t bit_offset;
+    uint32_t unaligned_bit_count;
 };
 
 struct TypeTableEntryInt {
@@ -877,6 +878,7 @@ struct TypeStructField {
     // offset from the memory at gen_index
     size_t packed_bits_offset;
     size_t packed_bits_size;
+    size_t unaligned_bit_count;
 };
 struct TypeTableEntryStruct {
     AstNode *decl_node;
@@ -1204,6 +1206,7 @@ struct TypeId {
         bool is_const;
         bool is_volatile;
         uint32_t bit_offset;
+        uint32_t unaligned_bit_count;
     } pointer;
     struct {
         TypeTableEntry *child_type;
diff --git a/src/analyze.cpp b/src/analyze.cpp
index 8ac641ebfc..e4ccf699ba 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -287,23 +287,25 @@ TypeTableEntry *get_smallest_unsigned_int_type(CodeGen *g, uint64_t x) {
 }
 
 TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type, bool is_const,
-        uint32_t bit_offset, bool is_volatile)
+        bool is_volatile, uint32_t bit_offset, uint32_t unaligned_bit_count)
 {
     assert(child_type->id != TypeTableEntryIdInvalid);
 
     TypeId type_id = {};
     TypeTableEntry **parent_pointer = nullptr;
-    if (bit_offset != 0 || is_volatile) {
+    if (unaligned_bit_count != 0 || is_volatile) {
         type_id.id = TypeTableEntryIdPointer;
         type_id.data.pointer.child_type = child_type;
         type_id.data.pointer.is_const = is_const;
         type_id.data.pointer.is_volatile = is_volatile;
         type_id.data.pointer.bit_offset = bit_offset;
+        type_id.data.pointer.unaligned_bit_count = unaligned_bit_count;
 
         auto existing_entry = g->type_table.maybe_get(type_id);
         if (existing_entry)
             return existing_entry->value;
     } else {
+        assert(bit_offset == 0);
         parent_pointer = &child_type->pointer_parent[(is_const ? 1 : 0)];
         if (*parent_pointer)
             return *parent_pointer;
@@ -316,11 +318,11 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type
     const char *const_str = is_const ? "const " : "";
     const char *volatile_str = is_volatile ? "volatile " : "";
"volatile " : ""; buf_resize(&entry->name, 0); - if (bit_offset == 0) { + if (unaligned_bit_count == 0) { buf_appendf(&entry->name, "&%s%s%s", const_str, volatile_str, buf_ptr(&child_type->name)); } else { - buf_appendf(&entry->name, "&:%" PRIu8 " %s%s%s", bit_offset, const_str, - volatile_str, buf_ptr(&child_type->name)); + buf_appendf(&entry->name, "&:%" PRIu32 ":%" PRIu32 " %s%s%s", bit_offset, + bit_offset + unaligned_bit_count, const_str, volatile_str, buf_ptr(&child_type->name)); } TypeTableEntry *canon_child_type = get_underlying_type(child_type); @@ -344,6 +346,7 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type entry->data.pointer.is_const = is_const; entry->data.pointer.is_volatile = is_volatile; entry->data.pointer.bit_offset = bit_offset; + entry->data.pointer.unaligned_bit_count = unaligned_bit_count; if (parent_pointer) { *parent_pointer = entry; @@ -354,7 +357,7 @@ TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type } TypeTableEntry *get_pointer_to_type(CodeGen *g, TypeTableEntry *child_type, bool is_const) { - return get_pointer_to_type_extra(g, child_type, is_const, 0, false); + return get_pointer_to_type_extra(g, child_type, is_const, false, 0, 0); } TypeTableEntry *get_maybe_type(CodeGen *g, TypeTableEntry *child_type) { @@ -1429,13 +1432,15 @@ static void resolve_struct_type(CodeGen *g, TypeTableEntry *struct_type) { break; } - type_struct_field->packed_bits_size = type_size_bits(g, field_type); + size_t field_size_in_bits = type_size_bits(g, field_type); + size_t next_packed_bits_offset = packed_bits_offset + field_size_in_bits; - size_t next_packed_bits_offset = packed_bits_offset + type_struct_field->packed_bits_size; + type_struct_field->packed_bits_size = field_size_in_bits; if (first_packed_bits_offset_misalign != SIZE_MAX) { // this field is not byte-aligned; it is part of the previous field with a bit offset type_struct_field->packed_bits_offset = packed_bits_offset - first_packed_bits_offset_misalign; + type_struct_field->unaligned_bit_count = field_size_in_bits; if (next_packed_bits_offset % 8 == 0) { // next field recovers byte alignment @@ -1448,9 +1453,12 @@ static void resolve_struct_type(CodeGen *g, TypeTableEntry *struct_type) { } else if (next_packed_bits_offset % 8 != 0) { first_packed_bits_offset_misalign = packed_bits_offset; type_struct_field->packed_bits_offset = 0; + type_struct_field->unaligned_bit_count = field_size_in_bits; } else { + // This is a byte-aligned field (both start and end) in a packed struct. 
                 element_types[gen_field_index] = field_type->type_ref;
                 type_struct_field->packed_bits_offset = 0;
+                type_struct_field->unaligned_bit_count = 0;
                 gen_field_index += 1;
             }
             packed_bits_offset = next_packed_bits_offset;
@@ -2237,7 +2245,9 @@ bool types_match_const_cast_only(TypeTableEntry *expected_type, TypeTableEntry *
     if (expected_type->id == TypeTableEntryIdPointer &&
         actual_type->id == TypeTableEntryIdPointer &&
         (!actual_type->data.pointer.is_const || expected_type->data.pointer.is_const) &&
-        (!actual_type->data.pointer.is_volatile || expected_type->data.pointer.is_volatile))
+        (!actual_type->data.pointer.is_volatile || expected_type->data.pointer.is_volatile) &&
+        actual_type->data.pointer.bit_offset == expected_type->data.pointer.bit_offset &&
+        actual_type->data.pointer.unaligned_bit_count == expected_type->data.pointer.unaligned_bit_count)
     {
         return types_match_const_cast_only(expected_type->data.pointer.child_type,
                 actual_type->data.pointer.child_type);
@@ -3943,7 +3953,8 @@ uint32_t type_id_hash(TypeId x) {
             return hash_ptr(x.data.pointer.child_type) +
                 (x.data.pointer.is_const ? 2749109194 : 4047371087) +
                 (x.data.pointer.is_volatile ? 536730450 : 1685612214) +
-                (((uint32_t)x.data.pointer.bit_offset) * 2639019452);
+                (((uint32_t)x.data.pointer.bit_offset) * 2639019452) +
+                (((uint32_t)x.data.pointer.unaligned_bit_count) * 529908881);
         case TypeTableEntryIdArray:
             return hash_ptr(x.data.array.child_type) +
                 (x.data.array.size * 2122979968);
@@ -3987,7 +3998,8 @@ bool type_id_eql(TypeId a, TypeId b) {
             return a.data.pointer.child_type == b.data.pointer.child_type &&
                 a.data.pointer.is_const == b.data.pointer.is_const &&
                 a.data.pointer.is_volatile == b.data.pointer.is_volatile &&
-                a.data.pointer.bit_offset == b.data.pointer.bit_offset;
+                a.data.pointer.bit_offset == b.data.pointer.bit_offset &&
+                a.data.pointer.unaligned_bit_count == b.data.pointer.unaligned_bit_count;
         case TypeTableEntryIdArray:
             return a.data.array.child_type == b.data.array.child_type &&
                 a.data.array.size == b.data.array.size;
diff --git a/src/analyze.hpp b/src/analyze.hpp
index 4cdcea556f..4a53c7fa73 100644
--- a/src/analyze.hpp
+++ b/src/analyze.hpp
@@ -16,7 +16,7 @@ ErrorMsg *add_error_note(CodeGen *g, ErrorMsg *parent_msg, AstNode *node, Buf *m
 TypeTableEntry *new_type_table_entry(TypeTableEntryId id);
 TypeTableEntry *get_pointer_to_type(CodeGen *g, TypeTableEntry *child_type, bool is_const);
 TypeTableEntry *get_pointer_to_type_extra(CodeGen *g, TypeTableEntry *child_type, bool is_const,
-        uint32_t bit_offset, bool is_volatile);
+        bool is_volatile, uint32_t bit_offset, uint32_t unaligned_bit_count);
 bool is_node_void_expr(AstNode *node);
 uint64_t type_size(CodeGen *g, TypeTableEntry *type_entry);
 uint64_t type_size_bits(CodeGen *g, TypeTableEntry *type_entry);
diff --git a/src/codegen.cpp b/src/codegen.cpp
index 69cfa66f04..90c910adf3 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -1374,27 +1374,17 @@ static LLVMValueRef ir_render_load_ptr(CodeGen *g, IrExecutable *executable, IrI
     assert(ptr_type->id == TypeTableEntryIdPointer);
 
     bool is_volatile = ptr_type->data.pointer.is_volatile;
-    uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
-    LLVMValueRef containing_int;
-    if (bit_offset == 0) {
-        LLVMValueRef result_val = get_handle_value(g, ptr, child_type, is_volatile);
-        if (LLVMGetTypeKind(LLVMTypeOf(result_val)) == LLVMIntegerTypeKind &&
-            LLVMGetTypeKind(child_type->type_ref) == LLVMIntegerTypeKind &&
-            LLVMGetIntTypeWidth(child_type->type_ref) < LLVMGetIntTypeWidth(LLVMTypeOf(result_val)))
-        {
-            containing_int = result_val;
-        } else {
-            return result_val;
-        }
-    } else {
-        assert(!handle_is_ptr(child_type));
-        containing_int = LLVMBuildLoad(g->builder, ptr, "");
-        LLVMSetVolatile(containing_int, is_volatile);
-    }
+    uint32_t unaligned_bit_count = ptr_type->data.pointer.unaligned_bit_count;
+    if (unaligned_bit_count == 0)
+        return get_handle_value(g, ptr, child_type, is_volatile);
 
-    uint32_t child_bit_count = type_size_bits(g, child_type);
+    assert(!handle_is_ptr(child_type));
+    LLVMValueRef containing_int = LLVMBuildLoad(g->builder, ptr, "");
+    LLVMSetVolatile(containing_int, is_volatile);
+
+    uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
     uint32_t host_bit_count = LLVMGetIntTypeWidth(LLVMTypeOf(containing_int));
-    uint32_t shift_amt = host_bit_count - bit_offset - child_bit_count;
+    uint32_t shift_amt = host_bit_count - bit_offset - unaligned_bit_count;
 
     LLVMValueRef shift_amt_val = LLVMConstInt(LLVMTypeOf(containing_int), shift_amt, false);
     LLVMValueRef shifted_value = LLVMBuildLShr(g->builder, containing_int, shift_amt_val, "");
@@ -1416,25 +1406,18 @@ static LLVMValueRef ir_render_store_ptr(CodeGen *g, IrExecutable *executable, Ir
     if (handle_is_ptr(child_type))
         return gen_struct_memcpy(g, value, ptr, child_type);
 
-    uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
-    if (bit_offset == 0) {
-        LLVMTypeRef ptr_child_ref = LLVMGetElementType(LLVMTypeOf(ptr));
-        bool need_to_do_some_bit_stuff =
-            LLVMGetTypeKind(ptr_child_ref) == LLVMIntegerTypeKind &&
-            LLVMGetTypeKind(child_type->type_ref) == LLVMIntegerTypeKind &&
-            LLVMGetIntTypeWidth(child_type->type_ref) < LLVMGetIntTypeWidth(ptr_child_ref);
-        if (!need_to_do_some_bit_stuff) {
-            LLVMValueRef llvm_instruction = LLVMBuildStore(g->builder, value, ptr);
-            LLVMSetVolatile(llvm_instruction, ptr_type->data.pointer.is_volatile);
-            return nullptr;
-        }
+    uint32_t unaligned_bit_count = ptr_type->data.pointer.unaligned_bit_count;
+    if (unaligned_bit_count == 0) {
+        LLVMValueRef llvm_instruction = LLVMBuildStore(g->builder, value, ptr);
+        LLVMSetVolatile(llvm_instruction, ptr_type->data.pointer.is_volatile);
+        return nullptr;
     }
 
     LLVMValueRef containing_int = LLVMBuildLoad(g->builder, ptr, "");
 
-    uint32_t child_bit_count = type_size_bits(g, child_type);
+    uint32_t bit_offset = ptr_type->data.pointer.bit_offset;
     uint32_t host_bit_count = LLVMGetIntTypeWidth(LLVMTypeOf(containing_int));
-    uint32_t shift_amt = host_bit_count - bit_offset - child_bit_count;
+    uint32_t shift_amt = host_bit_count - bit_offset - unaligned_bit_count;
 
     LLVMValueRef shift_amt_val = LLVMConstInt(LLVMTypeOf(containing_int), shift_amt, false);
     LLVMValueRef mask_val = LLVMConstAllOnes(child_type->type_ref);
diff --git a/src/ir.cpp b/src/ir.cpp
index ddb2164ad5..d71079d77f 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -6227,11 +6227,11 @@ static IrInstruction *ir_get_const_ptr(IrAnalyze *ira, IrInstruction *instructio
         const_val->type = pointee_type;
         type_ensure_zero_bits_known(ira->codegen, type_entry);
         const_val->data.x_type = get_pointer_to_type_extra(ira->codegen, type_entry,
-                ptr_is_const, 0, ptr_is_volatile);
+                ptr_is_const, ptr_is_volatile, 0, 0);
         return const_instr;
     } else {
         TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, pointee_type,
-                ptr_is_const, 0, ptr_is_volatile);
+                ptr_is_const, ptr_is_volatile, 0, 0);
         IrInstruction *const_instr = ir_get_const(ira, instruction);
         ConstExprValue *const_val = &const_instr->value;
         const_val->type = ptr_type;
@@ -6547,7 +6547,7 @@ static IrInstruction *ir_get_ref(IrAnalyze *ira, IrInstruction *source_instructi
                 ConstPtrMutComptimeConst, is_const, is_volatile);
     }
 
-    TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, value->value.type, is_const, 0, is_volatile);
+    TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, value->value.type, is_const, is_volatile, 0, 0);
     FnTableEntry *fn_entry = exec_fn_entry(ira->new_irb.exec);
     assert(fn_entry);
     IrInstruction *new_instruction = ir_build_ref(&ira->new_irb, source_instruction->scope,
@@ -8839,7 +8839,7 @@ static TypeTableEntry *ir_analyze_instruction_elem_ptr(IrAnalyze *ira, IrInstruc
         }
         TypeTableEntry *child_type = array_type->data.array.child_type;
         return_type = get_pointer_to_type_extra(ira->codegen, child_type,
-                ptr_type->data.pointer.is_const, 0, ptr_type->data.pointer.is_volatile);
+                ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile, 0, 0);
     } else if (array_type->id == TypeTableEntryIdPointer) {
         return_type = array_type;
     } else if (is_slice(array_type)) {
@@ -9057,7 +9057,7 @@ static TypeTableEntry *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field
                 ConstExprValue *struct_val = const_ptr_pointee(ptr_val);
                 ConstExprValue *field_val = &struct_val->data.x_struct.fields[field->src_index];
                 TypeTableEntry *ptr_type = get_pointer_to_type_extra(ira->codegen, field_val->type,
-                        is_const, 0, is_volatile);
+                        is_const, is_volatile, 0, 0);
                 ConstExprValue *const_val = ir_build_const_from(ira, &field_ptr_instruction->base);
                 const_val->data.x_ptr.special = ConstPtrSpecialBaseStruct;
                 const_val->data.x_ptr.mut = container_ptr->value.data.x_ptr.mut;
@@ -9068,7 +9068,7 @@ static TypeTableEntry *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field
         }
         ir_build_struct_field_ptr_from(&ira->new_irb, &field_ptr_instruction->base, container_ptr, field);
         return get_pointer_to_type_extra(ira->codegen, field->type_entry, is_const,
-                field->packed_bits_offset, is_volatile);
+                is_volatile, field->packed_bits_offset, field->unaligned_bit_count);
     } else {
         return ir_analyze_container_member_access_inner(ira, bare_type, field_name,
                 field_ptr_instruction, container_ptr, container_type);
@@ -9080,7 +9080,7 @@ static TypeTableEntry *ir_analyze_container_field_ptr(IrAnalyze *ira, Buf *field
         TypeEnumField *field = find_enum_type_field(bare_type, field_name);
         if (field) {
             ir_build_enum_field_ptr_from(&ira->new_irb, &field_ptr_instruction->base, container_ptr, field);
-            return get_pointer_to_type_extra(ira->codegen, field->type_entry, is_const, 0, is_volatile);
+            return get_pointer_to_type_extra(ira->codegen, field->type_entry, is_const, is_volatile, 0, 0);
         } else {
             return ir_analyze_container_member_access_inner(ira, bare_type, field_name,
                 field_ptr_instruction, container_ptr, container_type);
@@ -10016,7 +10016,7 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_maybe(IrAnalyze *ira,
     }
     TypeTableEntry *child_type = type_entry->data.maybe.child_type;
     TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, child_type,
-            ptr_type->data.pointer.is_const, 0, ptr_type->data.pointer.is_volatile);
+            ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile, 0, 0);
 
     if (instr_is_comptime(value)) {
         ConstExprValue *val = ir_resolve_const(ira, value, UndefBad);
@@ -11322,7 +11322,7 @@ static TypeTableEntry *ir_analyze_instruction_memset(IrAnalyze *ira, IrInstructi
 
     TypeTableEntry *usize = ira->codegen->builtin_types.entry_usize;
     TypeTableEntry *u8 = ira->codegen->builtin_types.entry_u8;
-    TypeTableEntry *u8_ptr = get_pointer_to_type_extra(ira->codegen, u8, false, 0, dest_is_volatile);
+    TypeTableEntry *u8_ptr = get_pointer_to_type_extra(ira->codegen, u8, false, dest_is_volatile, 0, 0);
 
     IrInstruction *casted_dest_ptr = ir_implicit_cast(ira, dest_ptr, u8_ptr);
     if (casted_dest_ptr->value.type->id == TypeTableEntryIdInvalid)
@@ -11410,8 +11410,8 @@ static TypeTableEntry *ir_analyze_instruction_memcpy(IrAnalyze *ira, IrInstructi
 
     TypeTableEntry *usize = ira->codegen->builtin_types.entry_usize;
     TypeTableEntry *u8 = ira->codegen->builtin_types.entry_u8;
-    TypeTableEntry *u8_ptr_mut = get_pointer_to_type_extra(ira->codegen, u8, false, 0, dest_is_volatile);
-    TypeTableEntry *u8_ptr_const = get_pointer_to_type_extra(ira->codegen, u8, true, 0, src_is_volatile);
+    TypeTableEntry *u8_ptr_mut = get_pointer_to_type_extra(ira->codegen, u8, false, dest_is_volatile, 0, 0);
+    TypeTableEntry *u8_ptr_const = get_pointer_to_type_extra(ira->codegen, u8, true, src_is_volatile, 0, 0);
 
     IrInstruction *casted_dest_ptr = ir_implicit_cast(ira, dest_ptr, u8_ptr_mut);
     if (casted_dest_ptr->value.type->id == TypeTableEntryIdInvalid)
@@ -11929,7 +11929,7 @@ static TypeTableEntry *ir_analyze_instruction_unwrap_err_payload(IrAnalyze *ira,
     } else if (canon_type->id == TypeTableEntryIdErrorUnion) {
         TypeTableEntry *child_type = canon_type->data.error.child_type;
         TypeTableEntry *result_type = get_pointer_to_type_extra(ira->codegen, child_type,
-                ptr_type->data.pointer.is_const, 0, ptr_type->data.pointer.is_volatile);
+                ptr_type->data.pointer.is_const, ptr_type->data.pointer.is_volatile, 0, 0);
         if (instr_is_comptime(value)) {
             ConstExprValue *ptr_val = ir_resolve_const(ira, value, UndefBad);
             if (!ptr_val)
diff --git a/test/run_tests.cpp b/test/run_tests.cpp
index 9e9333d730..1cc6f64ded 100644
--- a/test/run_tests.cpp
+++ b/test/run_tests.cpp
@@ -1627,6 +1627,25 @@ pub fn main(args: [][]u8) -> ??void {
 }
     )SOURCE", 1, ".tmp_source.zig:2:30: error: expected return type of main to be '%void', instead is '??void'");
 
+    add_compile_fail_case("casting bit offset pointer to regular pointer", R"SOURCE(
+const u2 = @intType(false, 2);
+const u3 = @intType(false, 3);
+
+const BitField = packed struct {
+    a: u3,
+    b: u3,
+    c: u2,
+};
+
+fn foo(bit_field: &const BitField) -> u3 {
+    return bar(&bit_field.b);
+}
+
+fn bar(x: &const u3) -> u3 {
+    return *x;
+}
+    )SOURCE", 1, ".tmp_source.zig:12:26: error: expected type '&const u3', found '&:3:6 const u3'");
+
 }
 
 ////////////////////////////////////////////////////////////////////////////////
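
For reference (not part of the patch itself): with this change, '&bit_field.b' in the new test case gets the type '&:3:6 const u3', where the two numbers are the field's starting bit offset (3) and its end (3 + 3 = 6) within the byte-aligned host integer, so it no longer implicitly casts to a plain '&const u3'. Loads through such a pointer use the formula in ir_render_load_ptr above: shift_amt = host_bit_count - bit_offset - unaligned_bit_count = 8 - 3 - 3 = 2 for field b. Below is a minimal sketch, in the same 2017-era Zig syntax as the test case, of one way a caller could satisfy the new error by copying the field first; the local name b_copy is made up and the snippet has not been run against this revision.

// Sketch only, not from the patch; reuses BitField and bar from the test case.
// Copying the packed field to a local yields a value whose address is an
// ordinary '&const u3', which bar accepts.
fn foo(bit_field: &const BitField) -> u3 {
    const b_copy: u3 = bit_field.b; // load through the &:3:6 pointer (lshr by 2, truncate to u3)
    return bar(&b_copy);
}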