diff --git a/src/stage1/ir.cpp b/src/stage1/ir.cpp
index dc74fb9c14..ea9bf6ee8b 100644
--- a/src/stage1/ir.cpp
+++ b/src/stage1/ir.cpp
@@ -24646,7 +24646,7 @@ static IrInstGen *ir_analyze_instruction_field_parent_ptr(IrAnalyze *ira,
     return ir_build_field_parent_ptr_gen(ira, &instruction->base.base, casted_field_ptr, field, result_type);
 }
 
-static TypeStructField *validate_byte_offset(IrAnalyze *ira,
+static TypeStructField *validate_host_int_byte_offset(IrAnalyze *ira,
         IrInstGen *type_value,
         IrInstGen *field_name_value,
         size_t *byte_offset)
@@ -24694,11 +24694,12 @@ static IrInstGen *ir_analyze_instruction_byte_offset_of(IrAnalyze *ira, IrInstSr
         return ira->codegen->invalid_inst_gen;
 
     IrInstGen *field_name_value = instruction->field_name->child;
-    size_t byte_offset = 0;
-    if (!validate_byte_offset(ira, type_value, field_name_value, &byte_offset))
+    size_t host_int_byte_offset = 0;
+    TypeStructField *field = nullptr;
+    if (!(field = validate_host_int_byte_offset(ira, type_value, field_name_value, &host_int_byte_offset)))
         return ira->codegen->invalid_inst_gen;
 
-
+    size_t byte_offset = host_int_byte_offset + (field->bit_offset_in_host / 8);
     return ir_const_unsigned(ira, &instruction->base.base, byte_offset);
 }
 
@@ -24707,12 +24708,12 @@ static IrInstGen *ir_analyze_instruction_bit_offset_of(IrAnalyze *ira, IrInstSrc
     if (type_is_invalid(type_value->value->type))
         return ira->codegen->invalid_inst_gen;
     IrInstGen *field_name_value = instruction->field_name->child;
-    size_t byte_offset = 0;
+    size_t host_int_byte_offset = 0;
     TypeStructField *field = nullptr;
-    if (!(field = validate_byte_offset(ira, type_value, field_name_value, &byte_offset)))
+    if (!(field = validate_host_int_byte_offset(ira, type_value, field_name_value, &host_int_byte_offset)))
         return ira->codegen->invalid_inst_gen;
 
-    size_t bit_offset = byte_offset * 8 + field->bit_offset_in_host;
+    size_t bit_offset = host_int_byte_offset * 8 + field->bit_offset_in_host;
     return ir_const_unsigned(ira, &instruction->base.base, bit_offset);
 }
 
diff --git a/test/stage1/behavior/sizeof_and_typeof.zig b/test/stage1/behavior/sizeof_and_typeof.zig
index 13078f5dd1..4223bf3373 100644
--- a/test/stage1/behavior/sizeof_and_typeof.zig
+++ b/test/stage1/behavior/sizeof_and_typeof.zig
@@ -28,6 +28,8 @@ const P = packed struct {
     e: u5,
     f: u16,
     g: u16,
+    h: u9,
+    i: u7,
 };
 
 test "@byteOffsetOf" {
@@ -39,6 +41,8 @@ test "@byteOffsetOf" {
     expect(@byteOffsetOf(P, "e") == 6);
     expect(@byteOffsetOf(P, "f") == 7);
     expect(@byteOffsetOf(P, "g") == 9);
+    expect(@byteOffsetOf(P, "h") == 11);
+    expect(@byteOffsetOf(P, "i") == 12);
 
     // Normal struct fields can be moved/padded
     var a: A = undefined;
@@ -51,6 +55,50 @@ test "@byteOffsetOf" {
     expect(@ptrToInt(&a.g) - @ptrToInt(&a) == @byteOffsetOf(A, "g"));
 }
 
+test "@byteOffsetOf packed struct, array length not power of 2 or multiple of native pointer width in bytes" {
+    const p3a_len = 3;
+    const P3 = packed struct {
+        a: [p3a_len]u8,
+        b: usize,
+    };
+    std.testing.expectEqual(0, @byteOffsetOf(P3, "a"));
+    std.testing.expectEqual(p3a_len, @byteOffsetOf(P3, "b"));
+
+    const p5a_len = 5;
+    const P5 = packed struct {
+        a: [p5a_len]u8,
+        b: usize,
+    };
+    std.testing.expectEqual(0, @byteOffsetOf(P5, "a"));
+    std.testing.expectEqual(p5a_len, @byteOffsetOf(P5, "b"));
+
+    const p6a_len = 6;
+    const P6 = packed struct {
+        a: [p6a_len]u8,
+        b: usize,
+    };
+    std.testing.expectEqual(0, @byteOffsetOf(P6, "a"));
+    std.testing.expectEqual(p6a_len, @byteOffsetOf(P6, "b"));
+
+    const p7a_len = 7;
+    const P7 = packed struct {
+        a: [p7a_len]u8,
+        b: usize,
+    };
+    std.testing.expectEqual(0, @byteOffsetOf(P7, "a"));
+    std.testing.expectEqual(p7a_len, @byteOffsetOf(P7, "b"));
+
+    const p9a_len = 9;
+    const P9 = packed struct {
+        a: [p9a_len]u8,
+        b: usize,
+    };
+    std.testing.expectEqual(0, @byteOffsetOf(P9, "a"));
+    std.testing.expectEqual(p9a_len, @byteOffsetOf(P9, "b"));
+
+    // 10, 11, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, 25 etc. are further cases
+}
+
 test "@bitOffsetOf" {
     // Packed structs have fixed memory layout
     expect(@bitOffsetOf(P, "a") == 0);
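
Note on the ir.cpp change above (not part of the patch itself): `@byteOffsetOf` on a packed struct previously returned only the byte offset of the field's host integer; it now also adds the field's position within that host integer (`bit_offset_in_host / 8`), so the result agrees with `@bitOffsetOf`. A minimal sketch of the resulting invariant, using an illustrative struct `Q` and test that are not in the patch:

```zig
const std = @import("std");
const expect = std.testing.expect;

// Illustrative only: `Q` and this test are not part of the patch.
const Q = packed struct {
    a: u8,
    b: u3,
    c: u5,
    d: u9,
    e: u7,
};

test "packed struct byte offset equals bit offset divided by 8" {
    inline for (@typeInfo(Q).Struct.fields) |f| {
        // With the fix, a field that starts mid-byte (like `e` above) still
        // reports the byte that actually contains its first bit.
        expect(@byteOffsetOf(Q, f.name) == @bitOffsetOf(Q, f.name) / 8);
    }
}
```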