diff --git a/src/codegen/llvm.zig b/src/codegen/llvm.zig index 578772d951..6d6aa2c9e4 100644 --- a/src/codegen/llvm.zig +++ b/src/codegen/llvm.zig @@ -770,37 +770,19 @@ pub const Object = struct { builder: Builder, module: *Module, - di_builder: ?if (build_options.have_llvm) *llvm.DIBuilder else noreturn, - /// One of these mappings: - /// - *Module.File => *DIFile - /// - *Module.Decl (Fn) => *DISubprogram - /// - *Module.Decl (Non-Fn) => *DIGlobalVariable - di_map: if (build_options.have_llvm) std.AutoHashMapUnmanaged(*const anyopaque, *llvm.DINode) else struct { - const K = *const anyopaque; - const V = noreturn; - const Self = @This(); + debug_compile_unit: Builder.Metadata, - metadata: ?noreturn = null, - size: Size = 0, - available: Size = 0, + debug_enums_fwd_ref: Builder.Metadata, + debug_globals_fwd_ref: Builder.Metadata, - pub const Size = u0; + debug_enums: std.ArrayListUnmanaged(Builder.Metadata), + debug_globals: std.ArrayListUnmanaged(Builder.Metadata), - pub fn deinit(self: *Self, allocator: Allocator) void { - _ = allocator; - self.* = undefined; - } + debug_type_map: std.AutoHashMapUnmanaged(Type, Builder.Metadata), + + debug_unresolved_namespace_scopes: std.AutoArrayHashMapUnmanaged(InternPool.NamespaceIndex, Builder.Metadata), - pub fn get(self: Self, key: K) ?V { - _ = self; - _ = key; - return null; - } - }, - di_compile_unit: ?if (build_options.have_llvm) *llvm.DICompileUnit else noreturn, - target_machine: if (build_options.have_llvm) *llvm.TargetMachine else void, - target_data: if (build_options.have_llvm) *llvm.TargetData else void, target: std.Target, /// Ideally we would use `llvm_module.getNamedFunction` to go from *Decl to LLVM function, /// but that has some downsides: @@ -820,7 +802,6 @@ pub const Object = struct { /// TODO when InternPool garbage collection is implemented, this map needs /// to be garbage collected as well. type_map: TypeMap, - di_type_map: DITypeMap, /// The LLVM global table which holds the names corresponding to Zig errors. /// Note that the values are not added until `emit`, when all errors in /// the compilation are known. @@ -850,146 +831,87 @@ pub const Object = struct { pub const TypeMap = std.AutoHashMapUnmanaged(InternPool.Index, Builder.Type); - /// This is an ArrayHashMap as opposed to a HashMap because in `emit` we - /// want to iterate over it while adding entries to it. 
- pub const DITypeMap = std.AutoArrayHashMapUnmanaged(InternPool.Index, AnnotatedDITypePtr); - pub fn create(arena: Allocator, comp: *Compilation) !*Object { if (build_options.only_c) unreachable; const gpa = comp.gpa; const target = comp.root_mod.resolved_target.result; const llvm_target_triple = try targetTriple(arena, target); const strip = comp.root_mod.strip; - const optimize_mode = comp.root_mod.optimize_mode; - const pic = comp.root_mod.pic; var builder = try Builder.init(.{ .allocator = gpa, - .use_lib_llvm = comp.config.use_lib_llvm, - .strip = strip or !comp.config.use_lib_llvm, // TODO + .use_lib_llvm = false, + .strip = strip, .name = comp.root_name, .target = target, .triple = llvm_target_triple, }); errdefer builder.deinit(); - var target_machine: if (build_options.have_llvm) *llvm.TargetMachine else void = undefined; - var target_data: if (build_options.have_llvm) *llvm.TargetData else void = undefined; - if (builder.useLibLlvm()) { - debug_info: { - switch (comp.config.debug_format) { - .strip => break :debug_info, - .code_view => builder.llvm.module.?.addModuleCodeViewFlag(), - .dwarf => |f| builder.llvm.module.?.addModuleDebugInfoFlag(f == .@"64"), - } - builder.llvm.di_builder = builder.llvm.module.?.createDIBuilder(true); + builder.data_layout = try builder.fmt("{}", .{DataLayoutBuilder{ .target = target }}); - // Don't use the version string here; LLVM misparses it when it - // includes the git revision. - const producer = try builder.fmt("zig {d}.{d}.{d}", .{ - build_options.semver.major, - build_options.semver.minor, - build_options.semver.patch, - }); - - // We fully resolve all paths at this point to avoid lack of - // source line info in stack traces or lack of debugging - // information which, if relative paths were used, would be - // very location dependent. - // TODO: the only concern I have with this is WASI as either host or target, should - // we leave the paths as relative then? - // TODO: This is totally wrong. In dwarf, paths are encoded as relative to - // a particular directory, and then the directory path is specified elsewhere. - // In the compiler frontend we have it stored correctly in this - // way already, but here we throw all that sweet information - // into the garbage can by converting into absolute paths. What - // a terrible tragedy. - const compile_unit_dir_z = blk: { - if (comp.module) |zcu| m: { - const d = try zcu.root_mod.root.joinStringZ(arena, ""); - if (d.len == 0) break :m; - if (std.fs.path.isAbsolute(d)) break :blk d; - const realpath = std.fs.realpathAlloc(arena, d) catch break :blk d; - break :blk try arena.dupeZ(u8, realpath); - } - const cwd = try std.process.getCwdAlloc(arena); - break :blk try arena.dupeZ(u8, cwd); - }; - - builder.llvm.di_compile_unit = builder.llvm.di_builder.?.createCompileUnit( - DW.LANG.C99, - builder.llvm.di_builder.?.createFile(comp.root_name, compile_unit_dir_z), - producer.slice(&builder).?, - optimize_mode != .Debug, - "", // flags - 0, // runtime version - "", // split name - 0, // dwo id - true, // emit debug info - ); + // We fully resolve all paths at this point to avoid lack of + // source line info in stack traces or lack of debugging + // information which, if relative paths were used, would be + // very location dependent. + // TODO: the only concern I have with this is WASI as either host or target, should + // we leave the paths as relative then? + // TODO: This is totally wrong. 
In dwarf, paths are encoded as relative to + // a particular directory, and then the directory path is specified elsewhere. + // In the compiler frontend we have it stored correctly in this + // way already, but here we throw all that sweet information + // into the garbage can by converting into absolute paths. What + // a terrible tragedy. + const compile_unit_dir = blk: { + if (comp.module) |zcu| m: { + const d = try zcu.root_mod.root.joinString(arena, ""); + if (d.len == 0) break :m; + if (std.fs.path.isAbsolute(d)) break :blk d; + break :blk std.fs.realpathAlloc(arena, d) catch break :blk d; } + break :blk try std.process.getCwdAlloc(arena); + }; - const opt_level: llvm.CodeGenOptLevel = if (optimize_mode == .Debug) - .None - else - .Aggressive; + const debug_file = try builder.debugFile( + try builder.string(compile_unit_dir), + try builder.string(comp.root_name), + ); - const reloc_mode: llvm.RelocMode = if (pic) - .PIC - else if (comp.config.link_mode == .Dynamic) - llvm.RelocMode.DynamicNoPIC - else - .Static; + const debug_enums_fwd_ref = try builder.debugForwardReference(); + const debug_globals_fwd_ref = try builder.debugForwardReference(); - const code_model: llvm.CodeModel = switch (comp.root_mod.code_model) { - .default => .Default, - .tiny => .Tiny, - .small => .Small, - .kernel => .Kernel, - .medium => .Medium, - .large => .Large, - }; + const debug_compile_unit = try builder.debugCompileUnit( + debug_file, + // Don't use the version string here; LLVM misparses it when it + // includes the git revision. + try builder.fmt("zig {d}.{d}.{d}", .{ + build_options.semver.major, + build_options.semver.minor, + build_options.semver.patch, + }), + debug_enums_fwd_ref, + debug_globals_fwd_ref, + .{ .optimized = comp.root_mod.optimize_mode != .Debug }, + ); - // TODO handle float ABI better- it should depend on the ABI portion of std.Target - const float_abi: llvm.ABIType = .Default; - - target_machine = llvm.TargetMachine.create( - builder.llvm.target.?, - builder.target_triple.slice(&builder).?, - if (target.cpu.model.llvm_name) |s| s.ptr else null, - comp.root_mod.resolved_target.llvm_cpu_features.?, - opt_level, - reloc_mode, - code_model, - comp.function_sections, - comp.data_sections, - float_abi, - if (target_util.llvmMachineAbi(target)) |s| s.ptr else null, + if (!builder.strip) { + const debug_info_version = try builder.debugModuleFlag( + try builder.debugConstant(try builder.intConst(.i32, 2)), + try builder.string("Debug Info Version"), + try builder.debugConstant(try builder.intConst(.i32, 3)), + ); + const dwarf_version = try builder.debugModuleFlag( + try builder.debugConstant(try builder.intConst(.i32, 2)), + try builder.string("Dwarf Version"), + try builder.debugConstant(try builder.intConst(.i32, 4)), ); - errdefer target_machine.dispose(); - target_data = target_machine.createTargetDataLayout(); - errdefer target_data.dispose(); + try builder.debugNamed(try builder.string("llvm.module.flags"), &.{ + debug_info_version, + dwarf_version, + }); - builder.llvm.module.?.setModuleDataLayout(target_data); - - if (pic) builder.llvm.module.?.setModulePICLevel(); - if (comp.config.pie) builder.llvm.module.?.setModulePIELevel(); - if (code_model != .Default) builder.llvm.module.?.setModuleCodeModel(code_model); - - if (comp.llvm_opt_bisect_limit >= 0) { - builder.llvm.context.setOptBisectLimit(comp.llvm_opt_bisect_limit); - } - - builder.data_layout = try builder.fmt("{}", .{DataLayoutBuilder{ .target = target }}); - if (std.debug.runtime_safety) { - const rep = 
target_data.stringRep(); - defer llvm.disposeMessage(rep); - std.testing.expectEqualStrings( - std.mem.span(rep), - builder.data_layout.slice(&builder).?, - ) catch unreachable; - } + try builder.debugNamed(try builder.string("llvm.dbg.cu"), &.{debug_compile_unit}); } const obj = try arena.create(Object); @@ -997,17 +919,18 @@ pub const Object = struct { .gpa = gpa, .builder = builder, .module = comp.module.?, - .di_map = .{}, - .di_builder = if (builder.useLibLlvm()) builder.llvm.di_builder else null, // TODO - .di_compile_unit = if (builder.useLibLlvm()) builder.llvm.di_compile_unit else null, - .target_machine = target_machine, - .target_data = target_data, + .debug_compile_unit = debug_compile_unit, + .debug_enums_fwd_ref = debug_enums_fwd_ref, + .debug_globals_fwd_ref = debug_globals_fwd_ref, + .debug_enums = .{}, + .debug_globals = .{}, + .debug_type_map = .{}, + .debug_unresolved_namespace_scopes = .{}, .target = target, .decl_map = .{}, .anon_decl_map = .{}, .named_enum_map = .{}, .type_map = .{}, - .di_type_map = .{}, .error_name_table = .none, .extern_collisions = .{}, .null_opt_usize = .no_init, @@ -1018,12 +941,10 @@ pub const Object = struct { pub fn deinit(self: *Object) void { const gpa = self.gpa; - self.di_map.deinit(gpa); - self.di_type_map.deinit(gpa); - if (self.builder.useLibLlvm()) { - self.target_data.dispose(); - self.target_machine.dispose(); - } + self.debug_globals.deinit(gpa); + self.debug_enums.deinit(gpa); + self.debug_type_map.deinit(gpa); + self.debug_unresolved_namespace_scopes.deinit(gpa); self.decl_map.deinit(gpa); self.anon_decl_map.deinit(gpa); self.named_enum_map.deinit(gpa); @@ -1193,26 +1114,29 @@ pub const Object = struct { try self.genCmpLtErrorsLenFunction(); try self.genModuleLevelAssembly(); - if (self.di_builder) |dib| { - // When lowering debug info for pointers, we emitted the element types as - // forward decls. Now we must go flesh those out. - // Here we iterate over a hash map while modifying it but it is OK because - // we never add or remove entries during this loop. 
+ { var i: usize = 0; - while (i < self.di_type_map.count()) : (i += 1) { - const value_ptr = &self.di_type_map.values()[i]; - const annotated = value_ptr.*; - if (!annotated.isFwdOnly()) continue; - const entry: Object.DITypeMap.Entry = .{ - .key_ptr = &self.di_type_map.keys()[i], - .value_ptr = value_ptr, - }; - _ = try self.lowerDebugTypeImpl(entry, .full, annotated.toDIType()); - } + while (i < self.debug_unresolved_namespace_scopes.count()) : (i += 1) { + const namespace_index = self.debug_unresolved_namespace_scopes.keys()[i]; + const fwd_ref = self.debug_unresolved_namespace_scopes.values()[i]; + const namespace = self.module.namespacePtr(namespace_index); - dib.finalize(); + const debug_type = try self.lowerDebugType(namespace.ty); + + self.builder.debugForwardReferenceSetType(fwd_ref, debug_type); + } } + self.builder.debugForwardReferenceSetType( + self.debug_enums_fwd_ref, + try self.builder.debugTuple(self.debug_enums.items), + ); + + self.builder.debugForwardReferenceSetType( + self.debug_globals_fwd_ref, + try self.builder.debugTuple(self.debug_globals.items), + ); + if (options.pre_ir_path) |path| { if (std.mem.eql(u8, path, "-")) { self.builder.dump(); @@ -1238,35 +1162,126 @@ pub const Object = struct { if (options.asm_path == null and options.bin_path == null and options.post_ir_path == null and options.post_bc_path == null) return; + var bitcode_arena_allocator = std.heap.ArenaAllocator.init( + std.heap.page_allocator, + ); + errdefer bitcode_arena_allocator.deinit(); + + const bitcode = try self.builder.toBitcode( + bitcode_arena_allocator.allocator(), + ); + if (options.post_bc_path) |path| { - if (!self.builder.useLibLlvm()) { - var arena_allocator = std.heap.ArenaAllocator.init(self.gpa); - defer arena_allocator.deinit(); - const arena = arena_allocator.allocator(); + var file = try std.fs.cwd().createFileZ(path, .{}); + defer file.close(); - var file = try std.fs.cwd().createFileZ(path, .{}); - defer file.close(); - const bitcode = try self.builder.toBitcode(arena); - - const ptr: [*]const u8 = @ptrCast(bitcode.ptr); - try file.writeAll(ptr[0..(bitcode.len * 4)]); - return; - } + const ptr: [*]const u8 = @ptrCast(bitcode.ptr); + try file.writeAll(ptr[0..(bitcode.len * 4)]); } - if (!self.builder.useLibLlvm()) { + if (!self.module.comp.config.use_lib_llvm) { log.err("emitting without libllvm not implemented", .{}); return error.FailedToEmit; } + Builder.initializeLLVMTarget(self.module.comp.root_mod.resolved_target.result.cpu.arch); + + const context: *llvm.Context = llvm.Context.create(); + defer context.dispose(); + + const module = blk: { + const bitcode_memory_buffer = llvm.MemoryBuffer.createMemoryBufferWithMemoryRange( + @ptrCast(bitcode.ptr), + bitcode.len * 4, + "BitcodeBuffer", + llvm.Bool.False, + ); + defer bitcode_memory_buffer.dispose(); + + var module: *llvm.Module = undefined; + if (context.parseBitcodeInContext2(bitcode_memory_buffer, &module).toBool()) { + std.debug.print("Failed to parse bitcode\n", .{}); + return error.FailedToEmit; + } + + break :blk module; + }; + bitcode_arena_allocator.deinit(); + + var error_message: [*:0]const u8 = undefined; + var target: *llvm.Target = undefined; + if (llvm.Target.getFromTriple( + self.builder.target_triple.slice(&self.builder).?, + &target, + &error_message, + ).toBool()) { + defer llvm.disposeMessage(error_message); + + log.err("LLVM failed to parse '{s}': {s}", .{ + self.builder.target_triple.slice(&self.builder).?, + error_message, + }); + @panic("Invalid LLVM triple"); + } + + const optimize_mode = 
self.module.comp.root_mod.optimize_mode; + const pic = self.module.comp.root_mod.pic; + + const opt_level: llvm.CodeGenOptLevel = if (optimize_mode == .Debug) + .None + else + .Aggressive; + + const reloc_mode: llvm.RelocMode = if (pic) + .PIC + else if (self.module.comp.config.link_mode == .Dynamic) + llvm.RelocMode.DynamicNoPIC + else + .Static; + + const code_model: llvm.CodeModel = switch (self.module.comp.root_mod.code_model) { + .default => .Default, + .tiny => .Tiny, + .small => .Small, + .kernel => .Kernel, + .medium => .Medium, + .large => .Large, + }; + + // TODO handle float ABI better- it should depend on the ABI portion of std.Target + const float_abi: llvm.ABIType = .Default; + + var target_machine = llvm.TargetMachine.create( + target, + self.builder.target_triple.slice(&self.builder).?, + if (self.module.comp.root_mod.resolved_target.result.cpu.model.llvm_name) |s| s.ptr else null, + self.module.comp.root_mod.resolved_target.llvm_cpu_features.?, + opt_level, + reloc_mode, + code_model, + self.module.comp.function_sections, + self.module.comp.data_sections, + float_abi, + if (target_util.llvmMachineAbi(self.module.comp.root_mod.resolved_target.result)) |s| s.ptr else null, + ); + errdefer target_machine.dispose(); + + if (pic) module.setModulePICLevel(); + if (self.module.comp.config.pie) module.setModulePIELevel(); + if (code_model != .Default) module.setModuleCodeModel(code_model); + + if (self.module.comp.llvm_opt_bisect_limit >= 0) { + context.setOptBisectLimit(self.module.comp.llvm_opt_bisect_limit); + } + // Unfortunately, LLVM shits the bed when we ask for both binary and assembly. // So we call the entire pipeline multiple times if this is requested. - var error_message: [*:0]const u8 = undefined; + // var error_message: [*:0]const u8 = undefined; var emit_bin_path = options.bin_path; var post_ir_path = options.post_ir_path; if (options.asm_path != null and options.bin_path != null) { - if (self.target_machine.emitToFile( - self.builder.llvm.module.?, + if (target_machine.emitToFile( + module, &error_message, options.is_debug, options.is_small, @@ -1289,8 +1304,8 @@ pub const Object = struct { post_ir_path = null; } - if (self.target_machine.emitToFile( - self.builder.llvm.module.?, + if (target_machine.emitToFile( + module, &error_message, options.is_debug, options.is_small, @@ -1300,7 +1315,7 @@ pub const Object = struct { options.asm_path, emit_bin_path, post_ir_path, - options.post_bc_path, + null, )) { defer llvm.disposeMessage(error_message); @@ -1440,7 +1455,7 @@ pub const Object = struct { if (isByRef(param_ty, zcu)) { const alignment = param_ty.abiAlignment(zcu).toLlvm(); const param_llvm_ty = param.typeOfWip(&wip); - const arg_ptr = try buildAllocaInner(&wip, false, param_llvm_ty, alignment, target); + const arg_ptr = try buildAllocaInner(&wip, param_llvm_ty, alignment, target); _ = try wip.store(.normal, param, arg_ptr, alignment); args.appendAssumeCapacity(arg_ptr); } else { @@ -1488,7 +1503,7 @@ pub const Object = struct { const param_llvm_ty = try o.lowerType(param_ty); const alignment = param_ty.abiAlignment(zcu).toLlvm(); - const arg_ptr = try buildAllocaInner(&wip, false, param_llvm_ty, alignment, target); + const arg_ptr = try buildAllocaInner(&wip, param_llvm_ty, alignment, target); _ = try wip.store(.normal, param, arg_ptr, alignment); args.appendAssumeCapacity(if (isByRef(param_ty, zcu)) @@ -1533,7 +1548,7 @@ pub const Object = struct { const param_ty = Type.fromInterned(fn_info.param_types.get(ip)[it.zig_index - 1]); const param_llvm_ty = try 
o.lowerType(param_ty); const param_alignment = param_ty.abiAlignment(zcu).toLlvm(); - const arg_ptr = try buildAllocaInner(&wip, false, param_llvm_ty, param_alignment, target); + const arg_ptr = try buildAllocaInner(&wip, param_llvm_ty, param_alignment, target); const llvm_ty = try o.builder.structType(.normal, field_types); for (0..field_types.len) |field_i| { const param = wip.arg(llvm_arg_i); @@ -1563,7 +1578,7 @@ pub const Object = struct { llvm_arg_i += 1; const alignment = param_ty.abiAlignment(zcu).toLlvm(); - const arg_ptr = try buildAllocaInner(&wip, false, param_llvm_ty, alignment, target); + const arg_ptr = try buildAllocaInner(&wip, param_llvm_ty, alignment, target); _ = try wip.store(.normal, param, arg_ptr, alignment); args.appendAssumeCapacity(if (isByRef(param_ty, zcu)) @@ -1578,7 +1593,7 @@ pub const Object = struct { llvm_arg_i += 1; const alignment = param_ty.abiAlignment(zcu).toLlvm(); - const arg_ptr = try buildAllocaInner(&wip, false, param_llvm_ty, alignment, target); + const arg_ptr = try buildAllocaInner(&wip, param_llvm_ty, alignment, target); _ = try wip.store(.normal, param, arg_ptr, alignment); args.appendAssumeCapacity(if (isByRef(param_ty, zcu)) @@ -1592,40 +1607,34 @@ pub const Object = struct { function_index.setAttributes(try attributes.finish(&o.builder), &o.builder); - var di_file: ?if (build_options.have_llvm) *llvm.DIFile else noreturn = null; - var di_scope: ?if (build_options.have_llvm) *llvm.DIScope else noreturn = null; - - if (o.di_builder) |dib| { - di_file = try o.getDIFile(gpa, namespace.file_scope); + const file = try o.getDebugFile(namespace.file_scope); + const subprogram = blk: { const line_number = decl.src_line + 1; const is_internal_linkage = decl.val.getExternFunc(zcu) == null and !zcu.decl_exports.contains(decl_index); - const noret_bit: c_uint = if (fn_info.return_type == .noreturn_type) + const noret_bit: u29 = if (fn_info.return_type == .noreturn_type) llvm.DIFlags.NoReturn else 0; - const decl_di_ty = try o.lowerDebugType(decl.ty, .full); - const subprogram = dib.createFunction( - di_file.?.toScope(), - ip.stringToSlice(decl.name), - function_index.name(&o.builder).slice(&o.builder).?, - di_file.?, + const debug_decl_type = try o.lowerDebugType(decl.ty); + + break :blk try o.builder.debugSubprogram( + file, + try o.builder.string(ip.stringToSlice(decl.name)), + function_index.name(&o.builder), line_number, - decl_di_ty, - is_internal_linkage, - true, // is definition - line_number + func.lbrace_line, // scope line - llvm.DIFlags.StaticMember | noret_bit, - owner_mod.optimize_mode != .Debug, - null, // decl_subprogram + line_number + func.lbrace_line, + debug_decl_type, + .{ + .optimized = owner_mod.optimize_mode != .Debug, + .definition = true, + .local = is_internal_linkage, + .debug_info_flags = llvm.DIFlags.StaticMember | noret_bit, + }, + o.debug_compile_unit, ); - try o.di_map.put(gpa, decl, subprogram.toNode()); - - function_index.toLlvm(&o.builder).fnSetSubprogram(subprogram); - - di_scope = subprogram.toScope(); - } + }; var fg: FuncGen = .{ .gpa = gpa, @@ -1639,8 +1648,9 @@ pub const Object = struct { .func_inst_table = .{}, .blocks = .{}, .sync_scope = if (owner_mod.single_threaded) .singlethread else .system, - .di_scope = di_scope, - .di_file = di_file, + .file = file, + .subprogram = subprogram, + .current_scope = subprogram, .base_line = dg.decl.src_line, .prev_dbg_line = 0, .prev_dbg_column = 0, @@ -1726,26 +1736,7 @@ pub const Object = struct { global_index.setUnnamedAddr(.default, &self.builder); if 
(comp.config.dll_export_fns) global_index.setDllStorageClass(.default, &self.builder); - if (self.di_map.get(decl)) |di_node| { - const decl_name_slice = decl_name.slice(&self.builder).?; - if (try decl.isFunction(mod)) { - const di_func: *llvm.DISubprogram = @ptrCast(di_node); - const linkage_name = llvm.MDString.get( - self.builder.llvm.context, - decl_name_slice.ptr, - decl_name_slice.len, - ); - di_func.replaceLinkageName(linkage_name); - } else { - const di_global: *llvm.DIGlobalVariable = @ptrCast(di_node); - const linkage_name = llvm.MDString.get( - self.builder.llvm.context, - decl_name_slice.ptr, - decl_name_slice.len, - ); - di_global.replaceLinkageName(linkage_name); - } - } + if (decl.val.getVariable(mod)) |decl_var| { global_index.ptrConst(&self.builder).kind.variable.setThreadLocal( if (decl_var.is_threadlocal) .generaldynamic else .default, @@ -1759,27 +1750,6 @@ pub const Object = struct { ); try global_index.rename(main_exp_name, &self.builder); - if (self.di_map.get(decl)) |di_node| { - const main_exp_name_slice = main_exp_name.slice(&self.builder).?; - if (try decl.isFunction(mod)) { - const di_func: *llvm.DISubprogram = @ptrCast(di_node); - const linkage_name = llvm.MDString.get( - self.builder.llvm.context, - main_exp_name_slice.ptr, - main_exp_name_slice.len, - ); - di_func.replaceLinkageName(linkage_name); - } else { - const di_global: *llvm.DIGlobalVariable = @ptrCast(di_node); - const linkage_name = llvm.MDString.get( - self.builder.llvm.context, - main_exp_name_slice.ptr, - main_exp_name_slice.len, - ); - di_global.replaceLinkageName(linkage_name); - } - } - if (decl.val.getVariable(mod)) |decl_var| if (decl_var.is_threadlocal) global_index.ptrConst(&self.builder).kind .variable.setThreadLocal(.generaldynamic, &self.builder); @@ -1909,119 +1879,64 @@ pub const Object = struct { global.delete(&self.builder); } - fn getDIFile(o: *Object, gpa: Allocator, file: *const Module.File) !*llvm.DIFile { - const gop = try o.di_map.getOrPut(gpa, file); - errdefer assert(o.di_map.remove(file)); - if (gop.found_existing) { - return @ptrCast(gop.value_ptr.*); - } - const dir_path_z = d: { - var buffer: [std.fs.MAX_PATH_BYTES]u8 = undefined; - const sub_path = std.fs.path.dirname(file.sub_file_path) orelse ""; - const dir_path = try file.mod.root.joinStringZ(gpa, sub_path); - if (std.fs.path.isAbsolute(dir_path)) break :d dir_path; - const abs = std.fs.realpath(dir_path, &buffer) catch break :d dir_path; - gpa.free(dir_path); - break :d try gpa.dupeZ(u8, abs); - }; - defer gpa.free(dir_path_z); - const sub_file_path_z = try gpa.dupeZ(u8, std.fs.path.basename(file.sub_file_path)); - defer gpa.free(sub_file_path_z); - const di_file = o.di_builder.?.createFile(sub_file_path_z, dir_path_z); - gop.value_ptr.* = di_file.toNode(); - return di_file; + fn getDebugFile(o: *Object, file: *const Module.File) Allocator.Error!Builder.Metadata { + return try o.builder.debugFile( + if (std.fs.path.dirname(file.sub_file_path)) |dirname| try o.builder.string(dirname) else .empty, + try o.builder.string(std.fs.path.basename(file.sub_file_path)), + ); } - const DebugResolveStatus = enum { fwd, full }; - - /// In the implementation of this function, it is required to store a forward decl - /// into `gop` before making any recursive calls (even directly). 
- fn lowerDebugType( + pub fn lowerDebugType( o: *Object, ty: Type, - resolve: DebugResolveStatus, - ) Allocator.Error!*llvm.DIType { - const gpa = o.gpa; - // Be careful not to reference this `gop` variable after any recursive calls - // to `lowerDebugType`. - const gop = try o.di_type_map.getOrPut(gpa, ty.toIntern()); - if (gop.found_existing) { - const annotated = gop.value_ptr.*; - switch (annotated) { - // This type is currently attempting to be resolved fully, so make - // sure a second recursion through the types uses forward resolution. - .null => assert(resolve == .fwd), - // This type already has at least forward resolution, only resolve - // fully during full resolution. - _ => { - const di_type = annotated.toDIType(); - if (!annotated.isFwdOnly() or resolve == .fwd) { - return di_type; - } - const entry: Object.DITypeMap.Entry = .{ - .key_ptr = gop.key_ptr, - .value_ptr = gop.value_ptr, - }; - return o.lowerDebugTypeImpl(entry, resolve, di_type); - }, - } - } else gop.value_ptr.* = .null; - errdefer if (!gop.found_existing) assert(o.di_type_map.orderedRemove(ty.toIntern())); - const entry: Object.DITypeMap.Entry = .{ - .key_ptr = gop.key_ptr, - .value_ptr = gop.value_ptr, - }; - return o.lowerDebugTypeImpl(entry, resolve, null); - } - - /// This is a helper function used by `lowerDebugType`. - fn lowerDebugTypeImpl( - o: *Object, - gop: Object.DITypeMap.Entry, - resolve: DebugResolveStatus, - opt_fwd_decl: ?*llvm.DIType, - ) Allocator.Error!*llvm.DIType { - const ty = Type.fromInterned(gop.key_ptr.*); + ) Allocator.Error!Builder.Metadata { + if (o.builder.strip) return Builder.Metadata.none; const gpa = o.gpa; const target = o.target; - const dib = o.di_builder.?; const mod = o.module; const ip = &mod.intern_pool; + + if (o.debug_type_map.get(ty)) |debug_type| return debug_type; + switch (ty.zigTypeTag(mod)) { - .Void, .NoReturn => { - const di_type = dib.createBasicType("void", 0, DW.ATE.signed); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_type); - return di_type; + .Void, + .NoReturn, + => { + const debug_void_type = try o.builder.debugSignedType( + try o.builder.string("void"), + 0, + ); + try o.debug_type_map.put(gpa, ty, debug_void_type); + return debug_void_type; }, .Int => { const info = ty.intInfo(mod); assert(info.bits != 0); const name = try o.allocTypeName(ty); defer gpa.free(name); - const dwarf_encoding: c_uint = switch (info.signedness) { - .signed => DW.ATE.signed, - .unsigned => DW.ATE.unsigned, + const builder_name = try o.builder.string(name); + const debug_bits = ty.abiSize(mod) * 8; // lldb cannot handle non-byte sized types + const debug_int_type = switch (info.signedness) { + .signed => try o.builder.debugSignedType(builder_name, debug_bits), + .unsigned => try o.builder.debugUnsignedType(builder_name, debug_bits), }; - const di_bits = ty.abiSize(mod) * 8; // lldb cannot handle non-byte sized types - const di_type = dib.createBasicType(name, di_bits, dwarf_encoding); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_type); - return di_type; + try o.debug_type_map.put(gpa, ty, debug_int_type); + return debug_int_type; }, .Enum => { const owner_decl_index = ty.getOwnerDecl(mod); const owner_decl = o.module.declPtr(owner_decl_index); if (!ty.hasRuntimeBitsIgnoreComptime(mod)) { - const enum_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index); - // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType` - // means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(enum_di_ty)); - return enum_di_ty; + const debug_enum_type = try o.makeEmptyNamespaceDebugType(owner_decl_index); + try o.debug_type_map.put(gpa, ty, debug_enum_type); + try o.debug_enums.append(gpa, debug_enum_type); + return debug_enum_type; } const enum_type = ip.indexToKey(ty.toIntern()).enum_type; - const enumerators = try gpa.alloc(*llvm.DIEnumerator, enum_type.names.len); + const enumerators = try gpa.alloc(Builder.Metadata, enum_type.names.len); defer gpa.free(enumerators); const int_ty = Type.fromInterned(enum_type.tag_ty); @@ -2029,66 +1944,59 @@ pub const Object = struct { assert(int_info.bits != 0); for (enum_type.names.get(ip), 0..) |field_name_ip, i| { - const field_name_z = ip.stringToSlice(field_name_ip); - var bigint_space: Value.BigIntSpace = undefined; const bigint = if (enum_type.values.len != 0) Value.fromInterned(enum_type.values.get(ip)[i]).toBigInt(&bigint_space, mod) else std.math.big.int.Mutable.init(&bigint_space.limbs, i).toConst(); - if (bigint.limbs.len == 1) { - enumerators[i] = dib.createEnumerator(field_name_z, bigint.limbs[0], int_info.signedness == .unsigned); - continue; - } - if (@sizeOf(usize) == @sizeOf(u64)) { - enumerators[i] = dib.createEnumerator2( - field_name_z, - @intCast(bigint.limbs.len), - bigint.limbs.ptr, - int_info.bits, - int_info.signedness == .unsigned, - ); - continue; - } - @panic("TODO implement bigint debug enumerators to llvm int for 32-bit compiler builds"); + enumerators[i] = try o.builder.debugEnumerator( + try o.builder.string(ip.stringToSlice(field_name_ip)), + int_ty.isUnsignedInt(mod), + int_info.bits, + bigint, + ); } - const di_file = try o.getDIFile(gpa, mod.namespacePtr(owner_decl.src_namespace).file_scope); - const di_scope = try o.namespaceToDebugScope(owner_decl.src_namespace); + const file = try o.getDebugFile(mod.namespacePtr(owner_decl.src_namespace).file_scope); + const scope = try o.namespaceToDebugScope(owner_decl.src_namespace); const name = try o.allocTypeName(ty); defer gpa.free(name); - const enum_di_ty = dib.createEnumerationType( - di_scope, - name, - di_file, - owner_decl.src_node + 1, + const debug_enum_type = try o.builder.debugEnumerationType( + try o.builder.string(name), + file, + scope, + owner_decl.src_node + 1, // Line + try o.lowerDebugType(int_ty), ty.abiSize(mod) * 8, ty.abiAlignment(mod).toByteUnits(0) * 8, - enumerators.ptr, - @intCast(enumerators.len), - try o.lowerDebugType(int_ty, resolve), - "", + try o.builder.debugTuple(enumerators), ); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(enum_di_ty)); - return enum_di_ty; + + try o.debug_type_map.put(gpa, ty, debug_enum_type); + try o.debug_enums.append(gpa, debug_enum_type); + return debug_enum_type; }, .Float => { const bits = ty.floatBits(target); const name = try o.allocTypeName(ty); defer gpa.free(name); - const di_type = dib.createBasicType(name, bits, DW.ATE.float); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_type); - return di_type; + const debug_float_type = try o.builder.debugFloatType( + try o.builder.string(name), + bits, + ); + try o.debug_type_map.put(gpa, ty, debug_float_type); + return debug_float_type; }, .Bool => { - const di_bits = 8; // lldb cannot handle non-byte sized types - const di_type = dib.createBasicType("bool", di_bits, DW.ATE.boolean); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_type); - return di_type; + const debug_bool_type = try o.builder.debugBoolType( + try o.builder.string("bool"), + 8, // lldb cannot handle non-byte sized types + ); + try o.debug_type_map.put(gpa, ty, debug_bool_type); + return debug_bool_type; }, .Pointer => { // Normalize everything that the debug info does not represent. @@ -2118,136 +2026,145 @@ pub const Object = struct { }, }, }); - const ptr_di_ty = try o.lowerDebugType(bland_ptr_ty, resolve); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.init(ptr_di_ty, resolve)); - return ptr_di_ty; + const debug_ptr_type = try o.lowerDebugType(bland_ptr_ty); + try o.debug_type_map.put(gpa, ty, debug_ptr_type); + return debug_ptr_type; } + const debug_fwd_ref = try o.builder.debugForwardReference(); + + // Set as forward reference while the type is lowered in case it references itself + try o.debug_type_map.put(gpa, ty, debug_fwd_ref); + if (ty.isSlice(mod)) { const ptr_ty = ty.slicePtrFieldType(mod); const len_ty = Type.usize; const name = try o.allocTypeName(ty); defer gpa.free(name); - const di_file: ?*llvm.DIFile = null; const line = 0; - const compile_unit_scope = o.di_compile_unit.?.toScope(); - - const fwd_decl = opt_fwd_decl orelse blk: { - const fwd_decl = dib.createReplaceableCompositeType( - DW.TAG.structure_type, - name.ptr, - compile_unit_scope, - di_file, - line, - ); - gop.value_ptr.* = AnnotatedDITypePtr.initFwd(fwd_decl); - if (resolve == .fwd) return fwd_decl; - break :blk fwd_decl; - }; const ptr_size = ptr_ty.abiSize(mod); const ptr_align = ptr_ty.abiAlignment(mod); const len_size = len_ty.abiSize(mod); const len_align = len_ty.abiAlignment(mod); - var offset: u64 = 0; - offset += ptr_size; - offset = len_align.forward(offset); - const len_offset = offset; + const len_offset = len_align.forward(ptr_size); - const fields: [2]*llvm.DIType = .{ - dib.createMemberType( - fwd_decl.toScope(), - "ptr", - di_file, - line, - ptr_size * 8, // size in bits - ptr_align.toByteUnits(0) * 8, // align in bits - 0, // offset in bits - 0, // flags - try o.lowerDebugType(ptr_ty, resolve), - ), - dib.createMemberType( - fwd_decl.toScope(), - "len", - di_file, - line, - len_size * 8, // size in bits - len_align.toByteUnits(0) * 8, // align in bits - len_offset * 8, // offset in bits - 0, // flags - try o.lowerDebugType(len_ty, resolve), - ), - }; - - const full_di_ty = dib.createStructType( - compile_unit_scope, - name.ptr, - di_file, - line, - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - &fields, - 
fields.len, - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_ptr_type = try o.builder.debugMemberType( + try o.builder.string("ptr"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(ptr_ty), + ptr_size * 8, + ptr_align.toByteUnits(0) * 8, + 0, // Offset ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + const debug_len_type = try o.builder.debugMemberType( + try o.builder.string("len"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(len_ty), + len_size * 8, + len_align.toByteUnits(0) * 8, + len_offset * 8, + ); + + const debug_slice_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + line, + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(&.{ + debug_ptr_type, + debug_len_type, + }), + ); + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_slice_type); + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_slice_type; + + return debug_slice_type; } - const elem_di_ty = try o.lowerDebugType(Type.fromInterned(ptr_info.child), .fwd); + const debug_elem_ty = try o.lowerDebugType(Type.fromInterned(ptr_info.child)); + const name = try o.allocTypeName(ty); defer gpa.free(name); - const ptr_di_ty = dib.createPointerType( - elem_di_ty, + + const debug_ptr_type = try o.builder.debugPointerType( + try o.builder.string(name), + Builder.Metadata.none, // File + Builder.Metadata.none, // Scope + 0, // Line + debug_elem_ty, target.ptrBitWidth(), ty.ptrAlignment(mod).toByteUnits(0) * 8, - name, + 0, // Offset ); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(ptr_di_ty)); - return ptr_di_ty; + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_ptr_type); + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_ptr_type; + + return debug_ptr_type; }, .Opaque => { if (ty.toIntern() == .anyopaque_type) { - const di_ty = dib.createBasicType("anyopaque", 0, DW.ATE.signed); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_ty); - return di_ty; + const debug_opaque_type = try o.builder.debugSignedType( + try o.builder.string("anyopaque"), + 0, + ); + try o.debug_type_map.put(gpa, ty, debug_opaque_type); + return debug_opaque_type; } + const name = try o.allocTypeName(ty); defer gpa.free(name); const owner_decl_index = ty.getOwnerDecl(mod); const owner_decl = o.module.declPtr(owner_decl_index); - const opaque_di_ty = dib.createForwardDeclType( - DW.TAG.structure_type, - name, + const debug_opaque_type = try o.builder.debugStructType( + try o.builder.string(name), + try o.getDebugFile(mod.namespacePtr(owner_decl.src_namespace).file_scope), try o.namespaceToDebugScope(owner_decl.src_namespace), - try o.getDIFile(gpa, mod.namespacePtr(owner_decl.src_namespace).file_scope), - owner_decl.src_node + 1, + owner_decl.src_node + 1, // Line + Builder.Metadata.none, // Underlying type + 0, // Size + 0, // Align + Builder.Metadata.none, // Fields ); - // The recursive call to `lowerDebugType` va `namespaceToDebugScope` - // means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(opaque_di_ty)); - return opaque_di_ty; + try o.debug_type_map.put(gpa, ty, debug_opaque_type); + return debug_opaque_type; }, .Array => { - const array_di_ty = dib.createArrayType( + const debug_array_type = try o.builder.debugArrayType( + Builder.String.empty, // Name + Builder.Metadata.none, // File + Builder.Metadata.none, // Scope + 0, // Line + try o.lowerDebugType(ty.childType(mod)), ty.abiSize(mod) * 8, ty.abiAlignment(mod).toByteUnits(0) * 8, - try o.lowerDebugType(ty.childType(mod), resolve), - @intCast(ty.arrayLen(mod)), + try o.builder.debugTuple(&.{ + try o.builder.debugSubrange( + try o.builder.debugConstant(try o.builder.intConst(.i64, 0)), + try o.builder.debugConstant(try o.builder.intConst(.i64, ty.arrayLen(mod))), + ), + }), ); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(array_di_ty)); - return array_di_ty; + try o.debug_type_map.put(gpa, ty, debug_array_type); + return debug_array_type; }, .Vector => { const elem_ty = ty.elemType2(mod); @@ -2255,146 +2172,136 @@ pub const Object = struct { // @bitSizOf(elem) * len > @bitSizOf(vec). // Neither gdb nor lldb seem to be able to display non-byte sized // vectors properly. 
- const elem_di_type = switch (elem_ty.zigTypeTag(mod)) { + const debug_elem_type = switch (elem_ty.zigTypeTag(mod)) { .Int => blk: { const info = elem_ty.intInfo(mod); assert(info.bits != 0); const name = try o.allocTypeName(ty); defer gpa.free(name); - const dwarf_encoding: c_uint = switch (info.signedness) { - .signed => DW.ATE.signed, - .unsigned => DW.ATE.unsigned, + const builder_name = try o.builder.string(name); + break :blk switch (info.signedness) { + .signed => try o.builder.debugSignedType(builder_name, info.bits), + .unsigned => try o.builder.debugUnsignedType(builder_name, info.bits), }; - break :blk dib.createBasicType(name, info.bits, dwarf_encoding); }, - .Bool => dib.createBasicType("bool", 1, DW.ATE.boolean), - else => try o.lowerDebugType(ty.childType(mod), resolve), + .Bool => try o.builder.debugBoolType( + try o.builder.string("bool"), + 1, + ), + else => try o.lowerDebugType(ty.childType(mod)), }; - const vector_di_ty = dib.createVectorType( + const debug_vector_type = try o.builder.debugArrayType( + Builder.String.empty, // Name + Builder.Metadata.none, // File + Builder.Metadata.none, // Scope + 0, // Line + debug_elem_type, ty.abiSize(mod) * 8, - @intCast(ty.abiAlignment(mod).toByteUnits(0) * 8), - elem_di_type, - ty.vectorLen(mod), + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(&.{ + try o.builder.debugSubrange( + try o.builder.debugConstant(try o.builder.intConst(.i64, 0)), + try o.builder.debugConstant(try o.builder.intConst(.i64, ty.vectorLen(mod))), + ), + }), ); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(vector_di_ty)); - return vector_di_ty; + + try o.debug_type_map.put(gpa, ty, debug_vector_type); + return debug_vector_type; }, .Optional => { const name = try o.allocTypeName(ty); defer gpa.free(name); const child_ty = ty.optionalChild(mod); if (!child_ty.hasRuntimeBitsIgnoreComptime(mod)) { - const di_bits = 8; // lldb cannot handle non-byte sized types - const di_ty = dib.createBasicType(name, di_bits, DW.ATE.boolean); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_ty); - return di_ty; - } - if (ty.optionalReprIsPayload(mod)) { - const ptr_di_ty = try o.lowerDebugType(child_ty, resolve); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.init(ptr_di_ty, resolve)); - return ptr_di_ty; + const debug_bool_type = try o.builder.debugBoolType( + try o.builder.string(name), + 8, + ); + try o.debug_type_map.put(gpa, ty, debug_bool_type); + return debug_bool_type; } - const di_file: ?*llvm.DIFile = null; - const line = 0; - const compile_unit_scope = o.di_compile_unit.?.toScope(); - const fwd_decl = opt_fwd_decl orelse blk: { - const fwd_decl = dib.createReplaceableCompositeType( - DW.TAG.structure_type, - name.ptr, - compile_unit_scope, - di_file, - line, - ); - gop.value_ptr.* = AnnotatedDITypePtr.initFwd(fwd_decl); - if (resolve == .fwd) return fwd_decl; - break :blk fwd_decl; - }; + const debug_fwd_ref = try o.builder.debugForwardReference(); + + // Set as forward reference while the type is lowered in case it references itself + try o.debug_type_map.put(gpa, ty, debug_fwd_ref); + + if (ty.optionalReprIsPayload(mod)) { + const debug_optional_type = try o.lowerDebugType(child_ty); + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_optional_type); + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_optional_type; + + return debug_optional_type; + } const non_null_ty = Type.u8; const payload_size = child_ty.abiSize(mod); const payload_align = child_ty.abiAlignment(mod); const non_null_size = non_null_ty.abiSize(mod); const non_null_align = non_null_ty.abiAlignment(mod); + const non_null_offset = non_null_align.forward(payload_size); - var offset: u64 = 0; - offset += payload_size; - offset = non_null_align.forward(offset); - const non_null_offset = offset; - - const fields: [2]*llvm.DIType = .{ - dib.createMemberType( - fwd_decl.toScope(), - "data", - di_file, - line, - payload_size * 8, // size in bits - payload_align.toByteUnits(0) * 8, // align in bits - 0, // offset in bits - 0, // flags - try o.lowerDebugType(child_ty, resolve), - ), - dib.createMemberType( - fwd_decl.toScope(), - "some", - di_file, - line, - non_null_size * 8, // size in bits - non_null_align.toByteUnits(0) * 8, // align in bits - non_null_offset * 8, // offset in bits - 0, // flags - try o.lowerDebugType(non_null_ty, resolve), - ), - }; - - const full_di_ty = dib.createStructType( - compile_unit_scope, - name.ptr, - di_file, - line, - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - &fields, - fields.len, - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_data_type = try o.builder.debugMemberType( + try o.builder.string("data"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(child_ty), + payload_size * 8, + payload_align.toByteUnits(0) * 8, + 0, // Offset ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + const debug_some_type = try o.builder.debugMemberType( + try o.builder.string("some"), + Builder.Metadata.none, + debug_fwd_ref, + 0, + try o.lowerDebugType(non_null_ty), + non_null_size * 8, + non_null_align.toByteUnits(0) * 8, + non_null_offset * 8, + ); + + const debug_optional_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(&.{ + debug_data_type, + debug_some_type, + }), + ); + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_optional_type); + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_optional_type; + + return debug_optional_type; }, .ErrorUnion => { const payload_ty = ty.errorUnionPayload(mod); if (!payload_ty.hasRuntimeBitsIgnoreComptime(mod)) { - const err_set_di_ty = try o.lowerDebugType(Type.anyerror, resolve); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(err_set_di_ty)); - return err_set_di_ty; + // TODO: Maybe remove? + const debug_error_union_type = try o.lowerDebugType(Type.anyerror); + try o.debug_type_map.put(gpa, ty, debug_error_union_type); + return debug_error_union_type; } + const name = try o.allocTypeName(ty); defer gpa.free(name); - const di_file: ?*llvm.DIFile = null; - const line = 0; - const compile_unit_scope = o.di_compile_unit.?.toScope(); - const fwd_decl = opt_fwd_decl orelse blk: { - const fwd_decl = dib.createReplaceableCompositeType( - DW.TAG.structure_type, - name.ptr, - compile_unit_scope, - di_file, - line, - ); - gop.value_ptr.* = AnnotatedDITypePtr.initFwd(fwd_decl); - if (resolve == .fwd) return fwd_decl; - break :blk fwd_decl; - }; const error_size = Type.anyerror.abiSize(mod); const error_align = Type.anyerror.abiAlignment(mod); @@ -2417,59 +2324,55 @@ pub const Object = struct { error_offset = error_align.forward(payload_size); } - var fields: [2]*llvm.DIType = undefined; - fields[error_index] = dib.createMemberType( - fwd_decl.toScope(), - "tag", - di_file, - line, - error_size * 8, // size in bits - error_align.toByteUnits(0) * 8, // align in bits - error_offset * 8, // offset in bits - 0, // flags - try o.lowerDebugType(Type.anyerror, resolve), + const debug_fwd_ref = try o.builder.debugForwardReference(); + + var fields: [2]Builder.Metadata = undefined; + fields[error_index] = try o.builder.debugMemberType( + try o.builder.string("tag"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(Type.anyerror), + error_size * 8, + error_align.toByteUnits(0) * 8, + error_offset * 8, ); - fields[payload_index] = dib.createMemberType( - fwd_decl.toScope(), - "value", - di_file, - line, - payload_size * 8, // size in bits - payload_align.toByteUnits(0) * 8, // align in bits - payload_offset * 8, // offset in bits - 0, // flags - try o.lowerDebugType(payload_ty, resolve), + fields[payload_index] = try o.builder.debugMemberType( + try o.builder.string("value"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(payload_ty), + payload_size * 8, + payload_align.toByteUnits(0) * 8, + payload_offset * 8, ); - const full_di_ty = 
dib.createStructType( - compile_unit_scope, - name.ptr, - di_file, - line, - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - &fields, - fields.len, - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_error_union_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(&fields), ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_error_union_type); + + try o.debug_type_map.put(gpa, ty, debug_error_union_type); + return debug_error_union_type; }, .ErrorSet => { - // TODO make this a proper enum with all the error codes in it. - // will need to consider how to take incremental compilation into account. - const di_ty = dib.createBasicType("anyerror", 16, DW.ATE.unsigned); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_ty); - return di_ty; + const debug_error_set = try o.builder.debugUnsignedType( + try o.builder.string("anyerror"), + 16, + ); + try o.debug_type_map.put(gpa, ty, debug_error_set); + return debug_error_set; }, .Struct => { - const compile_unit_scope = o.di_compile_unit.?.toScope(); const name = try o.allocTypeName(ty); defer gpa.free(name); @@ -2477,40 +2380,28 @@ pub const Object = struct { const backing_int_ty = struct_type.backingIntType(ip).*; if (backing_int_ty != .none) { const info = Type.fromInterned(backing_int_ty).intInfo(mod); - const dwarf_encoding: c_uint = switch (info.signedness) { - .signed => DW.ATE.signed, - .unsigned => DW.ATE.unsigned, + const builder_name = try o.builder.string(name); + const debug_int_type = switch (info.signedness) { + .signed => try o.builder.debugSignedType(builder_name, ty.abiSize(mod) * 8), + .unsigned => try o.builder.debugUnsignedType(builder_name, ty.abiSize(mod) * 8), }; - const di_bits = ty.abiSize(mod) * 8; // lldb cannot handle non-byte sized types - const di_ty = dib.createBasicType(name, di_bits, dwarf_encoding); - gop.value_ptr.* = AnnotatedDITypePtr.initFull(di_ty); - return di_ty; + try o.debug_type_map.put(gpa, ty, debug_int_type); + return debug_int_type; } } - const fwd_decl = opt_fwd_decl orelse blk: { - const fwd_decl = dib.createReplaceableCompositeType( - DW.TAG.structure_type, - name.ptr, - compile_unit_scope, - null, // file - 0, // line - ); - gop.value_ptr.* = AnnotatedDITypePtr.initFwd(fwd_decl); - if (resolve == .fwd) return fwd_decl; - break :blk fwd_decl; - }; - switch (ip.indexToKey(ty.toIntern())) { .anon_struct_type => |tuple| { - var di_fields: std.ArrayListUnmanaged(*llvm.DIType) = .{}; - defer di_fields.deinit(gpa); + var fields: std.ArrayListUnmanaged(Builder.Metadata) = .{}; + defer fields.deinit(gpa); - try di_fields.ensureUnusedCapacity(gpa, tuple.types.len); + try fields.ensureUnusedCapacity(gpa, tuple.types.len); comptime assert(struct_layout_version == 2); var offset: u64 = 0; + const debug_fwd_ref = try o.builder.debugForwardReference(); + for (tuple.types.get(ip), tuple.values.get(ip), 0..)
|field_ty, field_val, i| { if (field_val != .none or !Type.fromInterned(field_ty).hasRuntimeBits(mod)) continue; @@ -2525,38 +2416,33 @@ pub const Object = struct { try std.fmt.allocPrintZ(gpa, "{d}", .{i}); defer if (tuple.names.len == 0) gpa.free(field_name); - try di_fields.append(gpa, dib.createMemberType( - fwd_decl.toScope(), - field_name, - null, // file - 0, // line - field_size * 8, // size in bits - field_align.toByteUnits(0) * 8, // align in bits - field_offset * 8, // offset in bits - 0, // flags - try o.lowerDebugType(Type.fromInterned(field_ty), resolve), + fields.appendAssumeCapacity(try o.builder.debugMemberType( + try o.builder.string(field_name), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, + try o.lowerDebugType(Type.fromInterned(field_ty)), + field_size * 8, + field_align.toByteUnits(0) * 8, + field_offset * 8, )); } - const full_di_ty = dib.createStructType( - compile_unit_scope, - name.ptr, - null, // file - 0, // line - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - di_fields.items.ptr, - @intCast(di_fields.items.len), - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_struct_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(fields.items), ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_struct_type); + + try o.debug_type_map.put(gpa, ty, debug_struct_type); + return debug_struct_type; }, .struct_type => |struct_type| { if (!struct_type.haveFieldTypes(ip)) { @@ -2568,12 +2454,9 @@ pub const Object = struct { // rather than changing the frontend to unnecessarily resolve the // struct field types. const owner_decl_index = ty.getOwnerDecl(mod); - const struct_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index); - dib.replaceTemporary(fwd_decl, struct_di_ty); - // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType` - // means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(struct_di_ty)); - return struct_di_ty; + const debug_struct_type = try o.makeEmptyNamespaceDebugType(owner_decl_index); + try o.debug_type_map.put(gpa, ty, debug_struct_type); + return debug_struct_type; } }, else => {}, @@ -2581,20 +2464,22 @@ pub const Object = struct { if (!ty.hasRuntimeBitsIgnoreComptime(mod)) { const owner_decl_index = ty.getOwnerDecl(mod); - const struct_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index); - dib.replaceTemporary(fwd_decl, struct_di_ty); - // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType` - // means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(struct_di_ty)); - return struct_di_ty; + const debug_struct_type = try o.makeEmptyNamespaceDebugType(owner_decl_index); + try o.debug_type_map.put(gpa, ty, debug_struct_type); + return debug_struct_type; } const struct_type = mod.typeToStruct(ty).?; - var di_fields: std.ArrayListUnmanaged(*llvm.DIType) = .{}; - defer di_fields.deinit(gpa); + var fields: std.ArrayListUnmanaged(Builder.Metadata) = .{}; + defer fields.deinit(gpa); - try di_fields.ensureUnusedCapacity(gpa, struct_type.field_types.len); + try fields.ensureUnusedCapacity(gpa, struct_type.field_types.len); + + const debug_fwd_ref = try o.builder.debugForwardReference(); + + // Set as forward reference while the type is lowered in case it references itself + try o.debug_type_map.put(gpa, ty, debug_fwd_ref); comptime assert(struct_layout_version == 2); var it = struct_type.iterateRuntimeOrder(ip); @@ -2612,103 +2497,88 @@ pub const Object = struct { const field_name = struct_type.fieldName(ip, field_index).unwrap() orelse try ip.getOrPutStringFmt(gpa, "{d}", .{field_index}); - const field_di_ty = try o.lowerDebugType(field_ty, resolve); - - try di_fields.append(gpa, dib.createMemberType( - fwd_decl.toScope(), - ip.stringToSlice(field_name), - null, // file - 0, // line - field_size * 8, // size in bits - field_align.toByteUnits(0) * 8, // align in bits - field_offset * 8, // offset in bits - 0, // flags - field_di_ty, + fields.appendAssumeCapacity(try o.builder.debugMemberType( + try o.builder.string(ip.stringToSlice(field_name)), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(field_ty), + field_size * 8, + field_align.toByteUnits(0) * 8, + field_offset * 8, )); } - const full_di_ty = dib.createStructType( - compile_unit_scope, - name.ptr, - null, // file - 0, // line - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - di_fields.items.ptr, - @intCast(di_fields.items.len), - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_struct_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(fields.items), ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_struct_type); + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_struct_type; + + return debug_struct_type; }, .Union => { - const compile_unit_scope = o.di_compile_unit.?.toScope(); const owner_decl_index = ty.getOwnerDecl(mod); const name = try o.allocTypeName(ty); defer gpa.free(name); - const fwd_decl = opt_fwd_decl orelse blk: { - const fwd_decl = dib.createReplaceableCompositeType( - DW.TAG.structure_type, - name.ptr, - o.di_compile_unit.?.toScope(), - null, // file - 0, // line - ); - gop.value_ptr.* = AnnotatedDITypePtr.initFwd(fwd_decl); - if (resolve == .fwd) return fwd_decl; - break :blk fwd_decl; - }; - const union_type = ip.indexToKey(ty.toIntern()).union_type; if (!union_type.haveFieldTypes(ip) or !ty.hasRuntimeBitsIgnoreComptime(mod)) { - const union_di_ty = try o.makeEmptyNamespaceDIType(owner_decl_index); - dib.replaceTemporary(fwd_decl, union_di_ty); - // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType` - // means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(union_di_ty)); - return union_di_ty; + const debug_union_type = try o.makeEmptyNamespaceDebugType(owner_decl_index); + try o.debug_type_map.put(gpa, ty, debug_union_type); + return debug_union_type; } const union_obj = ip.loadUnionType(union_type); const layout = mod.getUnionLayout(union_obj); + const debug_fwd_ref = try o.builder.debugForwardReference(); + + // Set as forward reference while the type is lowered in case it references itself + try o.debug_type_map.put(gpa, ty, debug_fwd_ref); + if (layout.payload_size == 0) { - const tag_di_ty = try o.lowerDebugType(Type.fromInterned(union_obj.enum_tag_ty), resolve); - const di_fields = [_]*llvm.DIType{tag_di_ty}; - const full_di_ty = dib.createStructType( - compile_unit_scope, - name.ptr, - null, // file - 0, // line - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - &di_fields, - di_fields.len, - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_union_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple( + &.{try o.lowerDebugType(Type.fromInterned(union_obj.enum_tag_ty))}, + ), ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` via `makeEmptyNamespaceDIType` - // means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_union_type; + + return debug_union_type; } - var di_fields: std.ArrayListUnmanaged(*llvm.DIType) = .{}; - defer di_fields.deinit(gpa); + var fields: std.ArrayListUnmanaged(Builder.Metadata) = .{}; + defer fields.deinit(gpa); - try di_fields.ensureUnusedCapacity(gpa, union_obj.field_names.len); + try fields.ensureUnusedCapacity(gpa, union_obj.field_names.len); + + const debug_union_fwd_ref = if (layout.tag_size == 0) + debug_fwd_ref + else + try o.builder.debugForwardReference(); for (0..union_obj.field_names.len) |field_index| { const field_ty = union_obj.field_types.get(ip)[field_index]; @@ -2717,18 +2587,16 @@ pub const Object = struct { const field_size = Type.fromInterned(field_ty).abiSize(mod); const field_align = mod.unionFieldNormalAlignment(union_obj, @intCast(field_index)); - const field_di_ty = try o.lowerDebugType(Type.fromInterned(field_ty), resolve); const field_name = union_obj.field_names.get(ip)[field_index]; - di_fields.appendAssumeCapacity(dib.createMemberType( - fwd_decl.toScope(), - ip.stringToSlice(field_name), - null, // file - 0, // line - field_size * 8, // size in bits - field_align.toByteUnits(0) * 8, // align in bits - 0, // offset in bits - 0, // flags - field_di_ty, + fields.appendAssumeCapacity(try o.builder.debugMemberType( + try o.builder.string(ip.stringToSlice(field_name)), + Builder.Metadata.none, // File + debug_union_fwd_ref, + 0, // Line + try o.lowerDebugType(Type.fromInterned(field_ty)), + field_size * 8, + field_align.toByteUnits(0) * 8, + 0, // Offset )); } @@ -2739,25 +2607,25 @@ pub const Object = struct { break :name union_name_buf.?; }; - const union_di_ty = dib.createUnionType( - compile_unit_scope, - union_name.ptr, - null, // file - 0, // line - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - di_fields.items.ptr, - @intCast(di_fields.items.len), - 0, // run time lang - "", // unique id + const debug_union_type = try o.builder.debugUnionType( + try o.builder.string(union_name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(fields.items), ); + o.builder.debugForwardReferenceSetType(debug_union_fwd_ref, debug_union_type); + if (layout.tag_size == 0) { - dib.replaceTemporary(fwd_decl, union_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. 
- try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(union_di_ty)); - return union_di_ty; + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_union_type; + + return debug_union_type; } var tag_offset: u64 = undefined; @@ -2770,81 +2638,80 @@ pub const Object = struct { tag_offset = layout.tag_align.forward(layout.payload_size); } - const tag_di = dib.createMemberType( - fwd_decl.toScope(), - "tag", - null, // file - 0, // line + const debug_tag_type = try o.builder.debugMemberType( + try o.builder.string("tag"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + try o.lowerDebugType(Type.fromInterned(union_obj.enum_tag_ty)), layout.tag_size * 8, layout.tag_align.toByteUnits(0) * 8, - tag_offset * 8, // offset in bits - 0, // flags - try o.lowerDebugType(Type.fromInterned(union_obj.enum_tag_ty), resolve), + tag_offset * 8, ); - const payload_di = dib.createMemberType( - fwd_decl.toScope(), - "payload", - null, // file - 0, // line - layout.payload_size * 8, // size in bits + const debug_payload_type = try o.builder.debugMemberType( + try o.builder.string("payload"), + Builder.Metadata.none, // File + debug_fwd_ref, + 0, // Line + debug_union_type, + layout.payload_size * 8, layout.payload_align.toByteUnits(0) * 8, - payload_offset * 8, // offset in bits - 0, // flags - union_di_ty, + payload_offset * 8, ); - const full_di_fields: [2]*llvm.DIType = + const full_fields: [2]Builder.Metadata = if (layout.tag_align.compare(.gte, layout.payload_align)) - .{ tag_di, payload_di } + .{ debug_tag_type, debug_payload_type } else - .{ payload_di, tag_di }; + .{ debug_payload_type, debug_tag_type }; - const full_di_ty = dib.createStructType( - compile_unit_scope, - name.ptr, - null, // file - 0, // line - ty.abiSize(mod) * 8, // size in bits - ty.abiAlignment(mod).toByteUnits(0) * 8, // align in bits - 0, // flags - null, // derived from - &full_di_fields, - full_di_fields.len, - 0, // run time lang - null, // vtable holder - "", // unique id + const debug_tagged_union_type = try o.builder.debugStructType( + try o.builder.string(name), + Builder.Metadata.none, // File + o.debug_compile_unit, // Scope + 0, // Line + Builder.Metadata.none, // Underlying type + ty.abiSize(mod) * 8, + ty.abiAlignment(mod).toByteUnits(0) * 8, + try o.builder.debugTuple(&full_fields), ); - dib.replaceTemporary(fwd_decl, full_di_ty); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(full_di_ty)); - return full_di_ty; + + o.builder.debugForwardReferenceSetType(debug_fwd_ref, debug_tagged_union_type); + + // Set to real type now that it has been lowered fully + const map_ptr = o.debug_type_map.getPtr(ty) orelse unreachable; + map_ptr.* = debug_tagged_union_type; + + return debug_tagged_union_type; }, .Fn => { const fn_info = mod.typeToFunc(ty).?; - var param_di_types = std.ArrayList(*llvm.DIType).init(gpa); - defer param_di_types.deinit(); + var debug_param_types = std.ArrayList(Builder.Metadata).init(gpa); + defer debug_param_types.deinit(); + + try debug_param_types.ensureUnusedCapacity(3 + fn_info.param_types.len); // Return type goes first. 
if (Type.fromInterned(fn_info.return_type).hasRuntimeBitsIgnoreComptime(mod)) { const sret = firstParamSRet(fn_info, mod); - const di_ret_ty = if (sret) Type.void else Type.fromInterned(fn_info.return_type); - try param_di_types.append(try o.lowerDebugType(di_ret_ty, resolve)); + const ret_ty = if (sret) Type.void else Type.fromInterned(fn_info.return_type); + debug_param_types.appendAssumeCapacity(try o.lowerDebugType(ret_ty)); if (sret) { const ptr_ty = try mod.singleMutPtrType(Type.fromInterned(fn_info.return_type)); - try param_di_types.append(try o.lowerDebugType(ptr_ty, resolve)); + debug_param_types.appendAssumeCapacity(try o.lowerDebugType(ptr_ty)); } } else { - try param_di_types.append(try o.lowerDebugType(Type.void, resolve)); + debug_param_types.appendAssumeCapacity(try o.lowerDebugType(Type.void)); } if (Type.fromInterned(fn_info.return_type).isError(mod) and o.module.comp.config.any_error_tracing) { const ptr_ty = try mod.singleMutPtrType(try o.getStackTraceType()); - try param_di_types.append(try o.lowerDebugType(ptr_ty, resolve)); + debug_param_types.appendAssumeCapacity(try o.lowerDebugType(ptr_ty)); } for (0..fn_info.param_types.len) |i| { @@ -2853,20 +2720,18 @@ pub const Object = struct { if (isByRef(param_ty, mod)) { const ptr_ty = try mod.singleMutPtrType(param_ty); - try param_di_types.append(try o.lowerDebugType(ptr_ty, resolve)); + debug_param_types.appendAssumeCapacity(try o.lowerDebugType(ptr_ty)); } else { - try param_di_types.append(try o.lowerDebugType(param_ty, resolve)); + debug_param_types.appendAssumeCapacity(try o.lowerDebugType(param_ty)); } } - const fn_di_ty = dib.createSubroutineType( - param_di_types.items.ptr, - @intCast(param_di_types.items.len), - 0, + const debug_function_type = try o.builder.debugSubroutineType( + try o.builder.debugTuple(debug_param_types.items), ); - // The recursive call to `lowerDebugType` means we can't use `gop` anymore. - try o.di_type_map.put(gpa, ty.toIntern(), AnnotatedDITypePtr.initFull(fn_di_ty)); - return fn_di_ty; + + try o.debug_type_map.put(gpa, ty, debug_function_type); + return debug_function_type; }, .ComptimeInt => unreachable, .ComptimeFloat => unreachable, @@ -2880,39 +2745,30 @@ pub const Object = struct { } } - fn namespaceToDebugScope(o: *Object, namespace_index: InternPool.NamespaceIndex) !*llvm.DIScope { + fn namespaceToDebugScope(o: *Object, namespace_index: InternPool.NamespaceIndex) !Builder.Metadata { const mod = o.module; const namespace = mod.namespacePtr(namespace_index); - if (namespace.parent == .none) { - const di_file = try o.getDIFile(o.gpa, namespace.file_scope); - return di_file.toScope(); - } - const di_type = try o.lowerDebugType(namespace.ty, .fwd); - return di_type.toScope(); + if (namespace.parent == .none) return try o.getDebugFile(namespace.file_scope); + + const gop = try o.debug_unresolved_namespace_scopes.getOrPut(o.gpa, namespace_index); + + if (!gop.found_existing) gop.value_ptr.* = try o.builder.debugForwardReference(); + + return gop.value_ptr.*; } - /// This is to be used instead of void for debug info types, to avoid tripping - /// Assertion `!isa(Scope) && "shouldn't make a namespace scope for a type"' - /// when targeting CodeView (Windows). 
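Note for readers following the composite-type hunks above: the old DIBuilder path tracked fwd/full resolution through AnnotatedDITypePtr and replaceTemporary, while the new path publishes a Builder forward reference in debug_type_map before lowering a type's fields and patches it afterwards via debugForwardReferenceSetType. The following is a minimal, self-contained sketch of that two-phase pattern only; Metadata, TypeKey, and DebugTypeLowering are hypothetical stand-ins for illustration, not the compiler's actual types or API.

const std = @import("std");

// Hypothetical stand-ins; the real compiler keys the map on `Type` and stores
// `Builder.Metadata` handles backed by a metadata arena.
const Metadata = u32;
const TypeKey = u32;

const DebugTypeLowering = struct {
    map: std.AutoHashMapUnmanaged(TypeKey, Metadata) = .{},
    next_id: Metadata = 1,

    fn forwardReference(self: *DebugTypeLowering) Metadata {
        // Stands in for `builder.debugForwardReference()`: hand out a fresh node id.
        const id = self.next_id;
        self.next_id += 1;
        return id;
    }

    fn lowerStruct(
        self: *DebugTypeLowering,
        gpa: std.mem.Allocator,
        ty: TypeKey,
        field_tys: []const TypeKey,
    ) !Metadata {
        if (self.map.get(ty)) |existing| return existing;

        // Publish a placeholder before lowering fields so a type that
        // (transitively) contains itself finds the forward reference
        // instead of recursing forever.
        const fwd_ref = self.forwardReference();
        try self.map.put(gpa, ty, fwd_ref);

        for (field_tys) |field_ty| {
            _ = try self.lowerStruct(gpa, field_ty, &.{});
        }

        // Stands in for building the real struct node, pointing the forward
        // reference at it, and overwriting the map entry with the full type.
        const full = self.forwardReference();
        self.map.getPtr(ty).?.* = full;
        return full;
    }
};

pub fn main() !void {
    var gpa_state = std.heap.GeneralPurposeAllocator(.{}){};
    defer _ = gpa_state.deinit();
    const gpa = gpa_state.allocator();

    var lowering: DebugTypeLowering = .{};
    defer lowering.map.deinit(gpa);

    // A struct whose single field refers back to the struct itself.
    const md = try lowering.lowerStruct(gpa, 7, &.{7});
    std.debug.print("resolved metadata id: {d}\n", .{md});
}

Because the placeholder is inserted before the recursive lowerDebugType calls, self-referential structs and unions terminate, and the map entry is only upgraded to the fully built node once all fields have been lowered.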
- fn makeEmptyNamespaceDIType(o: *Object, decl_index: InternPool.DeclIndex) !*llvm.DIType { + fn makeEmptyNamespaceDebugType(o: *Object, decl_index: InternPool.DeclIndex) !Builder.Metadata { const mod = o.module; const decl = mod.declPtr(decl_index); - const fields: [0]*llvm.DIType = .{}; - const di_scope = try o.namespaceToDebugScope(decl.src_namespace); - return o.di_builder.?.createStructType( - di_scope, - mod.intern_pool.stringToSlice(decl.name), // TODO use fully qualified name - try o.getDIFile(o.gpa, mod.namespacePtr(decl.src_namespace).file_scope), + return o.builder.debugStructType( + try o.builder.string(mod.intern_pool.stringToSlice(decl.name)), // TODO use fully qualified name + try o.getDebugFile(mod.namespacePtr(decl.src_namespace).file_scope), + try o.namespaceToDebugScope(decl.src_namespace), decl.src_line + 1, - 0, // size in bits - 0, // align in bits - 0, // flags - null, // derived from - undefined, // TODO should be able to pass &fields, - fields.len, - 0, // run time lang - null, // vtable holder - "", // unique id + Builder.Metadata.none, + 0, + 0, + .none, ); } @@ -4822,26 +4678,32 @@ pub const DeclGen = struct { else => try o.lowerValue(init_val), }, &o.builder); - if (o.di_builder) |dib| { - const di_file = - try o.getDIFile(o.gpa, mod.namespacePtr(decl.src_namespace).file_scope); + const line_number = decl.src_line + 1; + const is_internal_linkage = !o.module.decl_exports.contains(decl_index); - const line_number = decl.src_line + 1; - const is_internal_linkage = !o.module.decl_exports.contains(decl_index); - const di_global = dib.createGlobalVariableExpression( - di_file.toScope(), - mod.intern_pool.stringToSlice(decl.name), - variable_index.name(&o.builder).slice(&o.builder).?, - di_file, - line_number, - try o.lowerDebugType(decl.ty, .full), - is_internal_linkage, - ); + if (dg.object.builder.strip) return; - try o.di_map.put(o.gpa, dg.decl, di_global.getVariable().toNode()); - if (!is_internal_linkage or decl.isExtern(mod)) - variable_index.toLlvm(&o.builder).attachMetaData(di_global); - } + const debug_file = try o.getDebugFile(mod.namespacePtr(decl.src_namespace).file_scope); + + const debug_global_var = try o.builder.debugGlobalVar( + try o.builder.string(mod.intern_pool.stringToSlice(decl.name)), // Name + variable_index.name(&o.builder), // Linkage name + debug_file, // File + debug_file, // Scope + line_number, + try o.lowerDebugType(decl.ty), + variable_index, + .{ .local = is_internal_linkage }, + ); + + const debug_expression = try o.builder.debugExpression(&.{}); + + const debug_global_var_expression = try o.builder.debugGlobalVarExpression( + debug_global_var, + debug_expression, + ); + + try o.debug_globals.append(o.gpa, debug_global_var_expression); } } }; @@ -4852,19 +4714,23 @@ pub const FuncGen = struct { air: Air, liveness: Liveness, wip: Builder.WipFunction, - di_scope: ?if (build_options.have_llvm) *llvm.DIScope else noreturn, - di_file: ?if (build_options.have_llvm) *llvm.DIFile else noreturn, + + file: Builder.Metadata, + subprogram: Builder.Metadata, + current_scope: Builder.Metadata, + + inlined: std.ArrayListUnmanaged(struct { + base_line: u32, + location: Builder.Metadata, + scope: Builder.Metadata, + }) = .{}, + + scope_stack: std.ArrayListUnmanaged(Builder.Metadata) = .{}, + base_line: u32, prev_dbg_line: c_uint, prev_dbg_column: c_uint, - /// Stack of locations where a call was inlined. - dbg_inlined: std.ArrayListUnmanaged(if (build_options.have_llvm) DbgState else void) = .{}, - - /// Stack of `DILexicalBlock`s. 
dbg_block instructions cannot happend accross - /// dbg_inline instructions so no special handling there is required. - dbg_block_stack: std.ArrayListUnmanaged(if (build_options.have_llvm) *llvm.DIScope else void) = .{}, - /// This stores the LLVM values used in a function, such that they can be referred to /// in other instructions. This table is cleared before every function is generated. func_inst_table: std.AutoHashMapUnmanaged(Air.Inst.Ref, Builder.Value), @@ -4905,8 +4771,8 @@ pub const FuncGen = struct { fn deinit(self: *FuncGen) void { self.wip.deinit(); - self.dbg_inlined.deinit(self.gpa); - self.dbg_block_stack.deinit(self.gpa); + self.scope_stack.deinit(self.gpa); + self.inlined.deinit(self.gpa); self.func_inst_table.deinit(self.gpa); self.blocks.deinit(self.gpa); } @@ -5515,9 +5381,6 @@ pub const FuncGen = struct { // a different LLVM type than the usual one. We solve this here at the callsite // by using our canonical type, then loading it if necessary. const alignment = return_type.abiAlignment(mod).toLlvm(); - if (o.builder.useLibLlvm()) - assert(o.target_data.abiSizeOfType(abi_ret_ty.toLlvm(&o.builder)) >= - o.target_data.abiSizeOfType(llvm_ret_ty.toLlvm(&o.builder))); const rp = try self.buildAlloca(abi_ret_ty, alignment); _ = try self.wip.store(.normal, call, rp, alignment); return if (isByRef(return_type, mod)) @@ -6675,42 +6538,48 @@ pub const FuncGen = struct { } fn airDbgStmt(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { - const di_scope = self.di_scope orelse return .none; + if (self.wip.builder.strip) return .none; const dbg_stmt = self.air.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt; self.prev_dbg_line = @intCast(self.base_line + dbg_stmt.line + 1); self.prev_dbg_column = @intCast(dbg_stmt.column + 1); - const inlined_at = if (self.dbg_inlined.items.len > 0) - self.dbg_inlined.items[self.dbg_inlined.items.len - 1].loc + const inlined_at = if (self.inlined.items.len > 0) + self.inlined.items[self.inlined.items.len - 1].location else - null; - self.wip.llvm.builder.setCurrentDebugLocation( - self.prev_dbg_line, - self.prev_dbg_column, - di_scope, - inlined_at, - ); + Builder.Metadata.none; + + self.wip.current_debug_location = .{ + .line = self.prev_dbg_line, + .column = self.prev_dbg_column, + .scope = self.current_scope, + .inlined_at = inlined_at, + }; + return .none; } fn airDbgInlineBegin(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { + if (self.wip.builder.strip or true) return .none; const o = self.dg.object; - const dib = o.di_builder orelse return .none; - const ty_fn = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_fn; - const zcu = o.module; + + const ty_fn = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_fn; const func = zcu.funcInfo(ty_fn.func); const decl_index = func.owner_decl; const decl = zcu.declPtr(decl_index); const namespace = zcu.namespacePtr(decl.src_namespace); const owner_mod = namespace.file_scope.mod; - const di_file = try o.getDIFile(self.gpa, zcu.namespacePtr(decl.src_namespace).file_scope); - self.di_file = di_file; - const line_number = decl.src_line + 1; - const cur_debug_location = self.wip.llvm.builder.getCurrentDebugLocation2(); - try self.dbg_inlined.append(self.gpa, .{ - .loc = @ptrCast(cur_debug_location), - .scope = self.di_scope.?, + self.file = try o.getDebugFile(namespace.file_scope); + + const line_number = decl.src_line + 1; + try self.inlined.append(self.gpa, .{ + .location = if (self.wip.current_debug_location) |location| try self.wip.builder.debugLocation( + 
location.scope, + location.line, + location.column, + location.inlined_at, + ) else .none, + .scope = self.current_scope, .base_line = self.base_line, }); @@ -6721,91 +6590,108 @@ pub const FuncGen = struct { .param_types = &.{}, .return_type = .void_type, }); - const fn_di_ty = try o.lowerDebugType(fn_ty, .full); - const subprogram = dib.createFunction( - di_file.toScope(), - zcu.intern_pool.stringToSlice(decl.name), - zcu.intern_pool.stringToSlice(fqn), - di_file, + + const subprogram = try o.builder.debugSubprogram( + self.file, + try o.builder.string(zcu.intern_pool.stringToSlice(decl.name)), + try o.builder.string(zcu.intern_pool.stringToSlice(fqn)), line_number, - fn_di_ty, - is_internal_linkage, - true, // is definition - line_number + func.lbrace_line, // scope line - llvm.DIFlags.StaticMember, - owner_mod.optimize_mode != .Debug, - null, // decl_subprogram + line_number + func.lbrace_line, + try o.lowerDebugType(fn_ty), + .{ + .optimized = owner_mod.optimize_mode != .Debug, + .local = is_internal_linkage, + .definition = true, + .debug_info_flags = llvm.DIFlags.StaticMember, + }, + o.debug_compile_unit, ); - const lexical_block = dib.createLexicalBlock(subprogram.toScope(), di_file, line_number, 1); - self.di_scope = lexical_block.toScope(); + const lexical_block = try o.builder.debugLexicalBlock( + subprogram, + self.file, + line_number, + 1, + ); + self.current_scope = lexical_block; self.base_line = decl.src_line; return .none; } - fn airDbgInlineEnd(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { + fn airDbgInlineEnd(self: *FuncGen, inst: Air.Inst.Index) Allocator.Error!Builder.Value { + if (self.wip.builder.strip or true) return .none; const o = self.dg.object; - if (o.di_builder == null) return .none; + const ty_fn = self.air.instructions.items(.data)[@intFromEnum(inst)].ty_fn; const mod = o.module; const decl = mod.funcOwnerDeclPtr(ty_fn.func); - const di_file = try o.getDIFile(self.gpa, mod.namespacePtr(decl.src_namespace).file_scope); - self.di_file = di_file; - const old = self.dbg_inlined.pop(); - self.di_scope = old.scope; + self.file = try o.getDebugFile(mod.namespacePtr(decl.src_namespace).file_scope); + + const old = self.inlined.pop(); + self.current_scope = old.scope; self.base_line = old.base_line; return .none; } - fn airDbgBlockBegin(self: *FuncGen) !Builder.Value { + fn airDbgBlockBegin(self: *FuncGen) Allocator.Error!Builder.Value { + if (self.wip.builder.strip) return .none; const o = self.dg.object; - const dib = o.di_builder orelse return .none; - const old_scope = self.di_scope.?; - try self.dbg_block_stack.append(self.gpa, old_scope); - const lexical_block = dib.createLexicalBlock(old_scope, self.di_file.?, self.prev_dbg_line, self.prev_dbg_column); - self.di_scope = lexical_block.toScope(); + + try self.scope_stack.append(self.gpa, self.current_scope); + + const old = self.current_scope; + self.current_scope = try o.builder.debugLexicalBlock( + self.file, + old, + self.prev_dbg_line, + self.prev_dbg_column, + ); return .none; } fn airDbgBlockEnd(self: *FuncGen) !Builder.Value { - const o = self.dg.object; - if (o.di_builder == null) return .none; - self.di_scope = self.dbg_block_stack.pop(); + if (self.wip.builder.strip) return .none; + self.current_scope = self.scope_stack.pop(); return .none; } fn airDbgVarPtr(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { + if (self.wip.builder.strip) return .none; const o = self.dg.object; const mod = o.module; - const dib = o.di_builder orelse return .none; const pl_op = 
self.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const operand = try self.resolveInst(pl_op.operand); const name = self.air.nullTerminatedString(pl_op.payload); const ptr_ty = self.typeOf(pl_op.operand); - const di_local_var = dib.createAutoVariable( - self.di_scope.?, - name.ptr, - self.di_file.?, + const debug_local_var = try o.builder.debugLocalVar( + try o.builder.string(name), + self.file, + self.current_scope, self.prev_dbg_line, - try o.lowerDebugType(ptr_ty.childType(mod), .full), - true, // always preserve - 0, // flags + try o.lowerDebugType(ptr_ty.childType(mod)), ); - const inlined_at = if (self.dbg_inlined.items.len > 0) - self.dbg_inlined.items[self.dbg_inlined.items.len - 1].loc - else - null; - const debug_loc = llvm.getDebugLoc(self.prev_dbg_line, self.prev_dbg_column, self.di_scope.?, inlined_at); - const insert_block = self.wip.cursor.block.toLlvm(&self.wip); - _ = dib.insertDeclareAtEnd(operand.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); + + _ = try self.wip.callIntrinsic( + .normal, + .none, + .@"dbg.declare", + &.{}, + &.{ + (try self.wip.debugValue(operand)).toValue(), + debug_local_var.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", + ); + return .none; } fn airDbgVarVal(self: *FuncGen, inst: Air.Inst.Index) !Builder.Value { + if (self.wip.builder.strip) return .none; const o = self.dg.object; - const dib = o.di_builder orelse return .none; const pl_op = self.air.instructions.items(.data)[@intFromEnum(inst)].pl_op; const operand = try self.resolveInst(pl_op.operand); const operand_ty = self.typeOf(pl_op.operand); @@ -6813,32 +6699,58 @@ pub const FuncGen = struct { if (needDbgVarWorkaround(o)) return .none; - const di_local_var = dib.createAutoVariable( - self.di_scope.?, - name.ptr, - self.di_file.?, + const debug_local_var = try o.builder.debugLocalVar( + try o.builder.string(name), + self.file, + self.current_scope, self.prev_dbg_line, - try o.lowerDebugType(operand_ty, .full), - true, // always preserve - 0, // flags + try o.lowerDebugType(operand_ty), ); - const inlined_at = if (self.dbg_inlined.items.len > 0) - self.dbg_inlined.items[self.dbg_inlined.items.len - 1].loc - else - null; - const debug_loc = llvm.getDebugLoc(self.prev_dbg_line, self.prev_dbg_column, self.di_scope.?, inlined_at); - const insert_block = self.wip.cursor.block.toLlvm(&self.wip); + const zcu = o.module; const owner_mod = self.dg.ownerModule(); if (isByRef(operand_ty, zcu)) { - _ = dib.insertDeclareAtEnd(operand.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); + _ = try self.wip.callIntrinsic( + .normal, + .none, + .@"dbg.declare", + &.{}, + &.{ + (try self.wip.debugValue(operand)).toValue(), + debug_local_var.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", + ); } else if (owner_mod.optimize_mode == .Debug) { const alignment = operand_ty.abiAlignment(zcu).toLlvm(); const alloca = try self.buildAlloca(operand.typeOfWip(&self.wip), alignment); _ = try self.wip.store(.normal, operand, alloca, alignment); - _ = dib.insertDeclareAtEnd(alloca.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); + _ = try self.wip.callIntrinsic( + .normal, + .none, + .@"dbg.declare", + &.{}, + &.{ + (try self.wip.debugValue(alloca)).toValue(), + debug_local_var.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", + ); } else { - _ = dib.insertDbgValueIntrinsicAtEnd(operand.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); + _ = try self.wip.callIntrinsic( + .normal, + .none, + 
.@"dbg.value", + &.{}, + &.{ + (try self.wip.debugValue(operand)).toValue(), + debug_local_var.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", + ); } return .none; } @@ -8860,41 +8772,80 @@ pub const FuncGen = struct { const arg_val = self.args[self.arg_index]; self.arg_index += 1; + if (self.wip.builder.strip) return arg_val; + const inst_ty = self.typeOfIndex(inst); - if (o.di_builder) |dib| { - if (needDbgVarWorkaround(o)) return arg_val; + if (needDbgVarWorkaround(o)) return arg_val; - const src_index = self.air.instructions.items(.data)[@intFromEnum(inst)].arg.src_index; - const func_index = self.dg.decl.getOwnedFunctionIndex(); - const func = mod.funcInfo(func_index); - const lbrace_line = mod.declPtr(func.owner_decl).src_line + func.lbrace_line + 1; - const lbrace_col = func.lbrace_column + 1; - const di_local_var = dib.createParameterVariable( - self.di_scope.?, - mod.getParamName(func_index, src_index).ptr, // TODO test 0 bit args - self.di_file.?, - lbrace_line, - try o.lowerDebugType(inst_ty, .full), - true, // always preserve - 0, // flags - @intCast(self.arg_index), // includes +1 because 0 is return type + const src_index = self.air.instructions.items(.data)[@intFromEnum(inst)].arg.src_index; + const func_index = self.dg.decl.getOwnedFunctionIndex(); + const func = mod.funcInfo(func_index); + const lbrace_line = mod.declPtr(func.owner_decl).src_line + func.lbrace_line + 1; + const lbrace_col = func.lbrace_column + 1; + + const debug_parameter = try o.builder.debugParameter( + try o.builder.string(mod.getParamName(func_index, src_index)), + self.file, + self.current_scope, + lbrace_line, + try o.lowerDebugType(inst_ty), + @intCast(self.arg_index), + ); + + const old_location = self.wip.current_debug_location; + self.wip.current_debug_location = .{ + .line = lbrace_line, + .column = lbrace_col, + .scope = self.current_scope, + .inlined_at = Builder.Metadata.none, + }; + + const owner_mod = self.dg.ownerModule(); + if (isByRef(inst_ty, mod)) { + _ = try self.wip.callIntrinsic( + .normal, + .none, + .@"dbg.declare", + &.{}, + &.{ + (try self.wip.debugValue(arg_val)).toValue(), + debug_parameter.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", + ); + } else if (owner_mod.optimize_mode == .Debug) { + const alignment = inst_ty.abiAlignment(mod).toLlvm(); + const alloca = try self.buildAlloca(arg_val.typeOfWip(&self.wip), alignment); + _ = try self.wip.store(.normal, arg_val, alloca, alignment); + _ = try self.wip.callIntrinsic( + .normal, + .none, + .@"dbg.declare", + &.{}, + &.{ + (try self.wip.debugValue(alloca)).toValue(), + debug_parameter.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", + ); + } else { + _ = try self.wip.callIntrinsic( + .normal, + .none, + .@"dbg.value", + &.{}, + &.{ + (try self.wip.debugValue(arg_val)).toValue(), + debug_parameter.toValue(), + (try o.builder.debugExpression(&.{})).toValue(), + }, + "", ); - - const debug_loc = llvm.getDebugLoc(lbrace_line, lbrace_col, self.di_scope.?, null); - const insert_block = self.wip.cursor.block.toLlvm(&self.wip); - const owner_mod = self.dg.ownerModule(); - if (isByRef(inst_ty, mod)) { - _ = dib.insertDeclareAtEnd(arg_val.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); - } else if (owner_mod.optimize_mode == .Debug) { - const alignment = inst_ty.abiAlignment(mod).toLlvm(); - const alloca = try self.buildAlloca(arg_val.typeOfWip(&self.wip), alignment); - _ = try self.wip.store(.normal, arg_val, alloca, alignment); - _ = 
dib.insertDeclareAtEnd(alloca.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); - } else { - _ = dib.insertDbgValueIntrinsicAtEnd(arg_val.toLlvm(&self.wip), di_local_var, debug_loc, insert_block); - } } + self.wip.current_debug_location = old_location; return arg_val; } @@ -8932,7 +8883,7 @@ pub const FuncGen = struct { alignment: Builder.Alignment, ) Allocator.Error!Builder.Value { const target = self.dg.object.module.getTarget(); - return buildAllocaInner(&self.wip, self.di_scope != null, llvm_ty, alignment, target); + return buildAllocaInner(&self.wip, llvm_ty, alignment, target); } // Workaround for https://github.com/ziglang/zig/issues/16392 @@ -11716,45 +11667,6 @@ const struct_layout_version = 2; // https://github.com/llvm/llvm-project/issues/56585/ is fixed const optional_layout_version = 3; -/// We use the least significant bit of the pointer address to tell us -/// whether the type is fully resolved. Types that are only fwd declared -/// have the LSB flipped to a 1. -const AnnotatedDITypePtr = enum(usize) { - null, - _, - - fn initFwd(di_type: *llvm.DIType) AnnotatedDITypePtr { - const addr = @intFromPtr(di_type); - assert(@as(u1, @truncate(addr)) == 0); - return @enumFromInt(addr | 1); - } - - fn initFull(di_type: *llvm.DIType) AnnotatedDITypePtr { - const addr = @intFromPtr(di_type); - return @enumFromInt(addr); - } - - fn init(di_type: *llvm.DIType, resolve: Object.DebugResolveStatus) AnnotatedDITypePtr { - const addr = @intFromPtr(di_type); - const bit = @intFromBool(resolve == .fwd); - return @enumFromInt(addr | bit); - } - - fn toDIType(self: AnnotatedDITypePtr) *llvm.DIType { - switch (self) { - .null => unreachable, - _ => return @ptrFromInt(@intFromEnum(self) & ~@as(usize, 1)), - } - } - - fn isFwdOnly(self: AnnotatedDITypePtr) bool { - switch (self) { - .null => unreachable, - _ => return @as(u1, @truncate(@intFromEnum(self))) != 0, - } - } -}; - const lt_errors_fn_name = "__zig_lt_errors_len"; /// Without this workaround, LLVM crashes with "unknown codeview register H1" @@ -11778,7 +11690,6 @@ fn compilerRtIntBits(bits: u16) u16 { fn buildAllocaInner( wip: *Builder.WipFunction, - di_scope_non_null: bool, llvm_ty: Builder.Type, alignment: Builder.Alignment, target: std.Target, @@ -11787,19 +11698,15 @@ fn buildAllocaInner( const alloca = blk: { const prev_cursor = wip.cursor; - const prev_debug_location = if (wip.builder.useLibLlvm()) - wip.llvm.builder.getCurrentDebugLocation2() - else - undefined; + const prev_debug_location = wip.current_debug_location; defer { wip.cursor = prev_cursor; if (wip.cursor.block == .entry) wip.cursor.instruction += 1; - if (wip.builder.useLibLlvm() and di_scope_non_null) - wip.llvm.builder.setCurrentDebugLocation2(prev_debug_location); + wip.current_debug_location = prev_debug_location; } wip.cursor = .{ .block = .entry }; - if (wip.builder.useLibLlvm()) wip.llvm.builder.clearCurrentDebugLocation(); + wip.current_debug_location = null; break :blk try wip.alloca(.normal, llvm_ty, .none, alignment, address_space, ""); };
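The buildAllocaInner change above replaces the LLVM C API round-trip (getCurrentDebugLocation2 / setCurrentDebugLocation2 / clearCurrentDebugLocation) with a plain save, clear, and restore of wip.current_debug_location, so allocas hoisted into the entry block do not inherit the current statement's source location. A rough model of that save/clear/restore dance is sketched below; Wip and buildAllocaInEntry are invented simplifications, not the real WipFunction machinery.

const std = @import("std");

// Hypothetical, trimmed-down model of WipFunction; the real Builder tracks a
// full DebugLocation (line, column, scope, inlined_at), not a bare line number.
const Wip = struct {
    cursor_block: enum { entry, body } = .body,
    current_debug_location: ?u32 = null,

    fn alloca(self: *Wip) void {
        std.debug.print("alloca in {s} block, loc = {any}\n", .{
            @tagName(self.cursor_block),
            self.current_debug_location,
        });
    }
};

// Emit an alloca in the entry block without attaching the current statement's
// source location, then restore both the cursor and the location on exit.
fn buildAllocaInEntry(wip: *Wip) void {
    const prev_block = wip.cursor_block;
    const prev_loc = wip.current_debug_location;
    defer {
        wip.cursor_block = prev_block;
        wip.current_debug_location = prev_loc;
    }

    wip.cursor_block = .entry;
    wip.current_debug_location = null;
    wip.alloca();
}

pub fn main() void {
    var wip: Wip = .{ .current_debug_location = 42 };
    buildAllocaInEntry(&wip);
    std.debug.print("restored loc = {any}\n", .{wip.current_debug_location});
}

Keeping the saved location in a plain optional also removes the need for the old di_scope_non_null parameter: restoring an absent location is simply writing back null.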