spirv: write error value in a storage buffer
commit 0901328f12
parent fca5f3602d
@@ -2014,7 +2014,7 @@ pub const Cpu = struct {
            .global, .local, .shared => is_gpu,
            .constant => is_gpu and (context == null or context == .constant),
            .param => is_nvptx,
-           .input, .output, .uniform, .push_constant, .storage_buffer => is_spirv,
+           .input, .output, .uniform, .push_constant, .storage_buffer, .physical_storage_buffer => is_spirv,
        };
    }
};
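The only functional change above is that the new physical_storage_buffer address space is accepted for SPIR-V targets alongside the existing shader address spaces. As a rough, hypothetical sketch (not from this commit), user code on a spirv target could then name the address space on a pointer type:

// Illustrative only: a pointer type in the new address space. This is only
// accepted when compiling for a spirv* target that supports the address space.
const DeviceErrorPtr = *addrspace(.physical_storage_buffer) u16;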
@@ -21,6 +21,7 @@ pub const Feature = enum {
    generic_pointer,
    vector16,
    shader,
+   variable_pointers,
    physical_storage_buffer,
};

@@ -129,6 +130,11 @@ pub const all_features = blk: {
        .description = "Enable SPV_KHR_physical_storage_buffer extension and the PhysicalStorageBufferAddresses capability",
        .dependencies = featureSet(&[_]Feature{.v1_0}),
    };
+   result[@intFromEnum(Feature.variable_pointers)] = .{
+       .llvm_name = null,
+       .description = "Enable SPV_KHR_variable_pointers extension and the (VariablePointers, VariablePointersStorageBuffer) capabilities",
+       .dependencies = featureSet(&[_]Feature{.v1_0}),
+   };
    const ti = @typeInfo(Feature);
    for (&result, 0..) |*elem, i| {
        elem.index = i;
@@ -147,7 +153,7 @@ pub const cpu = struct {
    pub const vulkan_v1_2: CpuModel = .{
        .name = "vulkan_v1_2",
        .llvm_name = null,
-       .features = featureSet(&[_]Feature{ .v1_5, .shader, .physical_storage_buffer }),
+       .features = featureSet(&[_]Feature{ .v1_5, .shader }),
    };

    pub const opencl_v2: CpuModel = .{
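With physical_storage_buffer dropped from the default vulkan_v1_2 feature set, builds that want physical global pointers (or the new variable_pointers feature) have to opt in. A minimal sketch of doing so through a target query — the std.Target.Query field names are assumed from the current standard library and may differ between Zig versions:

const std = @import("std");

// Sketch only: request a Vulkan SPIR-V target and enable the two features
// touched by this commit on top of the vulkan_v1_2 baseline model.
pub fn spirvQuery() std.Target.Query {
    var query: std.Target.Query = .{
        .cpu_arch = .spirv64,
        .os_tag = .vulkan,
        .cpu_model = .{ .explicit = &std.Target.spirv.cpu.vulkan_v1_2 },
    };
    query.cpu_features_add.addFeature(@intFromEnum(std.Target.spirv.Feature.physical_storage_buffer));
    query.cpu_features_add.addFeature(@intFromEnum(std.Target.spirv.Feature.variable_pointers));
    return query;
}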
@@ -531,6 +531,7 @@ pub const AddressSpace = enum(u5) {
    uniform,
    push_constant,
    storage_buffer,
+   physical_storage_buffer,

    // AVR address spaces.
    flash,
@@ -169,12 +169,10 @@ pub const Object = struct {
    /// via the usual `intern_map` mechanism.
    ptr_types: PtrTypeMap = .{},

-   /// For test declarations for Vulkan, we have to add a push constant with a pointer to a
-   /// buffer that we can use. We only need to generate this once, this holds the link information
+   /// For test declarations for Vulkan, we have to add a buffer.
+   /// We only need to generate this once, this holds the link information
    /// related to that.
-   error_push_constant: ?struct {
-       push_constant_ptr: SpvModule.Decl.Index,
-   } = null,
+   error_buffer: ?SpvModule.Decl.Index = null,

    pub fn init(gpa: Allocator, target: std.Target) Object {
        return .{
@@ -1739,15 +1737,34 @@ const NavGen = struct {
    fn spvStorageClass(self: *NavGen, as: std.builtin.AddressSpace) StorageClass {
        return switch (as) {
            .generic => if (self.spv.hasFeature(.generic_pointer)) .Generic else .Function,
+           .global => {
+               if (self.spv.hasFeature(.kernel)) return .CrossWorkgroup;
+               return .StorageBuffer;
+           },
+           .push_constant => {
+               assert(self.spv.hasFeature(.shader));
+               return .PushConstant;
+           },
+           .output => {
+               assert(self.spv.hasFeature(.shader));
+               return .Output;
+           },
+           .uniform => {
+               assert(self.spv.hasFeature(.shader));
+               return .Uniform;
+           },
+           .storage_buffer => {
+               assert(self.spv.hasFeature(.shader));
+               return .StorageBuffer;
+           },
+           .physical_storage_buffer => {
+               assert(self.spv.hasFeature(.physical_storage_buffer));
+               return .PhysicalStorageBuffer;
+           },
+           .constant => .UniformConstant,
            .shared => .Workgroup,
            .local => .Function,
-           .global => if (self.spv.hasFeature(.shader)) .PhysicalStorageBuffer else .CrossWorkgroup,
-           .constant => .UniformConstant,
-           .push_constant => .PushConstant,
            .input => .Input,
-           .output => .Output,
-           .uniform => .Uniform,
-           .storage_buffer => .StorageBuffer,
            .gs,
            .fs,
            .ss,
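The net mapping after this rewrite is easier to read flattened than as a diff. A standalone sketch with local stand-in enums (the real function also asserts the relevant SPIR-V features on the module):

const As = enum { generic, global, shared, local, constant, input, output, uniform, push_constant, storage_buffer, physical_storage_buffer };
const Sc = enum { Generic, Function, CrossWorkgroup, StorageBuffer, Workgroup, UniformConstant, Input, Output, Uniform, PushConstant, PhysicalStorageBuffer };

fn storageClass(as: As, is_kernel: bool, has_generic: bool) Sc {
    return switch (as) {
        .generic => if (has_generic) .Generic else .Function,
        // Kernels keep CrossWorkgroup; shaders now get StorageBuffer for the
        // global address space instead of PhysicalStorageBuffer.
        .global => if (is_kernel) .CrossWorkgroup else .StorageBuffer,
        .shared => .Workgroup,
        .local => .Function,
        .constant => .UniformConstant,
        .input => .Input,
        .output => .Output,
        .uniform => .Uniform,
        .push_constant => .PushConstant,
        .storage_buffer => .StorageBuffer,
        .physical_storage_buffer => .PhysicalStorageBuffer,
    };
}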
@@ -2713,38 +2730,32 @@ const NavGen = struct {
                });
            },
            .vulkan, .opengl => {
-               const ptr_ptr_anyerror_ty_id = self.spv.allocId();
-               try self.spv.sections.types_globals_constants.emit(self.spv.gpa, .OpTypePointer, .{
-                   .id_result = ptr_ptr_anyerror_ty_id,
-                   .storage_class = .PushConstant,
-                   .type = ptr_anyerror_ty_id,
-               });
-
-               if (self.object.error_push_constant == null) {
+               if (self.object.error_buffer == null) {
                    const spv_err_decl_index = try self.spv.allocDecl(.global);
                    try self.spv.declareDeclDeps(spv_err_decl_index, &.{});

-                   const push_constant_struct_ty_id = self.spv.allocId();
-                   try self.spv.structType(push_constant_struct_ty_id, &.{ptr_anyerror_ty_id}, &.{"error_out_ptr"});
-                   try self.spv.decorate(push_constant_struct_ty_id, .Block);
-                   try self.spv.decorateMember(push_constant_struct_ty_id, 0, .{ .Offset = .{ .byte_offset = 0 } });
+                   const buffer_struct_ty_id = self.spv.allocId();
+                   try self.spv.structType(buffer_struct_ty_id, &.{anyerror_ty_id}, &.{"error_out"});
+                   try self.spv.decorate(buffer_struct_ty_id, .Block);
+                   try self.spv.decorateMember(buffer_struct_ty_id, 0, .{ .Offset = .{ .byte_offset = 0 } });

-                   const ptr_push_constant_struct_ty_id = self.spv.allocId();
+                   const ptr_buffer_struct_ty_id = self.spv.allocId();
                    try self.spv.sections.types_globals_constants.emit(self.spv.gpa, .OpTypePointer, .{
-                       .id_result = ptr_push_constant_struct_ty_id,
-                       .storage_class = .PushConstant,
-                       .type = push_constant_struct_ty_id,
+                       .id_result = ptr_buffer_struct_ty_id,
+                       .storage_class = self.spvStorageClass(.global),
+                       .type = buffer_struct_ty_id,
                    });

+                   const buffer_struct_id = self.spv.declPtr(spv_err_decl_index).result_id;
                    try self.spv.sections.types_globals_constants.emit(self.spv.gpa, .OpVariable, .{
-                       .id_result_type = ptr_push_constant_struct_ty_id,
-                       .id_result = self.spv.declPtr(spv_err_decl_index).result_id,
-                       .storage_class = .PushConstant,
+                       .id_result_type = ptr_buffer_struct_ty_id,
+                       .id_result = buffer_struct_id,
+                       .storage_class = self.spvStorageClass(.global),
                    });
+                   try self.spv.decorate(buffer_struct_id, .{ .DescriptorSet = .{ .descriptor_set = 0 } });
+                   try self.spv.decorate(buffer_struct_id, .{ .Binding = .{ .binding_point = 0 } });

-                   self.object.error_push_constant = .{
-                       .push_constant_ptr = spv_err_decl_index,
-                   };
+                   self.object.error_buffer = spv_err_decl_index;
                }

                try self.spv.sections.execution_modes.emit(self.spv.gpa, .OpExecutionMode, .{
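For Vulkan test binaries the error value now lives in an ordinary storage buffer instead of being written through a pointer handed over in a push constant. Roughly, the generated interface corresponds to this Zig-flavoured sketch (illustrative only; the real declaration is emitted straight into the SPIR-V module through the SpvModule calls above):

// The struct is decorated Block with member 0 at Offset 0; the variable gets
// DescriptorSet 0 / Binding 0 in the storage class of spvStorageClass(.global),
// i.e. StorageBuffer for shaders.
const ErrorOutBlock = extern struct {
    error_out: u16, // anyerror is backed by a 16-bit integer by default
};

A host-side test runner can then bind a buffer at set 0 / binding 0 and read the error value back after dispatch, rather than allocating a buffer and passing its address through a push constant.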
@@ -2767,24 +2778,16 @@ const NavGen = struct {
                    .id_result = self.spv.allocId(),
                });

-               const spv_err_decl_index = self.object.error_push_constant.?.push_constant_ptr;
-               const push_constant_id = self.spv.declPtr(spv_err_decl_index).result_id;
+               const spv_err_decl_index = self.object.error_buffer.?;
+               const buffer_id = self.spv.declPtr(spv_err_decl_index).result_id;
                try decl_deps.append(spv_err_decl_index);

                const zero_id = try self.constInt(Type.u32, 0);
-               // We cannot use OpInBoundsAccessChain to dereference cross-storage class, so we have to use
-               // a load.
-               const tmp = self.spv.allocId();
                try section.emit(self.spv.gpa, .OpInBoundsAccessChain, .{
-                   .id_result_type = ptr_ptr_anyerror_ty_id,
-                   .id_result = tmp,
-                   .base = push_constant_id,
-                   .indexes = &.{zero_id},
-               });
-               try section.emit(self.spv.gpa, .OpLoad, .{
                    .id_result_type = ptr_anyerror_ty_id,
                    .id_result = p_error_id,
-                   .pointer = tmp,
+                   .base = buffer_id,
+                   .indexes = &.{zero_id},
                });
            },
            else => unreachable,
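Since the error variable is now a regular interface variable, the entry point reaches the error slot with a single OpInBoundsAccessChain; the old scheme needed an access chain into the push constant plus an OpLoad to fetch the buffer pointer first. As a plain-Zig analogy (not generated source), the prologue now effectively does:

// p_error_id in the diff is the pointer produced by the access chain; the test
// body later stores its error result through it.
fn errorSlot(buffer: *extern struct { error_out: u16 }) *u16 {
    return &buffer.error_out; // one in-bounds access chain, no intermediate load
}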
@@ -4562,7 +4565,8 @@ const NavGen = struct {
            const field_int_id = blk: {
                if (field_ty.isPtrAtRuntime(zcu)) {
                    assert(self.spv.hasFeature(.addresses) or
-                       (self.spv.hasFeature(.physical_storage_buffer) and field_ty.ptrAddressSpace(zcu) == .storage_buffer));
+                       (self.spv.hasFeature(.physical_storage_buffer) and
+                           field_ty.ptrAddressSpace(zcu) == .storage_buffer));
                    break :blk try self.intFromPtr(field_id);
                }
                break :blk try self.bitCast(field_int_ty, field_ty, field_id);
@@ -4969,6 +4973,7 @@ const NavGen = struct {
            if (payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
                const pl_ptr_ty_id = try self.ptrType(layout.payload_ty, .Function, .indirect);
                const pl_ptr_id = try self.accessChain(pl_ptr_ty_id, tmp_id, &.{layout.payload_index});
+               const active_pl_ptr_id = if (!layout.payload_ty.eql(payload_ty, zcu)) blk: {
                    const active_pl_ptr_ty_id = try self.ptrType(payload_ty, .Function, .indirect);
                    const active_pl_ptr_id = self.spv.allocId();
                    try self.func.body.emit(self.spv.gpa, .OpBitcast, .{
@@ -4976,6 +4981,8 @@ const NavGen = struct {
                        .id_result = active_pl_ptr_id,
                        .operand = pl_ptr_id,
                    });
+                   break :blk active_pl_ptr_id;
+               } else pl_ptr_id;

                try self.store(payload_ty, active_pl_ptr_id, payload.?, .{});
            } else {
@@ -350,6 +350,11 @@ pub fn finalize(self: *Module, a: Allocator) ![]Word {
            .vector16 => try self.addCapability(.Vector16),
            // Shader
            .shader => try self.addCapability(.Shader),
+           .variable_pointers => {
+               try self.addExtension("SPV_KHR_variable_pointers");
+               try self.addCapability(.VariablePointersStorageBuffer);
+               try self.addCapability(.VariablePointers);
+           },
            .physical_storage_buffer => {
                try self.addExtension("SPV_KHR_physical_storage_buffer");
                try self.addCapability(.PhysicalStorageBufferAddresses);
@@ -364,20 +369,17 @@ pub fn finalize(self: *Module, a: Allocator) ![]Word {
    // Emit memory model
    const addressing_model: spec.AddressingModel = blk: {
        if (self.hasFeature(.shader)) {
-           break :blk switch (self.target.cpu.arch) {
-               .spirv32 => .Logical, // TODO: I don't think this will ever be implemented.
-               .spirv64 => .PhysicalStorageBuffer64,
-               else => unreachable,
-           };
-       } else if (self.hasFeature(.kernel)) {
+           assert(self.target.cpu.arch == .spirv64);
+           if (self.hasFeature(.physical_storage_buffer)) break :blk .PhysicalStorageBuffer64;
+           break :blk .Logical;
+       }
+       assert(self.hasFeature(.kernel));
        break :blk switch (self.target.cpu.arch) {
            .spirv32 => .Physical32,
            .spirv64 => .Physical64,
            else => unreachable,
        };
-       }

-       unreachable;
    };

    try self.sections.memory_model.emit(self.gpa, .OpMemoryModel, .{
        .addressing_model = addressing_model,
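With physical global pointers now optional, the addressing model becomes conditional as well: shader modules use PhysicalStorageBuffer64 only when the feature is enabled and fall back to Logical otherwise, while kernel modules keep Physical32/Physical64. A standalone sketch of the decision (stand-in enum; the real code uses spec.AddressingModel and asserts the architecture):

const AddressingModel = enum { Logical, Physical32, Physical64, PhysicalStorageBuffer64 };

fn pickAddressingModel(is_shader: bool, has_physical_storage_buffer: bool, is_64bit: bool) AddressingModel {
    if (is_shader) {
        // Shaders are only emitted for spirv64; physical addressing requires the
        // PhysicalStorageBufferAddresses capability.
        return if (has_physical_storage_buffer) .PhysicalStorageBuffer64 else .Logical;
    }
    // Kernel (OpenCL-style) modules always use physical addressing.
    return if (is_64bit) .Physical64 else .Physical32;
}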
@@ -501,21 +501,26 @@ pub fn addrSpaceCastIsValid(
/// part of a merge (result of a branch) and may not be stored in memory at all. This function returns
/// for a particular architecture and address space wether such pointers are logical.
pub fn arePointersLogical(target: std.Target, as: AddressSpace) bool {
-   if (target.os.tag != .vulkan) {
-       return false;
-   }
+   if (target.os.tag != .vulkan) return false;

    return switch (as) {
        // TODO: Vulkan doesn't support pointers in the generic address space, we
        // should remove this case but this requires a change in defaultAddressSpace().
        // For now, at least disable them from being regarded as physical.
        .generic => true,
-       // For now, all global pointers are represented using PhysicalStorageBuffer, so these are real
-       // pointers.
+       // For now, all global pointers are represented using StorageBuffer or CrossWorkgroup,
+       // so these are real pointers.
        .global => false,
-       // TODO: Allowed with VK_KHR_variable_pointers.
-       .shared => true,
-       .constant, .local, .input, .output, .uniform, .push_constant, .storage_buffer => true,
+       .physical_storage_buffer => false,
+       .shared => !target.cpu.features.isEnabled(@intFromEnum(std.Target.spirv.Feature.variable_pointers)),
+       .constant,
+       .local,
+       .input,
+       .output,
+       .uniform,
+       .push_constant,
+       .storage_buffer,
+       => true,
        else => unreachable,
    };
}
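The knock-on effect lands in arePointersLogical: on Vulkan, global and physical_storage_buffer pointers are real (physical) pointers, and shared (Workgroup) pointers stop being purely logical once SPV_KHR_variable_pointers is enabled. A condensed sketch of the resulting predicate for the Vulkan case (stand-in enum):

const AddrSpace = enum { generic, global, physical_storage_buffer, shared, constant, local, input, output, uniform, push_constant, storage_buffer };

fn pointersAreLogicalOnVulkan(as: AddrSpace, has_variable_pointers: bool) bool {
    return switch (as) {
        .generic => true,
        .global, .physical_storage_buffer => false,
        // Workgroup pointers become real pointers when SPV_KHR_variable_pointers is enabled.
        .shared => !has_variable_pointers,
        .constant, .local, .input, .output, .uniform, .push_constant, .storage_buffer => true,
    };
}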