spirv: Add SSBO read fallbacks when no aliasing is available
This commit is contained in:
parent
fd913bceaf
commit
fde47152d9
1 changed file with 99 additions and 37 deletions
|
@ -8,44 +8,62 @@
|
||||||
|
|
||||||
namespace Shader::Backend::SPIRV {
|
namespace Shader::Backend::SPIRV {
|
||||||
namespace {
|
namespace {
|
||||||
/// Computes the u32-array index into an SSBO for a byte offset.
///
/// @param ctx          SPIR-V emission context.
/// @param offset       Byte offset into the buffer (immediate or runtime value).
/// @param element_size Size in bytes of one element; must be a power of two so the
///                     divide can be emitted as a right shift (std::countr_zero).
/// @param index_offset Extra element-granularity offset, used by the non-aliasing
///                     fallbacks to address consecutive u32 words of a wider access.
/// @return Id of a u32 index expression.
Id StorageIndex(EmitContext& ctx, const IR::Value& offset, size_t element_size,
                u32 index_offset = 0) {
    if (offset.IsImmediate()) {
        // Fold the divide and the extra offset at compile time.
        const u32 imm_offset{static_cast<u32>(offset.U32() / element_size) + index_offset};
        return ctx.Const(imm_offset);
    }
    const u32 shift{static_cast<u32>(std::countr_zero(element_size))};
    Id index{ctx.Def(offset)};
    if (shift != 0) {
        // Divide byte offset by element size via logical right shift.
        const Id shift_id{ctx.Const(shift)};
        index = ctx.OpShiftRightLogical(ctx.U32[1], index, shift_id);
    }
    if (index_offset != 0) {
        index = ctx.OpIAdd(ctx.U32[1], index, ctx.Const(index_offset));
    }
    return index;
}
|
||||||
|
|
||||||
/// Builds an access chain pointing at one element of a storage buffer.
///
/// @param ctx          SPIR-V emission context.
/// @param binding      Storage buffer binding index; must be an immediate.
/// @param offset       Byte offset into the buffer.
/// @param type_def     Type definitions for the accessed storage type.
/// @param element_size Size in bytes of the accessed element.
/// @param member_ptr   Pointer-to-member selecting the SSBO definition to use.
/// @param index_offset Extra element-granularity offset (see StorageIndex).
/// @return Id of a pointer to the selected element.
/// @throws NotImplementedException when the binding is not an immediate.
Id StoragePointer(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
                  const StorageTypeDefinition& type_def, size_t element_size,
                  Id StorageDefinitions::*member_ptr, u32 index_offset = 0) {
    if (!binding.IsImmediate()) {
        throw NotImplementedException("Dynamic storage buffer indexing");
    }
    const Id ssbo{ctx.ssbos[binding.U32()].*member_ptr};
    const Id index{StorageIndex(ctx, offset, element_size, index_offset)};
    // First index (u32_zero_value) selects the runtime array member of the SSBO struct.
    return ctx.OpAccessChain(type_def.element, ssbo, ctx.u32_zero_value, index);
}
|
||||||
|
|
||||||
/// Loads one element of the given type from a storage buffer.
///
/// @param result_type  SPIR-V result type of the load.
/// @param index_offset Extra element-granularity offset (see StorageIndex).
/// @return Id of the loaded value.
Id LoadStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id result_type,
               const StorageTypeDefinition& type_def, size_t element_size,
               Id StorageDefinitions::*member_ptr, u32 index_offset = 0) {
    const Id pointer{
        StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr, index_offset)};
    return ctx.OpLoad(result_type, pointer);
}
|
||||||
|
|
||||||
|
Id LoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
||||||
|
u32 index_offset = 0) {
|
||||||
|
return LoadStorage(ctx, binding, offset, ctx.U32[1], ctx.storage_types.U32, sizeof(u32),
|
||||||
|
&StorageDefinitions::U32, index_offset);
|
||||||
|
}
|
||||||
|
|
||||||
void WriteStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
|
void WriteStorage(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
|
||||||
const StorageTypeDefinition& type_def, size_t element_size,
|
const StorageTypeDefinition& type_def, size_t element_size,
|
||||||
Id StorageDefinitions::*member_ptr) {
|
Id StorageDefinitions::*member_ptr, u32 index_offset = 0) {
|
||||||
const Id pointer{StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr)};
|
const Id pointer{
|
||||||
|
StoragePointer(ctx, binding, offset, type_def, element_size, member_ptr, index_offset)};
|
||||||
ctx.OpStore(pointer, value);
|
ctx.OpStore(pointer, value);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void WriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset, Id value,
|
||||||
|
u32 index_offset = 0) {
|
||||||
|
WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32, sizeof(u32),
|
||||||
|
&StorageDefinitions::U32, index_offset);
|
||||||
|
}
|
||||||
} // Anonymous namespace
|
} // Anonymous namespace
|
||||||
|
|
||||||
void EmitLoadGlobalU8(EmitContext&) {
|
void EmitLoadGlobalU8(EmitContext&) {
|
||||||
|
@ -105,42 +123,73 @@ void EmitWriteGlobal128(EmitContext& ctx, Id address, Id value) {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Loads an unsigned 8-bit value from a storage buffer, zero-extended to u32.
/// Without descriptor aliasing the buffer is only visible as u32 words, so the
/// byte is extracted from the containing word with OpBitFieldUExtract.
Id EmitLoadStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    if (ctx.profile.support_descriptor_aliasing) {
        return ctx.OpUConvert(ctx.U32[1],
                              LoadStorage(ctx, binding, offset, ctx.U8, ctx.storage_types.U8,
                                          sizeof(u8), &StorageDefinitions::U8));
    } else {
        return ctx.OpBitFieldUExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
                                      ctx.BitOffset8(offset), ctx.Const(8u));
    }
}
|
||||||
|
|
||||||
/// Loads a signed 8-bit value from a storage buffer, sign-extended to u32.
/// Without descriptor aliasing the byte is extracted from the containing u32
/// word with OpBitFieldSExtract (signed extract performs the sign extension).
Id EmitLoadStorageS8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    if (ctx.profile.support_descriptor_aliasing) {
        return ctx.OpSConvert(ctx.U32[1],
                              LoadStorage(ctx, binding, offset, ctx.S8, ctx.storage_types.S8,
                                          sizeof(s8), &StorageDefinitions::S8));
    } else {
        return ctx.OpBitFieldSExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
                                      ctx.BitOffset8(offset), ctx.Const(8u));
    }
}
|
||||||
|
|
||||||
/// Loads an unsigned 16-bit value from a storage buffer, zero-extended to u32.
/// Without descriptor aliasing the halfword is extracted from the containing
/// u32 word with OpBitFieldUExtract.
Id EmitLoadStorageU16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    if (ctx.profile.support_descriptor_aliasing) {
        return ctx.OpUConvert(ctx.U32[1],
                              LoadStorage(ctx, binding, offset, ctx.U16, ctx.storage_types.U16,
                                          sizeof(u16), &StorageDefinitions::U16));
    } else {
        return ctx.OpBitFieldUExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
                                      ctx.BitOffset16(offset), ctx.Const(16u));
    }
}
|
||||||
|
|
||||||
/// Loads a signed 16-bit value from a storage buffer, sign-extended to u32.
/// Without descriptor aliasing the halfword is extracted from the containing
/// u32 word with OpBitFieldSExtract (signed extract performs the sign extension).
Id EmitLoadStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    if (ctx.profile.support_descriptor_aliasing) {
        return ctx.OpSConvert(ctx.U32[1],
                              LoadStorage(ctx, binding, offset, ctx.S16, ctx.storage_types.S16,
                                          sizeof(s16), &StorageDefinitions::S16));
    } else {
        return ctx.OpBitFieldSExtract(ctx.U32[1], LoadStorage32(ctx, binding, offset),
                                      ctx.BitOffset16(offset), ctx.Const(16u));
    }
}
|
||||||
|
|
||||||
/// Loads a 32-bit value from a storage buffer.
/// u32 is the native access width, so no aliasing fallback is needed.
Id EmitLoadStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    return LoadStorage32(ctx, binding, offset);
}
|
||||||
|
|
||||||
/// Loads a 64-bit value from a storage buffer as a u32x2 vector.
/// Without descriptor aliasing the value is assembled from two consecutive
/// u32 loads (index offsets 0 and 1).
Id EmitLoadStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    if (ctx.profile.support_descriptor_aliasing) {
        return LoadStorage(ctx, binding, offset, ctx.U32[2], ctx.storage_types.U32x2,
                           sizeof(u32[2]), &StorageDefinitions::U32x2);
    } else {
        return ctx.OpCompositeConstruct(ctx.U32[2], LoadStorage32(ctx, binding, offset, 0),
                                        LoadStorage32(ctx, binding, offset, 1));
    }
}
|
||||||
|
|
||||||
/// Loads a 128-bit value from a storage buffer as a u32x4 vector.
/// Without descriptor aliasing the value is assembled from four consecutive
/// u32 loads (index offsets 0 through 3).
Id EmitLoadStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset) {
    if (ctx.profile.support_descriptor_aliasing) {
        return LoadStorage(ctx, binding, offset, ctx.U32[4], ctx.storage_types.U32x4,
                           sizeof(u32[4]), &StorageDefinitions::U32x4);
    } else {
        return ctx.OpCompositeConstruct(ctx.U32[4], LoadStorage32(ctx, binding, offset, 0),
                                        LoadStorage32(ctx, binding, offset, 1),
                                        LoadStorage32(ctx, binding, offset, 2),
                                        LoadStorage32(ctx, binding, offset, 3));
    }
}
|
||||||
|
|
||||||
void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
void EmitWriteStorageU8(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
||||||
|
@ -169,20 +218,33 @@ void EmitWriteStorageS16(EmitContext& ctx, const IR::Value& binding, const IR::V
|
||||||
|
|
||||||
void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
void EmitWriteStorage32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
||||||
Id value) {
|
Id value) {
|
||||||
WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32, sizeof(u32),
|
WriteStorage32(ctx, binding, offset, value);
|
||||||
&StorageDefinitions::U32);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
void EmitWriteStorage64(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
||||||
Id value) {
|
Id value) {
|
||||||
|
if (ctx.profile.support_descriptor_aliasing) {
|
||||||
WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x2, sizeof(u32[2]),
|
WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x2, sizeof(u32[2]),
|
||||||
&StorageDefinitions::U32x2);
|
&StorageDefinitions::U32x2);
|
||||||
|
} else {
|
||||||
|
for (u32 index = 0; index < 2; ++index) {
|
||||||
|
const Id element{ctx.OpCompositeExtract(ctx.U32[1], value, index)};
|
||||||
|
WriteStorage32(ctx, binding, offset, element, index);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
void EmitWriteStorage128(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset,
|
||||||
Id value) {
|
Id value) {
|
||||||
|
if (ctx.profile.support_descriptor_aliasing) {
|
||||||
WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x4, sizeof(u32[4]),
|
WriteStorage(ctx, binding, offset, value, ctx.storage_types.U32x4, sizeof(u32[4]),
|
||||||
&StorageDefinitions::U32x4);
|
&StorageDefinitions::U32x4);
|
||||||
|
} else {
|
||||||
|
for (u32 index = 0; index < 4; ++index) {
|
||||||
|
const Id element{ctx.OpCompositeExtract(ctx.U32[1], value, index)};
|
||||||
|
WriteStorage32(ctx, binding, offset, element, index);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
} // namespace Shader::Backend::SPIRV
|
} // namespace Shader::Backend::SPIRV
|
||||||
|
|
Loading…
Reference in a new issue