@@ -29,6 +29,7 @@ namespace hlsl
// TODO: some poor soul needs to study rest of https://registry.khronos.org/SPIR-V/specs/unified1/SPIRV.html#_capability
#define __NBL_CAPABILITY_ShaderLayer [[vk::ext_capability(spv::CapabilityShaderLayer)]]
#define __NBL_CAPABILITY_ShaderViewportIndex [[vk::ext_capability(spv::CapabilityShaderViewportIndex)]]
+ // there's a whole lot more of them

#else

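(Aside, not part of the diff: each of these macros is a one-line wrapper over DXC's inline-SPIR-V capability attribute, so wrapping any further capability from the list in the TODO above would follow the same pattern, sketched below; Int64Atomics is just an arbitrary example of "one of the whole lot more of them".)

#define __NBL_CAPABILITY_Int64Atomics [[vk::ext_capability(spv::CapabilityInt64Atomics)]]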
@@ -44,6 +45,10 @@ namespace hlsl
#define __NBL_SPIRV_SUPERSET_1_6__

// 1.6 core caps
+ // UniformDecoration
+ // Demote to helper invocation
+ // Some integer dot product stuff
+ //

#else

@@ -87,12 +92,25 @@ template<uint32_t StorageClass, typename T>
using pointer_t = vk::SpirvOpaqueType<spv::OpTypePointer,vk::Literal<vk::integral_constant<uint32_t,StorageClass> >,T>;

//! General Operations
+
+ //
+ template<typename M, uint32_t StorageClass, typename T>
+ [[vk::ext_instruction(spv::OpAccessChain)]]
+ pointer_t<StorageClass,M> accessChain(pointer_t<StorageClass,T> v, int32_t index);

// The holy operation that makes addrof possible
template<uint32_t StorageClass, typename T>
[[vk::ext_instruction(spv::OpCopyObject)]]
pointer_t<StorageClass,T> copyObject([[vk::ext_reference]] T v);

+ // unfortunately without reflection we can't validate that objects "logically match" in a concept
+ template<typename T, typename U>
+ [[vk::ext_instruction(spv::OpCopyLogical)]]
+ enable_if_t<!is_same_v<T,U>,T> copyLogical([[vk::ext_reference]] U v);
+ template<typename T, typename Ptr_U>
+ [[vk::ext_instruction(spv::OpCopyLogical)]]
+ enable_if_t<is_spirv_type_v<Ptr_U>/* && !is_same_v<T,U>*/,T> copyLogical(Ptr_U v);
+
// Here's the thing with atomics, it's not only the data type that dictates whether you can do an atomic or not.
// It's the storage class that has the most effect (shared vs storage vs image) and we can't check that easily
template<typename T> // integers operate on 2s complement so same op for signed and unsigned
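(Illustration, not part of the diff: the new accessChain plus the existing copyObject "addrof" trick combine roughly as sketched below. The spirv:: qualification, the Function storage class and the struct are assumptions made only for the example; copyLogical is the same idea for copying a value between two distinct struct types that only match member-by-member, which OpCopyLogical permits.)

struct Example { uint32_t a; uint32_t b; };

void accessChainSketch()
{
    Example s;
    // OpCopyObject on an ext_reference argument hands back a pointer to the local ("addrof")
    spirv::pointer_t<spv::StorageClassFunction,Example> p = spirv::copyObject<spv::StorageClassFunction,Example>(s);
    // OpAccessChain then selects member 1 (`b`) through that pointer
    spirv::pointer_t<spv::StorageClassFunction,uint32_t> pb = spirv::accessChain<uint32_t,spv::StorageClassFunction,Example>(p, 1);
}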
@@ -204,10 +222,14 @@ template<typename T, typename Ptr_T> // DXC Workaround
enable_if_t<is_spirv_type_v<Ptr_T>, T> atomicCompareExchange(Ptr_T ptr, uint32_t memoryScope, uint32_t memSemanticsEqual, uint32_t memSemanticsUnequal, T value, T comparator);


+
+ template<typename T>
+ using bda_pointer_t __NBL_CAPABILITY_PhysicalStorageBufferAddresses = vk::SpirvType<spv::OpTypePointer,sizeof(uint64_t),/*alignof(uint64_t)*/8,vk::Literal<vk::integral_constant<uint32_t,spv::StorageClassPhysicalStorageBuffer> >,T>;
+
template<typename T, uint32_t alignment>
__NBL_CAPABILITY_PhysicalStorageBufferAddresses
[[vk::ext_instruction(spv::OpLoad)]]
- T load(pointer_t<spv::StorageClassPhysicalStorageBuffer,T> pointer, [[vk::ext_literal]] uint32_t __aligned = /*Aligned*/0x00000002, [[vk::ext_literal]] uint32_t __alignment = alignment);
+ T load(bda_pointer_t<T> pointer, [[vk::ext_literal]] uint32_t __aligned = /*Aligned*/0x00000002, [[vk::ext_literal]] uint32_t __alignment = alignment);

template<typename T, typename P>
[[vk::ext_instruction(spv::OpLoad)]]
@@ -216,7 +238,7 @@ enable_if_t<is_spirv_type_v<P>,T> load(P pointer);
template<typename T, uint32_t alignment>
__NBL_CAPABILITY_PhysicalStorageBufferAddresses
[[vk::ext_instruction(spv::OpStore)]]
- void store(pointer_t<spv::StorageClassPhysicalStorageBuffer,T> pointer, T obj, [[vk::ext_literal]] uint32_t __aligned = /*Aligned*/0x00000002, [[vk::ext_literal]] uint32_t __alignment = alignment);
+ void store(bda_pointer_t<T> pointer, T obj, [[vk::ext_literal]] uint32_t __aligned = /*Aligned*/0x00000002, [[vk::ext_literal]] uint32_t __alignment = alignment);

template<typename T, typename P>
[[vk::ext_instruction(spv::OpStore)]]
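(Illustration, not part of the diff: with the bda_pointer_t alias above, a call-site for the reworked load/store would look roughly like the sketch below. The spirv:: qualification, the address already being in hand as a uint64_t (e.g. read from a push constant), and the 4-byte alignment for uint32_t are all assumptions; the address-to-pointer conversion leans on the generic bitcast declared at the end of this file, while the OpCapability comes from the capability-annotated declarations themselves.)

void bdaSketch(uint64_t deviceAddress, uint32_t value)
{
    // reinterpret the raw 64-bit device address as an OpTypePointer in PhysicalStorageBuffer storage
    spirv::bda_pointer_t<uint32_t> ptr = spirv::bitcast<spirv::bda_pointer_t<uint32_t> >(deviceAddress);
    uint32_t old = spirv::load<uint32_t,4>(ptr);  // OpLoad ... Aligned 4
    spirv::store<uint32_t,4>(ptr, old + value);   // OpStore ... Aligned 4
}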
@@ -234,20 +256,22 @@ void controlBarrier(uint32_t executionScope, uint32_t memoryScope, uint32_t memo
void memoryBarrier(uint32_t memoryScope, uint32_t memorySemantics);

// Add specializations if you need to emit a `ext_capability` (this means that the instruction needs to forward through an `impl::` struct and so on)
+ // TODO: better constraints, one should only be able to cast fundamental types, etc. https://registry.khronos.org/SPIR-V/specs/unified1/SPIRV.html#OpBitcast
+ #if 0
template<typename T, typename U>
[[vk::ext_instruction(spv::OpBitcast)]]
enable_if_t<is_spirv_type_v<T> && is_spirv_type_v<U>, T> bitcast(U);

- template<typename T>
+ template<typename U, typename T>
__NBL_CAPABILITY_PhysicalStorageBufferAddresses
[[vk::ext_instruction(spv::OpBitcast)]]
- uint64_t bitcast(pointer_t<spv::StorageClassPhysicalStorageBuffer,T>);
+ enable_if_t<is_same_v<U,uint64_t2>||is_same_v<U,uint32_t2>,U> bitcast(bda_pointer_t<T>);

- template<typename T>
+ template<typename T, typename U>
__NBL_CAPABILITY_PhysicalStorageBufferAddresses
[[vk::ext_instruction(spv::OpBitcast)]]
- pointer_t<spv::StorageClassPhysicalStorageBuffer,T> bitcast(uint64_t);
-
+ enable_if_t<is_same_v<U,uint64_t2>||is_same_v<U,uint32_t2>,bda_pointer_t<T> > bitcast(U);
+ #endif
template<class T, class U>
[[vk::ext_instruction(spv::OpBitcast)]]
T bitcast(U);
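(Note, not part of the diff: the #if 0 block above anticipates constraining pointer casts to uint64_t or uint32_t2, which are the two representations SPIR-V allows to be bitcast to and from a PhysicalStorageBuffer pointer. Until those overloads are enabled, the unconstrained bitcast at the end covers both directions; a minimal sketch, assuming spirv:: qualification and that the address arrives as two 32-bit words, e.g. out of a raw SSBO.)

spirv::bda_pointer_t<float> fromWordsSketch(uint32_t2 addressWords)
{
    // OpBitcast from a 2-component 32-bit integer vector to a PhysicalStorageBuffer pointer
    return spirv::bitcast<spirv::bda_pointer_t<float> >(addressWords);
}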