@@ -1,5 +1,5 @@
 /*
- * Copyright 2019-2024 Diligent Graphics LLC
+ * Copyright 2019-2025 Diligent Graphics LLC
  * Copyright 2015-2019 Egor Yusov
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -194,7 +194,7 @@ class ShaderResourceCacheVk : public ShaderResourceCacheBase
 
     void AssignDescriptorSetAllocation(Uint32 SetIndex, DescriptorSetAllocation&& Allocation)
     {
-        auto& DescrSet = GetDescriptorSet(SetIndex);
+        DescriptorSet& DescrSet = GetDescriptorSet(SetIndex);
         VERIFY(DescrSet.GetSize() > 0, "Descriptor set is empty");
         VERIFY(!DescrSet.m_DescriptorSetAllocation, "Descriptor set allocation has already been initialized");
         DescrSet.m_DescriptorSetAllocation = std::move(Allocation);
@@ -260,9 +260,9 @@ class ShaderResourceCacheVk : public ShaderResourceCacheBase
     template <bool VerifyOnly>
     void TransitionResources(DeviceContextVkImpl* pCtxVkImpl);
 
-    __forceinline Uint32 GetDynamicBufferOffsets(DeviceContextIndex CtxId,
-                                                 std::vector<uint32_t>& Offsets,
-                                                 Uint32 StartInd) const;
+    Uint32 GetDynamicBufferOffsets(DeviceContextVkImpl* pCtx,
+                                   std::vector<uint32_t>& Offsets,
+                                   Uint32 StartInd) const;
 
 private:
     Resource* GetFirstResourcePtr()
@@ -316,83 +316,4 @@ __forceinline auto ShaderResourceCacheVk::Resource::GetDescriptorWriteInfo<Descr
 template <>
 __forceinline auto ShaderResourceCacheVk::Resource::GetDescriptorWriteInfo<DescriptorType::AccelerationStructure>() const { return GetAccelerationStructureWriteInfo(); }
 
-
-__forceinline Uint32 ShaderResourceCacheVk::GetDynamicBufferOffsets(DeviceContextIndex CtxId,
-                                                                    std::vector<uint32_t>& Offsets,
-                                                                    Uint32 StartInd) const
-{
-    // If any of the sets being bound include dynamic uniform or storage buffers, then
-    // pDynamicOffsets includes one element for each array element in each dynamic descriptor
-    // type binding in each set. Values are taken from pDynamicOffsets in an order such that
-    // all entries for set N come before set N+1; within a set, entries are ordered by the binding
-    // numbers (unclear if this is SPIRV binding or VkDescriptorSetLayoutBinding number) in the
-    // descriptor set layouts; and within a binding array, elements are in order. (13.2.5)
-
-    // In each descriptor set, all uniform buffers with dynamic offsets (DescriptorType::UniformBufferDynamic)
-    // for every shader stage come first, followed by all storage buffers with dynamic offsets
-    // (DescriptorType::StorageBufferDynamic and DescriptorType::StorageBufferDynamic_ReadOnly) for every shader stage,
-    // followed by all other resources.
-    Uint32 OffsetInd = StartInd;
-    for (Uint32 set = 0; set < m_NumSets; ++set)
-    {
-        const auto& DescrSet = GetDescriptorSet(set);
-        const auto  SetSize  = DescrSet.GetSize();
-
-        Uint32 res = 0;
-        while (res < SetSize)
-        {
-            const auto& Res = DescrSet.GetResource(res);
-            if (Res.Type == DescriptorType::UniformBufferDynamic)
-            {
-                const auto* pBufferVk = Res.pObject.ConstPtr<BufferVkImpl>();
-                // Do not verify dynamic allocation here as there may be some buffers that are not used by the PSO.
-                // The allocations of the buffers that are actually used will be verified by
-                // PipelineResourceSignatureVkImpl::DvpValidateCommittedResource().
-                const auto Offset = pBufferVk != nullptr ? pBufferVk->GetDynamicOffset(CtxId, nullptr /* Do not verify allocation*/) : 0;
-                // The effective offset used for dynamic uniform and storage buffer bindings is the sum of the relative
-                // offset taken from pDynamicOffsets, and the base address of the buffer plus base offset in the descriptor set.
-                // The range of the dynamic uniform and storage buffer bindings is the buffer range as specified in the descriptor set.
-                Offsets[OffsetInd++] = StaticCast<Uint32>(Res.BufferDynamicOffset + Offset);
-                ++res;
-            }
-            else
-                break;
-        }
-
-        while (res < SetSize)
-        {
-            const auto& Res = DescrSet.GetResource(res);
-            if (Res.Type == DescriptorType::StorageBufferDynamic ||
-                Res.Type == DescriptorType::StorageBufferDynamic_ReadOnly)
-            {
-                const auto* pBufferVkView = Res.pObject.ConstPtr<BufferViewVkImpl>();
-                const auto* pBufferVk     = pBufferVkView != nullptr ? pBufferVkView->GetBuffer<const BufferVkImpl>() : nullptr;
-                // Do not verify dynamic allocation here as there may be some buffers that are not used by the PSO.
-                // The allocations of the buffers that are actually used will be verified by
-                // PipelineResourceSignatureVkImpl::DvpValidateCommittedResource().
-                const auto Offset = pBufferVk != nullptr ? pBufferVk->GetDynamicOffset(CtxId, nullptr /* Do not verify allocation*/) : 0;
-                // The effective offset used for dynamic uniform and storage buffer bindings is the sum of the relative
-                // offset taken from pDynamicOffsets, and the base address of the buffer plus base offset in the descriptor set.
-                // The range of the dynamic uniform and storage buffer bindings is the buffer range as specified in the descriptor set.
-                Offsets[OffsetInd++] = StaticCast<Uint32>(Res.BufferDynamicOffset + Offset);
-                ++res;
-            }
-            else
-                break;
-        }
-
-#ifdef DILIGENT_DEBUG
-        for (; res < SetSize; ++res)
-        {
-            const auto& Res = DescrSet.GetResource(res);
-            VERIFY((Res.Type != DescriptorType::UniformBufferDynamic &&
-                    Res.Type != DescriptorType::StorageBufferDynamic &&
-                    Res.Type != DescriptorType::StorageBufferDynamic_ReadOnly),
-                   "All dynamic uniform and storage buffers are expected to go first in the beginning of each descriptor set");
-        }
-#endif
-    }
-    return OffsetInd - StartInd;
-}
-
 } // namespace Diligent
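
The comments in the removed GetDynamicBufferOffsets implementation above describe how Vulkan expects the dynamic-offset array to be laid out: one entry per dynamic uniform/storage buffer descriptor, ordered by set number and, within a set, by binding number. As a minimal sketch of how such an offset array is typically consumed, the helper below binds a range of descriptor sets together with their collected offsets in a single call. The helper name and parameters are illustrative assumptions, not Diligent Engine API; only vkCmdBindDescriptorSets is the actual Vulkan entry point.

#include <vector>
#include <vulkan/vulkan.h>

// Illustrative helper (not engine code): binds a contiguous range of descriptor
// sets together with the dynamic offsets gathered for them. Vulkan requires one
// offset per dynamic uniform/storage buffer descriptor, ordered by set number
// and, within a set, by binding number.
void BindSetsWithDynamicOffsets(VkCommandBuffer                     CmdBuffer,
                                VkPipelineLayout                    Layout,
                                uint32_t                            FirstSet,
                                const std::vector<VkDescriptorSet>& Sets,
                                const std::vector<uint32_t>&        DynamicOffsets)
{
    vkCmdBindDescriptorSets(CmdBuffer,
                            VK_PIPELINE_BIND_POINT_GRAPHICS,
                            Layout,
                            FirstSet,
                            static_cast<uint32_t>(Sets.size()),
                            Sets.data(),
                            static_cast<uint32_t>(DynamicOffsets.size()),
                            DynamicOffsets.data());
}

In the engine itself the offsets would come from GetDynamicBufferOffsets, which after this change takes the device context pointer rather than a context index.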