 #include "src/gpu/graphite/TextureProxy.h"
 #include "src/gpu/graphite/UniformManager.h"
 #include "src/gpu/graphite/vk/VulkanBuffer.h"
+#include "src/gpu/graphite/vk/VulkanCaps.h"
 #include "src/gpu/graphite/vk/VulkanDescriptorSet.h"
 #include "src/gpu/graphite/vk/VulkanFramebuffer.h"
 #include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"
@@ -36,6 +37,82 @@ namespace skgpu::graphite {
 
 class VulkanDescriptorSet;
 
+/**
+ * Since intrinsic uniforms need to be read in the vertex shader, we cannot use protected buffers
+ * for them when submitting protected work. Thus, in order to upload data to them, we need to make
+ * them mappable rather than fill them with copy commands (which would require the buffers to be
+ * protected). This helper class manages rotating through buffers and writing each new occurrence
+ * of a set of intrinsic uniforms into the current buffer.
+ *
+ * Ideally we would remove this class and instead use push constants for all intrinsic uniforms.
+ */
+class VulkanCommandBuffer::IntrinsicConstantsManager {
+public:
+    BindBufferInfo add(VulkanCommandBuffer* cb, UniformDataBlock intrinsicValues) {
+        static constexpr int kNumSlots = 8;
+
+        BindBufferInfo* existing = fCachedIntrinsicValues.find(intrinsicValues);
+        if (existing) {
+            return *existing;
+        }
+
+        SkASSERT(!cb->fActiveRenderPass);
+
+        const Caps* caps = cb->fSharedContext->caps();
+        const uint32_t stride =
+                SkAlignTo(intrinsicValues.size(), caps->requiredUniformBufferAlignment());
+        if (!fCurrentBuffer || fSlotsUsed == kNumSlots) {
+            VulkanResourceProvider* resourceProvider = cb->fResourceProvider;
+            sk_sp<Buffer> buffer = resourceProvider->findOrCreateBuffer(stride * kNumSlots,
+                                                                        BufferType::kUniform,
+                                                                        AccessPattern::kHostVisible,
+                                                                        "IntrinsicConstantBuffer");
+            if (!buffer) {
+                return {};
+            }
+            VulkanBuffer* ptr = static_cast<VulkanBuffer*>(buffer.release());
+            fCurrentBuffer = sk_sp<VulkanBuffer>(ptr);
+
+            fSlotsUsed = 0;
+
+            if (!fCurrentBuffer) {
+                // If we failed to create a GPU buffer to hold the intrinsic uniforms, we will fail
+                // the Recording being inserted, so return an empty bind info.
+                return {};
+            }
+            cb->trackResource(fCurrentBuffer);
+        }
+
+        SkASSERT(fCurrentBuffer && fSlotsUsed < kNumSlots);
+        void* mapPtr = fCurrentBuffer->map();
+        if (!mapPtr) {
+            return {};
+        }
+        uint32_t offset = (fSlotsUsed++) * stride;
+        mapPtr = SkTAddOffset<void>(mapPtr, static_cast<ptrdiff_t>(offset));
+        memcpy(mapPtr, intrinsicValues.data(), intrinsicValues.size());
+
+        BindBufferInfo binding{
+                fCurrentBuffer.get(), offset, SkTo<uint32_t>(intrinsicValues.size())};
+        fCachedIntrinsicValues.set(UniformDataBlock::Make(intrinsicValues, &fUniformData), binding);
+        return binding;
+    }
+
+private:
+    // The current buffer being filled up, as well as how much of it has been written to.
+    sk_sp<VulkanBuffer> fCurrentBuffer;
+    int fSlotsUsed = 0;  // in multiples of the intrinsic uniform size and UBO binding requirement
+
+    // All uploaded intrinsic uniform sets and where they are on the GPU. All uniform sets are
+    // cached for the duration of a CommandBuffer since the maximum number of elements in this
+    // collection will equal the number of render passes and the intrinsic constants aren't that
+    // large. This maximizes the chance for reuse between passes.
+    skia_private::THashMap<UniformDataBlock, BindBufferInfo, UniformDataBlock::Hash>
+            fCachedIntrinsicValues;
+    SkArenaAlloc fUniformData{0};
+};
+
+
 std::unique_ptr<VulkanCommandBuffer> VulkanCommandBuffer::Make(
         const VulkanSharedContext* sharedContext,
         VulkanResourceProvider* resourceProvider,
@@ -121,6 +198,7 @@ void VulkanCommandBuffer::onResetCommandBuffer() {
     SkASSERT(!fActive);
     VULKAN_CALL_ERRCHECK(fSharedContext, ResetCommandPool(fSharedContext->device(), fPool, 0));
     fActiveGraphicsPipeline = nullptr;
+    fIntrinsicConstants = nullptr;
     fBindUniformBuffers = true;
     fBoundIndexBuffer = VK_NULL_HANDLE;
     fBoundIndexBufferOffset = 0;
@@ -156,6 +234,7 @@ void VulkanCommandBuffer::begin() {
 
     VULKAN_CALL_ERRCHECK(fSharedContext,
                          BeginCommandBuffer(fPrimaryCommandBuffer, &cmdBufferBeginInfo));
+    fIntrinsicConstants = std::make_unique<IntrinsicConstantsManager>();
     fActive = true;
 }
 
@@ -369,33 +448,24 @@ void VulkanCommandBuffer::waitUntilFinished() {
                                       /*timeout=*/UINT64_MAX));
 }
 
-void VulkanCommandBuffer::updateIntrinsicUniforms(SkIRect viewport) {
+bool VulkanCommandBuffer::updateIntrinsicUniforms(SkIRect viewport) {
     SkASSERT(fActive && !fActiveRenderPass);
 
     // The SkSL has declared these as a top-level interface block, which will use std140 in Vulkan.
     // If we switch to supporting push constants here, it would be std430 instead.
     UniformManager intrinsicValues{Layout::kStd140};
     CollectIntrinsicUniforms(fSharedContext->caps(), viewport, fDstCopyBounds, &intrinsicValues);
-    SkSpan<const char> bytes = intrinsicValues.finish();
-    SkASSERT(bytes.size_bytes() == VulkanResourceProvider::kIntrinsicConstantSize);
-
-    sk_sp<Buffer> intrinsicUniformBuffer = fResourceProvider->refIntrinsicConstantBuffer();
-    const VulkanBuffer* intrinsicVulkanBuffer =
-            static_cast<VulkanBuffer*>(intrinsicUniformBuffer.get());
-    SkASSERT(intrinsicVulkanBuffer && intrinsicVulkanBuffer->size() >= bytes.size_bytes());
-
-    fUniformBuffersToBind[VulkanGraphicsPipeline::kIntrinsicUniformBufferIndex] = {
-            intrinsicUniformBuffer.get(),
-            /*offset=*/0,
-            SkTo<uint32_t>(bytes.size_bytes())
-    };
-
-    this->updateBuffer(intrinsicVulkanBuffer, bytes.data(), bytes.size_bytes());
+    BindBufferInfo binding =
+            fIntrinsicConstants->add(this, UniformDataBlock::Wrap(&intrinsicValues));
+    if (!binding) {
+        return false;
+    } else if (binding ==
+               fUniformBuffersToBind[VulkanGraphicsPipeline::kIntrinsicUniformBufferIndex]) {
+        return true;  // no binding change needed
+    }
 
-    // Ensure the buffer update is completed and made visible before reading
-    intrinsicVulkanBuffer->setBufferAccess(this, VK_ACCESS_UNIFORM_READ_BIT,
-                                           VK_PIPELINE_STAGE_VERTEX_SHADER_BIT);
-    this->trackResource(std::move(intrinsicUniformBuffer));
+    fUniformBuffersToBind[VulkanGraphicsPipeline::kIntrinsicUniformBufferIndex] = binding;
+    return true;
 }
 
 bool VulkanCommandBuffer::onAddRenderPass(const RenderPassDesc& renderPassDesc,
@@ -424,7 +494,9 @@ bool VulkanCommandBuffer::onAddRenderPass(const RenderPassDesc& renderPassDesc,
         }
     }
 
-    this->updateIntrinsicUniforms(viewport);
+    if (!this->updateIntrinsicUniforms(viewport)) {
+        return false;
+    }
     this->setViewport(viewport);
 
     if (!this->beginRenderPass(renderPassDesc,
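
For readers skimming the new `IntrinsicConstantsManager`, here is a minimal standalone sketch of the slot-rotation idea it implements. This is not Skia code; `SlotAllocator`, `Slot`, and the example sizes are invented for illustration. Each set of uniform values is copied into the next fixed-size slot of a host-mappable buffer, with the slot stride aligned to the UBO offset requirement, and a fresh buffer is started once all slots in the current one have been used.

```cpp
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Stand-in for BindBufferInfo: which buffer a write landed in and at what offset.
struct Slot {
    uint32_t bufferId;
    uint32_t offset;
};

// Packs fixed-size writes into aligned slots of a host-visible buffer and starts a new
// buffer once every slot is used, mirroring the rotation in IntrinsicConstantsManager.
class SlotAllocator {
public:
    SlotAllocator(uint32_t valueSize, uint32_t uboAlignment, int numSlots)
            : fStride(((valueSize + uboAlignment - 1) / uboAlignment) * uboAlignment)
            , fNumSlots(numSlots) {}

    Slot add(const void* data, uint32_t size) {
        if (fCurrent.empty() || fSlotsUsed == fNumSlots) {
            fCurrent.assign(fStride * fNumSlots, 0);  // "allocate" a new mappable buffer
            ++fBufferId;
            fSlotsUsed = 0;
        }
        uint32_t offset = fSlotsUsed++ * fStride;
        std::memcpy(fCurrent.data() + offset, data, size);
        return {fBufferId, offset};
    }

private:
    uint32_t fStride;
    int fNumSlots;
    int fSlotsUsed = 0;
    uint32_t fBufferId = 0;
    std::vector<uint8_t> fCurrent;
};

int main() {
    // e.g. 48 bytes of uniforms, a 64-byte UBO offset alignment, and 8 slots per buffer
    SlotAllocator alloc(48, 64, 8);
    float values[12] = {};
    for (int i = 0; i < 10; ++i) {
        Slot s = alloc.add(values, sizeof(values));
        std::printf("write %d -> buffer %u, offset %u\n", i, s.bufferId, s.offset);
    }
    return 0;
}
```

The sketch omits the hash-map cache the real class keeps; per its comment, caching every uploaded uniform set for the lifetime of the command buffer is cheap (at most one entry per render pass) and maximizes reuse between passes.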