|
39 | 39 | PipelineMTL& pipelineImpl = (PipelineMTL&)pipeline; |
40 | 40 | m_CurrentPipeline = &pipelineImpl; |
41 | 41 |
|
42 | | - switch(m_CurrentPipeline->m_pipelineType) { |
43 | | - case nri::PipelineMTL::Compute: |
44 | | - m_ComputeEncoder = [m_Handle computeCommandEncoderWithDescriptor: NULL]; |
45 | | - break; |
46 | | - default: |
47 | | - break; |
48 | | - } |
| 42 | +// if(m_CurrentPipeline->m_pipelineType == nri::PipelineMTL::Compute) { |
| 43 | + // m_ComputeEncoder = [m_Handle computeCommandEncoderWithDescriptor: NULL]; |
| 44 | + // } |
| 45 | + |
| 46 | +} |
// TODO: unimplemented stub — pipeline-layout state is not yet tracked by the
// Metal backend. Callers invoke this before SetDescriptorSet/SetConstants, so
// the layout will need to be cached here once descriptor binding is implemented.
void CommandBufferMTL::SetPipelineLayout(const PipelineLayout& pipelineLayout) {

}
// TODO: unimplemented stub — descriptor sets are not yet bound to the Metal
// argument tables. dynamicConstantBufferOffsets is currently ignored.
void CommandBufferMTL::SetDescriptorSet(uint32_t setIndexInPipelineLayout, const DescriptorSet& descriptorSet, const uint32_t* dynamicConstantBufferOffsets) {

}
// TODO: unimplemented. The commented-out sketch below shows the intended mapping of
// push constants to Metal's setVertexBytes/setFragmentBytes/setBytes calls, gated by
// the shader-stage mask of the push-constant range. It cannot be enabled yet because
// `pDesc` (the per-range stage mask, presumably from the cached pipeline layout —
// see SetPipelineLayout) is not available here. NOTE(review): the compute path would
// also need m_ComputeEncoder rather than m_RendererEncoder — confirm when enabling.
void CommandBufferMTL::SetConstants(uint32_t pushConstantIndex, const void* data, uint32_t size) {
    //if (pDesc->mUsedStages & SHADER_STAGE_VERT)
    //{
    //    [m_RendererEncoder setVertexBytes:data length:size atIndex:pushConstantIndex];
    //}

    //if (pDesc->mUsedStages & SHADER_STAGE_FRAG)
    //{
    //    [m_RendererEncoder setFragmentBytes:data length:size atIndex:pushConstantIndex];
    //}

    //if (pDesc->mUsedStages & SHADER_STAGE_COMP)
    //{
    //    [m_RendererEncoder setBytes:data length:size atIndex:pushConstantIndex];
    //}

}
51 | | -void CommandBufferMTL::SetPipelineLayout(const PipelineLayout& pipelineLayout) {} |
52 | | -void CommandBufferMTL::SetDescriptorSet(uint32_t setIndexInPipelineLayout, const DescriptorSet& descriptorSet, const uint32_t* dynamicConstantBufferOffsets) {} |
53 | | -void CommandBufferMTL::SetConstants(uint32_t pushConstantIndex, const void* data, uint32_t size) {} |
54 | 70 | void CommandBufferMTL::SetDescriptorPool(const DescriptorPool& descriptorPool) {} |
55 | | -void CommandBufferMTL::Barrier(const BarrierGroupDesc& barrierGroupDesc) {} |
// Records an encoder-scoped memory barrier on the active render encoder.
//
// MTLBarrierScope is an option set, so one memoryBarrierWithScope: call with the
// OR'ed mask is equivalent to — and cheaper than — three separate calls with one
// scope each, which is what this method previously issued.
//
// NOTE(review): barrierGroupDesc is currently ignored and all three scopes are
// flushed unconditionally between fragment and vertex stages. Once buffer/texture
// barrier translation is implemented, derive the scope mask and stage masks from
// the desc instead (see the original per-flag checks, e.g. BARRIER_FLAG_BUFFERS).
void CommandBufferMTL::Barrier(const BarrierGroupDesc& barrierGroupDesc) {
    NSCAssert(m_RendererEncoder, @"encoder set");

    const MTLBarrierScope scope =
        MTLBarrierScopeBuffers | MTLBarrierScopeTextures | MTLBarrierScopeRenderTargets;

    [m_RendererEncoder memoryBarrierWithScope:scope
                                  afterStages:MTLRenderStageFragment
                                 beforeStages:MTLRenderStageVertex];
}
// FIXME: passing NULL here is invalid — renderCommandEncoderWithDescriptor:
// requires a non-nil MTLRenderPassDescriptor, so in practice this returns nil
// and every subsequent encoder call is a silent no-op (messaging nil).
// attachmentsDesc is ignored; it should be translated into the descriptor's
// color/depth-stencil attachments (texture, loadAction, storeAction, clear values).
void CommandBufferMTL::BeginRendering(const AttachmentsDesc& attachmentsDesc) {
    m_RendererEncoder = [m_Handle renderCommandEncoderWithDescriptor: NULL];
}
|
61 | 100 | m_RendererEncoder = nil; |
62 | 101 | m_ComputeEncoder = nil; |
63 | 102 | } |
64 | | -void CommandBufferMTL::SetViewports(const Viewport* viewports, uint32_t viewportNum) {} |
// Translates the NRI viewports to MTLViewport and submits them in a single
// setViewports:count: call.
//
// Previously this method stack-allocated the scratch array, never filled it, and
// left the encoder call commented out — i.e. it was a no-op with a dead allocation.
void CommandBufferMTL::SetViewports(const Viewport* viewports, uint32_t viewportNum) {
    NSCAssert(m_RendererEncoder, @"encoder set");

    MTLViewport* mtlViewports = StackAlloc(MTLViewport, viewportNum);
    for (uint32_t i = 0; i < viewportNum; i++) {
        const Viewport& in = viewports[i];
        mtlViewports[i].originX = in.x;
        mtlViewports[i].originY = in.y;
        mtlViewports[i].width = in.width;
        mtlViewports[i].height = in.height;
        // NOTE(review): assumes NRI's depth range maps straight onto Metal's
        // znear/zfar — confirm field names against the NRI Viewport declaration.
        mtlViewports[i].znear = in.depthRangeMin;
        mtlViewports[i].zfar = in.depthRangeMax;
    }

    [m_RendererEncoder setViewports:mtlViewports count:viewportNum];
}
65 | 108 | void CommandBufferMTL::SetScissors(const Rect* rects, uint32_t rectNum) { |
66 | 109 | NSCAssert(m_RendererEncoder, @"encoder set"); |
67 | 110 | MTLScissorRect rect; |
|
0 commit comments