|
2 | 2 | using UnityEngine.Rendering.RenderGraphModule;
|
3 | 3 | using UnityEngine.Rendering;
|
4 | 4 | using UnityEngine.Rendering.Universal;
|
| 5 | +using UnityEngine.Rendering.RenderGraphModule.Util; |
| 6 | +using UnityEngine.Experimental.Rendering; |
| 7 | +using static UnityEditor.ShaderData; |
5 | 8 |
|
6 | 9 | //This example blits the active CameraColor to a new texture. It shows how to do a blit with a material, and how to use the ResourceData to avoid another blit back to the active color target.
|
7 | 10 | //This example is for API demonstrative purposes.
|
8 | 11 |
|
9 |
| -public class BlitAndSwapColorRendererFeature : ScriptableRendererFeature |
10 |
| -{ |
11 |
| - |
12 |
| - // This pass blits the whole screen for a given material to a temp texture, and swaps the UniversalResourceData.cameraColor to this temp texture. |
13 |
| - // Therefor, the next pass that references the cameraColor will reference this new temp texture as the cameraColor, saving us a blit. |
14 |
| - // Using the ResourceData, you can manage swapping of resources yourself and don't need a bespoke API like the SwapColorBuffer API that was specific for the cameraColor. |
15 |
| - // This allows you to write more decoupled passes without the added costs of avoidable copies/blits. |
16 |
| - class BlitAndSwapColorPass : ScriptableRenderPass |
17 |
| - { |
18 |
| - const string m_PassName = "BlitAndSwapColorPass"; |
19 |
| - |
20 |
| - // The data we want to transfer to the render function after recording. |
21 |
| - class PassData |
22 |
| - { |
23 |
| - // For the blit operation we will need the source and destination of the color attachments. |
24 |
| - public TextureHandle source; |
25 |
| - public TextureHandle destination; |
26 |
| - // We will also need a material to transform the color attachment when making a blit operation. |
27 |
| - public Material material; |
28 |
| - } |
29 |
| - |
30 |
| - // Scale bias is used to blit from source to distination given a 2d scale in the x and y parameters |
31 |
| - // and an offset in the z and w parameters. |
32 |
| - static Vector4 scaleBias = new Vector4(1f, 1f, 0f, 0f); |
33 |
| - |
34 |
| - // Material used in the blit operation. |
35 |
| - Material m_BlitMaterial; |
36 |
| - |
37 |
| - // Function used to transfer the material from the renderer feature to the render pass. |
38 |
| - public void Setup(Material mat) |
39 |
| - { |
40 |
| - m_BlitMaterial = mat; |
41 |
| - } |
42 |
| - |
43 |
| - public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData) |
44 |
| - { |
45 |
| - var resourceData = frameData.Get<UniversalResourceData>(); |
46 |
| - |
47 |
| - //This should never happen since we set m_Pass.requiresIntermediateTexture = true; |
48 |
| - //Unless you set the render event to AfterRendering, where we only have the BackBuffer. |
49 |
| - if (resourceData.isActiveTargetBackBuffer) |
50 |
| - { |
51 |
| - Debug.LogError($"Skipping render pass. BlitAndSwapColorRendererFeature requires an intermediate ColorTexture, we can't use the BackBuffer as a texture input."); |
52 |
| - return; |
53 |
| - } |
54 |
| - |
55 |
| - |
56 |
| - // Starts the recording of the render graph pass given the name of the pass |
57 |
| - // and outputting the data used to pass data to the execution of the render function. |
58 |
| - using (var builder = renderGraph.AddRasterRenderPass<PassData>(m_PassName, out var passData)) |
59 |
| - { |
60 |
| - // Initialize the pass data |
61 |
| - InitPassData(renderGraph, frameData, ref passData); |
62 | 12 |
|
63 |
| - // Sets input. |
64 |
| - builder.UseTexture(passData.source); |
// Blits the whole screen through a given material into a temporary texture, then swaps
// UniversalResourceData.cameraColor over to that texture. Any later pass that reads
// cameraColor therefore picks up the new texture, which saves blitting back again.
// Managing the swap yourself through ResourceData keeps passes decoupled, without the
// cost of avoidable copies/blits and without a bespoke API such as SwapColorBuffer
// that was specific to the cameraColor.
public class BlitAndSwapColorPass : ScriptableRenderPass
{
    const string m_PassName = "BlitAndSwapColorPass";

    // Material applied while blitting.
    Material m_BlitMaterial;

    // Called by the renderer feature to hand its material over to this pass.
    public void Setup(Material mat)
    {
        m_BlitMaterial = mat;

        // This pass samples the current color texture, so the BackBuffer cannot be the
        // input; requesting an intermediate texture makes URP create one automatically.
        // Setting the flag here rather than in the RenderFeature keeps the pass
        // self-contained, so it can also be enqueued directly from a MonoBehaviour
        // without any RenderFeature at all.
        requiresIntermediateTexture = true;
    }

    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
    {
        // UniversalResourceData holds every texture handle the renderer uses, including
        // the active color and depth targets the camera renders into.
        var resourceData = frameData.Get<UniversalResourceData>();

        // Should never trigger because requiresIntermediateTexture is set — unless the
        // pass runs at the AfterRendering event, where only the BackBuffer exists.
        if (resourceData.isActiveTargetBackBuffer)
        {
            Debug.LogError($"Skipping render pass. BlitAndSwapColorRendererFeature requires an intermediate ColorTexture, we can't use the BackBuffer as a texture input.");
            return;
        }

        var srcHandle = resourceData.activeColorTexture;

        // The destination is created here with the same dimensions as the active color
        // texture; its contents are fully overwritten, so no clear is needed.
        var dstDesc = renderGraph.GetTextureDesc(srcHandle);
        dstDesc.name = $"CameraColor-{m_PassName}";
        dstDesc.clearBuffer = false;

        var dstHandle = renderGraph.CreateTexture(dstDesc);

        // A single helper call records the whole material blit into the render graph.
        var blitParams = new RenderGraphUtils.BlitMaterialParameters(srcHandle, dstHandle, m_BlitMaterial, 0);
        renderGraph.AddBlitPass(blitParams, passName: m_PassName);

        // FrameData lets us read and write the internal pipeline buffers. Pointing
        // cameraColor at the texture we just wrote means RenderGraph, which tracks
        // resources and dependencies, routes all following passes to the right buffer.
        // Caveat: when the color buffer persists across frames or between cameras
        // (e.g. camera stacking), you must use an RTHandle and manage its lifetime
        // yourself.
        resourceData.cameraColor = dstHandle;
    }
}
115 | 69 |
|
116 |
| - |
| 70 | +public class BlitAndSwapColorRendererFeature : ScriptableRendererFeature |
| 71 | +{ |
117 | 72 | [Tooltip("The material used when making the blit operation.")]
|
118 | 73 | public Material material;
|
119 | 74 |
|
@@ -143,11 +98,7 @@ public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingD
|
143 | 98 | }
|
144 | 99 |
|
145 | 100 | m_Pass.Setup(material);
|
146 |
| - renderer.EnqueuePass(m_Pass); |
147 |
| - |
148 |
| - //The pass will read the current color texture. That needs to be an intermediate texture. It's not supported to use the BackBuffer as input texture. |
149 |
| - //By setting this property, URP will automatically create an intermediate texture. |
150 |
| - m_Pass.requiresIntermediateTexture = true; |
| 101 | + renderer.EnqueuePass(m_Pass); |
151 | 102 | }
|
152 | 103 | }
|
153 | 104 |
|
|
0 commit comments