diff --git a/src/core/RenderTarget.js b/src/core/RenderTarget.js index cde6a2b53c2869..882364ccf232b4 100644 --- a/src/core/RenderTarget.js +++ b/src/core/RenderTarget.js @@ -36,7 +36,8 @@ class RenderTarget extends EventDispatcher { * @property {number} [samples=0] - The MSAA samples count. * @property {number} [count=1] - Defines the number of color attachments . Must be at least `1`. * @property {number} [depth=1] - The texture depth. - * @property {boolean} [multiview=false] - Whether this target is used for multiview rendering. + * @property {boolean} [multiview=false] - Whether this target is used for multiview rendering (WebGL OVR_multiview2 extension). + * @property {boolean} [useArrayDepthTexture=false] - Whether to create the depth texture as an array texture for per-layer depth testing. This is separate from multiview to allow array depth textures without the multiview extension (e.g., for WebGPU XR). */ /** @@ -62,7 +63,8 @@ class RenderTarget extends EventDispatcher { samples: 0, count: 1, depth: 1, - multiview: false + multiview: false, + useArrayDepthTexture: false }, options ); /** @@ -200,6 +202,16 @@ class RenderTarget extends EventDispatcher { */ this.multiview = options.multiview; + /** + * Whether to create the depth texture as an array texture for per-layer depth testing. + * This is separate from multiview to allow array depth textures without the multiview + * extension (e.g., for WebGPU XR which doesn't use OVR_multiview2). + * + * @type {boolean} + * @default false + */ + this.useArrayDepthTexture = options.useArrayDepthTexture; + } _setTextureOptions( options = {} ) { @@ -370,6 +382,8 @@ class RenderTarget extends EventDispatcher { if ( source.depthTexture !== null ) this.depthTexture = source.depthTexture.clone(); this.samples = source.samples; + this.multiview = source.multiview; + this.useArrayDepthTexture = source.useArrayDepthTexture; return this; diff --git a/src/renderers/common/Renderer.js b/src/renderers/common/Renderer.js index e689a252fe9a7b..815dd22e8ee0d6 100644 --- a/src/renderers/common/Renderer.js +++ b/src/renderers/common/Renderer.js @@ -1338,9 +1338,32 @@ class Renderer { frameBufferTarget.scissor.multiplyScalar( canvasTarget._pixelRatio ); frameBufferTarget.scissorTest = canvasTarget._scissorTest; frameBufferTarget.multiview = outputRenderTarget !== null ? outputRenderTarget.multiview : false; + frameBufferTarget.useArrayDepthTexture = outputRenderTarget !== null ? outputRenderTarget.useArrayDepthTexture : false; frameBufferTarget.resolveDepthBuffer = outputRenderTarget !== null ? outputRenderTarget.resolveDepthBuffer : true; frameBufferTarget._autoAllocateDepthBuffer = outputRenderTarget !== null ? outputRenderTarget._autoAllocateDepthBuffer : false; + // Propagate samples from output render target (important for XR which may have samples=0) + // However, when the renderer has MSAA enabled (this.samples > 0), use the renderer's samples + // to ensure MSAA is applied during scene rendering. The MSAA resolve happens when copying to output. + if ( outputRenderTarget !== null ) { + + frameBufferTarget.samples = this.samples > 0 ? this.samples : outputRenderTarget.samples; + + } + + // Propagate array texture flag for XR/multi-layer rendering + // IMPORTANT: When MSAA is enabled (this.samples > 0), we must NOT use array textures because + // WebGPU doesn't support multisampled array textures. Per-eye rendering will be used instead. 
+ if ( outputRenderTarget !== null && outputRenderTarget.texture.isArrayTexture && this.samples === 0 ) { + + frameBufferTarget.texture.isArrayTexture = true; + + } else { + + frameBufferTarget.texture.isArrayTexture = false; + + } + return frameBufferTarget; } @@ -1454,6 +1477,76 @@ class Renderer { if ( xr.cameraAutoUpdate === true ) xr.updateCamera( camera ); camera = xr.getCamera(); // use XR camera for rendering + // For XR with MSAA and ArrayCamera, we need to render each eye separately + // because MSAA textures cannot be array textures in WebGPU. + // Flow: left eye scene → left tone mapping → right eye scene → right tone mapping + if ( camera.isArrayCamera && this.samples > 0 && frameBufferTarget !== null ) { + + const xrCameras = camera.cameras; + const xrOutputRenderTarget = outputRenderTarget; + + // Render each eye separately using the EXISTING render infrastructure + for ( let eyeIndex = 0; eyeIndex < xrCameras.length; eyeIndex ++ ) { + + const eyeCamera = xrCameras[ eyeIndex ]; + + // Temporarily disable XR so _renderScene uses the single-layer framebuffer normally + xr.enabled = false; + + // For scene rendering, output goes to the single-layer MSAA framebuffer + // (not directly to XR output - that happens in the tone mapping step) + this.setRenderTarget( frameBufferTarget, 0, activeMipmapLevel ); + + // Call the existing _renderScene to render this eye's scene + // This goes through the full initialization path + this._renderScene( scene, eyeCamera, false ); + + // Re-enable XR temporarily for proper output handling + xr.enabled = true; + + // Now output the rendered frame to the appropriate XR layer + this.setRenderTarget( xrOutputRenderTarget, eyeIndex, activeMipmapLevel ); + this._activeCubeFace = eyeIndex; + + // Render the output quad (tone mapping) to copy to XR layer + const quad = this._quad; + + if ( this._nodes.hasOutputChange( frameBufferTarget.texture ) ) { + + quad.material.fragmentNode = this._nodes.getOutputNode( frameBufferTarget.texture ); + quad.material.needsUpdate = true; + + } + + // Disable XR again for the quad render to avoid ArrayCamera handling + xr.enabled = false; + const savedAutoClear = this.autoClear; + this.autoClear = false; + + this._renderScene( quad, quad.camera, false ); + + this.autoClear = savedAutoClear; + + } + + // Restore XR state + xr.enabled = true; + + // Clean up and return + nodeFrame.renderId = previousRenderId; + this._currentRenderContext = previousRenderContext; + this._currentRenderObjectFunction = previousRenderObjectFunction; + this._handleObjectFunction = previousHandleObjectFunction; + + this._callDepth --; + + sceneRef.onAfterRender( this, scene, camera, renderTarget ); + this.inspector.finishRender( null ); + + return null; + + } + } // @@ -1668,7 +1761,34 @@ class Renderer { this.autoClear = false; this.xr.enabled = false; - this._renderScene( quad, quad.camera, false ); + // For XR array textures, we need to render the output quad once per layer + // Each layer needs to sample from the correct array slice and write to the correct output layer + if ( renderTarget.texture.isArrayTexture && renderTarget.texture.image.depth > 1 ) { + + const layerCount = renderTarget.texture.image.depth; + + for ( let layer = 0; layer < layerCount; layer ++ ) { + + // Set the layer index for sampling from the array texture + // This updates a renderGroup uniform that's synced before each draw + this._nodes.setOutputLayerIndex( layer ); + + // Set the active layer for the render target + this._activeCubeFace = layer; + + 
this._renderScene( quad, quad.camera, false ); + + } + + // Reset the layer index + this._nodes.setOutputLayerIndex( 0 ); + this._activeCubeFace = 0; + + } else { + + this._renderScene( quad, quad.camera, false ); + + } this.autoClear = currentAutoClear; this.xr.enabled = currentXR; @@ -2411,7 +2531,12 @@ class Renderer { */ _resetXRState() { - this.backend.setXRTarget( null ); + if ( this.backend.isWebGPUBackend !== true ) { + + this.backend.setXRTarget( null ); + + } + this.setOutputRenderTarget( null ); this.setRenderTarget( null ); diff --git a/src/renderers/common/Textures.js b/src/renderers/common/Textures.js index b1eab665e9b526..3f9a8e4d60f2b0 100644 --- a/src/renderers/common/Textures.js +++ b/src/renderers/common/Textures.js @@ -86,7 +86,13 @@ class Textures extends DataMap { depthTexture.image.height = mipHeight; depthTexture.image.depth = size.depth; depthTexture.renderTarget = renderTarget; - depthTexture.isArrayTexture = renderTarget.multiview === true && size.depth > 1; + + // Determine if depth texture should be an array texture: + // - Either useArrayDepthTexture is explicitly set, OR multiview is enabled (for backward compat) + // - AND there are multiple layers (depth > 1) + // - AND MSAA is not enabled (WebGPU doesn't support multisampled depth array textures) + const useArrayDepth = renderTarget.useArrayDepthTexture || renderTarget.multiview; + depthTexture.isArrayTexture = size.depth > 1 && renderTarget.samples <= 1 && useArrayDepth; depthTextureMips[ activeMipmapLevel ] = depthTexture; diff --git a/src/renderers/common/XRManager.js b/src/renderers/common/XRManager.js index a31d2f70f9d922..8bd1b0036ade45 100644 --- a/src/renderers/common/XRManager.js +++ b/src/renderers/common/XRManager.js @@ -7,7 +7,7 @@ import { Vector2 } from '../../math/Vector2.js'; import { Vector3 } from '../../math/Vector3.js'; import { Vector4 } from '../../math/Vector4.js'; import { WebXRController } from '../webxr/WebXRController.js'; -import { AddEquation, BackSide, CustomBlending, DepthFormat, DepthStencilFormat, FrontSide, RGBAFormat, UnsignedByteType, UnsignedInt248Type, UnsignedIntType, ZeroFactor } from '../../constants.js'; +import { AddEquation, BackSide, CustomBlending, DepthFormat, DepthStencilFormat, FrontSide, RGBAFormat, UnsignedByteType, UnsignedInt248Type, UnsignedIntType, ZeroFactor, LinearFilter } from '../../constants.js'; import { DepthTexture } from '../../textures/DepthTexture.js'; import { XRRenderTarget } from './XRRenderTarget.js'; import { CylinderGeometry } from '../../geometries/CylinderGeometry.js'; @@ -17,6 +17,7 @@ import { PlaneGeometry } from '../../geometries/PlaneGeometry.js'; import { MeshBasicMaterial } from '../../materials/MeshBasicMaterial.js'; import { Mesh } from '../../objects/Mesh.js'; import { warn } from '../../utils.js'; +import { RenderTarget } from '../../core/RenderTarget.js'; const _cameraLPos = /*@__PURE__*/ new Vector3(); const _cameraRPos = /*@__PURE__*/ new Vector3(); @@ -181,6 +182,7 @@ class XRManager extends EventDispatcher { * @readonly */ this._supportsGlBinding = typeof XRWebGLBinding !== 'undefined'; + this._supportsWebGPUBinding = typeof XRGPUBinding !== 'undefined'; this._frameBufferTargets = null; @@ -343,6 +345,25 @@ class XRManager extends EventDispatcher { */ this._glBinding = null; + /** + * A reference to the current XR WebGPU binding (when using WebGPU backend). 
+ * + * @private + * @type {?XRWebGLBinding} + * @default null + */ + + this._webgpuBinding = null; + + /** + * A reference to the current renderer backend (WebGL/WebGPU). + * + * @private + * @type {?Object} + * @default null + */ + this._backend = null; + /** * A reference to the current XR projection layer. * @@ -613,6 +634,18 @@ class XRManager extends EventDispatcher { } + getWebGPUBinding() { + + if ( this._webgpuBinding === null && this._supportsWebGPUBinding ) { + + this._webgpuBinding = new XRGPUBinding( this._session, this._backend.device ); + + } + + return this._webgpuBinding; + + } + /** * Returns the current XR frame. * @@ -918,18 +951,15 @@ class XRManager extends EventDispatcher { async setSession( session ) { const renderer = this._renderer; - const backend = renderer.backend; + this._backend = renderer.backend; this._gl = renderer.getContext(); const gl = this._gl; - const attributes = gl.getContextAttributes(); this._session = session; if ( session !== null ) { - if ( backend.isWebGPUBackend === true ) throw new Error( 'THREE.XRManager: XR is currently not supported with a WebGPU backend. Use WebGL by passing "{ forceWebGL: true }" to the constructor of the renderer.' ); - session.addEventListener( 'select', this._onSessionEvent ); session.addEventListener( 'selectstart', this._onSessionEvent ); session.addEventListener( 'selectend', this._onSessionEvent ); @@ -939,8 +969,6 @@ class XRManager extends EventDispatcher { session.addEventListener( 'end', this._onSessionEnd ); session.addEventListener( 'inputsourceschange', this._onInputSourcesChange ); - await backend.makeXRCompatible(); - this._currentPixelRatio = renderer.getPixelRatio(); renderer.getSize( this._currentSize ); @@ -950,7 +978,31 @@ class XRManager extends EventDispatcher { // - if ( this._supportsLayers === true ) { + if ( this._session.enabledFeatures.includes( 'webgpu' ) ) { + + const glProjLayer = this.getWebGPUBinding().createProjectionLayer( { + colorFormat: this.getWebGPUBinding().getPreferredColorFormat(), + depthStencilFormat: 'depth24plus' } ); + this._glProjLayer = glProjLayer; + const layersArray = [ glProjLayer ]; + + session.updateRenderState( { layers: layersArray } ); + + this._referenceSpace = await session.requestReferenceSpace( this.getReferenceSpaceType() ); + + this._xrRenderTarget = new RenderTarget( glProjLayer.textureWidth, glProjLayer.textureHeight, { + depth: 2, + minFilter: LinearFilter, + magFilter: LinearFilter, + depthBuffer: true, + multiview: false, + useArrayDepthTexture: true, + samples: 0 + } ); + + this._xrRenderTarget.texture.isArrayTexture = true; + + } else if ( this._supportsLayers === true ) { // default path using XRProjectionLayer @@ -958,6 +1010,10 @@ class XRManager extends EventDispatcher { let depthType = null; let glDepthFormat = null; + const attributes = gl.getContextAttributes(); + await this._backend.makeXRCompatible(); + this.setFoveation( this.getFoveation() ); + if ( renderer.depth ) { glDepthFormat = renderer.stencil ? 
gl.DEPTH24_STENCIL8 : gl.DEPTH_COMPONENT24; @@ -1040,6 +1096,8 @@ class XRManager extends EventDispatcher { } else { // fallback to XRWebGLLayer + await this._backend.makeXRCompatible(); + this.setFoveation( this.getFoveation() ); const layerInit = { antialias: renderer.currentSamples > 0, @@ -1077,8 +1135,6 @@ class XRManager extends EventDispatcher { // - this.setFoveation( this.getFoveation() ); - renderer._animation.setAnimationLoop( this._onAnimationFrame ); renderer._animation.setContext( session ); renderer._animation.start(); @@ -1380,9 +1436,66 @@ function onSessionEnd() { renderer._resetXRState(); + // Clean up the XR render target and its cached texture data + if ( this._xrRenderTarget !== null && renderer.backend.isWebGPUBackend ) { + + // Delete backend's and textures module's cached data for XR textures before disposal + // This is necessary because XR textures are external (from XRGPUBinding) + // and their GPU resources are managed by WebXR, not Three.js + const backend = this._backend; + const texturesModule = renderer._textures; + + // Clear the render target's descriptor cache in the backend first + if ( backend && backend.get ) { + + const renderTargetData = backend.get( this._xrRenderTarget ); + if ( renderTargetData ) { + + renderTargetData.descriptors = undefined; + + } + + } + + // Clear all textures in the textures array + const texturesArray = this._xrRenderTarget.textures; + if ( texturesArray ) { + + for ( let i = 0; i < texturesArray.length; i ++ ) { + + if ( backend && backend.delete ) backend.delete( texturesArray[ i ] ); + if ( texturesModule && texturesModule.delete ) texturesModule.delete( texturesArray[ i ] ); + + } + + } + + if ( this._xrRenderTarget.depthTexture ) { + + if ( backend && backend.delete ) backend.delete( this._xrRenderTarget.depthTexture ); + if ( texturesModule && texturesModule.delete ) texturesModule.delete( this._xrRenderTarget.depthTexture ); + + } + + // Also delete the render target data itself + if ( backend && backend.delete ) backend.delete( this._xrRenderTarget ); + if ( texturesModule && texturesModule.delete ) texturesModule.delete( this._xrRenderTarget ); + + // Clear render contexts to remove any cached texture views + if ( renderer._renderContexts && renderer._renderContexts.dispose ) { + + renderer._renderContexts.dispose(); + + } + + this._xrRenderTarget.dispose(); + + } + this._session = null; this._xrRenderTarget = null; this._glBinding = null; + this._webgpuBinding = null; this._glBaseLayer = null; this._glProjLayer = null; @@ -1566,7 +1679,16 @@ function onAnimationFrame( time, frame ) { const views = pose.views; - if ( this._glBaseLayer !== null ) { + // Check if we're using WebGPU backend with XRGPUBinding + const isWebGPUBackend = backend.isWebGPUBackend === true; + const webgpuBinding = isWebGPUBackend ? this.getWebGPUBinding() : null; + + // For WebGPU path: collect color textures and view descriptors from all views + const colorTextures = []; + const viewDescriptors = []; + let depthTexture = null; + + if ( this._glBaseLayer !== null && ! 
isWebGPUBackend ) { backend.setXRTarget( glBaseLayer.framebuffer ); @@ -1589,8 +1711,32 @@ function onAnimationFrame( time, frame ) { let viewport; - if ( this._supportsLayers === true ) { + if ( isWebGPUBackend && webgpuBinding !== null ) { + + // WebGPU path: Use XRGPUBinding to get GPUTextures directly + const gpuSubImage = webgpuBinding.getViewSubImage( this._glProjLayer, view ); + viewport = gpuSubImage.viewport; + + // Collect color textures for each view (for MRT array) + colorTextures.push( gpuSubImage.colorTexture ); + + // Collect view descriptor for each view - this is critical for creating proper 2D views into the array texture + if ( gpuSubImage.getViewDescriptor ) { + + viewDescriptors.push( gpuSubImage.getViewDescriptor() ); + + } + + if ( i === 0 ) { + + // add support for browser supplied depth texture later. + depthTexture = null; + + } + + } else if ( this._supportsLayers === true ) { + // WebGL path: Use XRWebGLBinding const glSubImage = this._glBinding.getViewSubImage( this._glProjLayer, view ); viewport = glSubImage.viewport; @@ -1643,6 +1789,18 @@ function onAnimationFrame( time, frame ) { } + // WebGPU path: Register all collected color textures after the loop + if ( isWebGPUBackend && webgpuBinding !== null && colorTextures.length > 0 ) { + + backend.setXRRenderTargetTextures( + this._xrRenderTarget, + colorTextures, // Array of GPUTextures, one per view + depthTexture, + viewDescriptors // Array of view descriptors, one per view + ); + + } + renderer.setOutputRenderTarget( this._xrRenderTarget ); } diff --git a/src/renderers/common/nodes/Nodes.js b/src/renderers/common/nodes/Nodes.js index 832544018c93a7..c8840c81e8c252 100644 --- a/src/renderers/common/nodes/Nodes.js +++ b/src/renderers/common/nodes/Nodes.js @@ -4,7 +4,7 @@ import NodeBuilderState from './NodeBuilderState.js'; import NodeMaterial from '../../../materials/nodes/NodeMaterial.js'; import { cubeMapNode } from '../../../nodes/utils/CubeMapNode.js'; import { NodeFrame } from '../../../nodes/Nodes.js'; -import { objectGroup, renderGroup, frameGroup, cubeTexture, texture, texture3D, vec3, fog, rangeFogFactor, densityFogFactor, reference, pmremTexture, screenUV } from '../../../nodes/TSL.js'; +import { objectGroup, renderGroup, frameGroup, cubeTexture, texture, texture3D, vec3, fog, rangeFogFactor, densityFogFactor, reference, pmremTexture, screenUV, uniform } from '../../../nodes/TSL.js'; import { builtin } from '../../../nodes/accessors/BuiltinNode.js'; import { CubeUVReflectionMapping, EquirectangularReflectionMapping, EquirectangularRefractionMapping } from '../../../constants.js'; @@ -15,6 +15,10 @@ const _outputNodeMap = new WeakMap(); const _chainKeys = []; const _cacheKeyValues = []; +// Dedicated uniform for output pass array layer selection +// This is separate from cameraIndex to avoid the sharedUniformGroup complexity +const _outputLayerIndex = /*@__PURE__*/ uniform( 0, 'int' ).setGroup( renderGroup ); + /** * This renderer module manages node-related objects and is the * primary interface between the renderer and the node system. @@ -719,9 +723,28 @@ class Nodes extends DataMap { const renderer = this.renderer; const cacheKey = this.getOutputCacheKey(); - const output = outputTarget.isArrayTexture ? 
- texture3D( outputTarget, vec3( screenUV, builtin( 'gl_ViewID_OVR' ) ) ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ) : - texture( outputTarget, screenUV ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ); + let output; + + if ( outputTarget.isArrayTexture ) { + + if ( ! this.backend.isWebGLBackend ) { + + // For array textures, use .depth() to specify the array layer + // _outputLayerIndex is a renderGroup uniform that's updated per-layer during output pass + output = texture( outputTarget, screenUV ).depth( _outputLayerIndex ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ); + + } else { + + // For WebGL, use gl_ViewID_OVR for multiview rendering + output = texture3D( outputTarget, vec3( screenUV, builtin( 'gl_ViewID_OVR' ) ) ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ); + + } + + } else { + + output = texture( outputTarget, screenUV ).renderOutput( renderer.toneMapping, renderer.currentColorSpace ); + + } _outputNodeMap.set( outputTarget, cacheKey ); @@ -729,6 +752,18 @@ class Nodes extends DataMap { } + /** + * Sets the output layer index for array texture output pass. + * This should be called before each layer render during the output pass. + * + * @param {number} index - The layer index. + */ + setOutputLayerIndex( index ) { + + _outputLayerIndex.value = index; + + } + /** * Triggers the call of `updateBefore()` methods * for all nodes of the given render object. diff --git a/src/renderers/webgpu/WebGPUBackend.js b/src/renderers/webgpu/WebGPUBackend.js index dab01c2a6b472e..031415c0424f97 100644 --- a/src/renderers/webgpu/WebGPUBackend.js +++ b/src/renderers/webgpu/WebGPUBackend.js @@ -175,7 +175,8 @@ class WebGPUBackend extends Backend { const adapterOptions = { powerPreference: parameters.powerPreference, - featureLevel: parameters.compatibilityMode ? 'compatibility' : undefined + featureLevel: parameters.compatibilityMode ? 'compatibility' : undefined, + xrCompatible: renderer.xr.enabled }; const adapter = ( typeof navigator !== 'undefined' ) ? await navigator.gpu.requestAdapter( adapterOptions ) : null; @@ -238,6 +239,55 @@ class WebGPUBackend extends Backend { } + /** + * Configures the given XR render target with external GPUTextures. + * + * This method is used when rendering with WebXR and the WebGPU backend + /** + * Registers external GPU textures from XRGPUBinding for use in rendering. + * This allows WebXR sessions to provide their own textures for rendering. + * + * @param {RenderTarget} renderTarget - The render target to register the textures for. + * @param {GPUTexture|Array} colorTextures - The color GPUTexture(s) from XRSubImage. Can be a single texture or array for MRT. + * @param {?GPUTexture} [depthTexture=null] - The depth/stencil GPUTexture from XRSubImage. + * @param {?Array} [viewDescriptors=null] - Array of view descriptors from XRSubImage.getViewDescriptor(), one per view. + */ + setXRRenderTargetTextures( renderTarget, colorTextures, depthTexture = null, viewDescriptors = null ) { + + // Handle both single texture and array of textures (for MRT) + const colorTextureArray = Array.isArray( colorTextures ) ? 
colorTextures : [ colorTextures ]; + + // For XR, all views typically share the same array texture, just with different view descriptors + // So we use the first color texture and store the view descriptors for later use + const colorTexture = colorTextureArray[ 0 ]; + + // Register color texture - store as 'texture' so view creation code can find it + // Also store the format from the GPUTexture for pipeline creation + this.set( renderTarget.texture, { + texture: colorTexture, + format: colorTexture.format, + externalTexture: true, + xrViewDescriptors: viewDescriptors, + initialized: true + } ); + + // Register depth texture + if ( depthTexture !== null ) { + + this.set( renderTarget.depthTexture, { + texture: depthTexture, + format: depthTexture.format, + externalTexture: true, + xrViewDescriptors: viewDescriptors, + initialized: true + } ); + + renderTarget._autoAllocateDepthBuffer = false; + + } + + } + /** * A reference to the context. * @@ -396,7 +446,7 @@ class WebGPUBackend extends Backend { } /** - * Internal to determine if the current render target is a render target array with depth 2D array texture. + * Returns whether the render target is a render target array with depth 2D array texture. * * @param {RenderContext} renderContext - The render context. * @return {boolean} Whether the render target is a render target array with depth 2D array texture. @@ -405,7 +455,7 @@ class WebGPUBackend extends Backend { */ _isRenderCameraDepthArray( renderContext ) { - return renderContext.depthTexture && renderContext.depthTexture.image.depth > 1 && renderContext.camera.isArrayCamera; + return renderContext.depthTexture && renderContext.depthTexture.isArrayTexture === true && renderContext.camera.isArrayCamera; } @@ -422,12 +472,46 @@ class WebGPUBackend extends Backend { const renderTarget = renderContext.renderTarget; const renderTargetData = this.get( renderTarget ); + // Check if any textures are external XR textures - these should not be cached + // because XR textures can change every frame + const textures = renderContext.textures; + let hasExternalXRTexture = false; + + if ( textures ) { + + for ( let i = 0; i < textures.length; i ++ ) { + + const textureData = this.get( textures[ i ] ); + if ( textureData.externalTexture === true ) { + + hasExternalXRTexture = true; + break; + + } + + } + + } + + // Also check depth texture + if ( renderContext.depthTexture ) { + + const depthTextureData = this.get( renderContext.depthTexture ); + if ( depthTextureData.externalTexture === true ) { + + hasExternalXRTexture = true; + + } + + } + let descriptors = renderTargetData.descriptors; if ( descriptors === undefined || renderTargetData.width !== renderTarget.width || renderTargetData.height !== renderTarget.height || - renderTargetData.samples !== renderTarget.samples + renderTargetData.samples !== renderTarget.samples || + hasExternalXRTexture // Don't use cache for XR textures ) { descriptors = {}; @@ -439,9 +523,8 @@ class WebGPUBackend extends Backend { const cacheKey = renderContext.getCacheKey(); let descriptorBase = descriptors[ cacheKey ]; - if ( descriptorBase === undefined ) { + if ( descriptorBase === undefined || hasExternalXRTexture ) { - const textures = renderContext.textures; const textureViews = []; let sliceIndex; @@ -452,77 +535,116 @@ class WebGPUBackend extends Backend { const textureData = this.get( textures[ i ] ); - const viewDescriptor = { - label: `colorAttachment_${ i }`, - baseMipLevel: renderContext.activeMipmapLevel, - mipLevelCount: 1, - baseArrayLayer: 
renderContext.activeCubeFace, - arrayLayerCount: 1, - dimension: GPUTextureViewDimension.TwoD - }; + // Check if this is an external XR texture with view descriptors + // Only create multiple color attachments when using an array camera (XR scene rendering) + // For output passes (non-array camera), we need only 1 color attachment + if ( textureData.externalTexture === true && textureData.xrViewDescriptors && textureData.xrViewDescriptors.length > 0 && renderContext.camera.isArrayCamera ) { + + // XR path: Use the view descriptors from XRGPUBinding to create proper 2D views + for ( let viewIndex = 0; viewIndex < textureData.xrViewDescriptors.length; viewIndex ++ ) { + + const xrViewDescriptor = textureData.xrViewDescriptors[ viewIndex ]; + const textureView = textureData.texture.createView( xrViewDescriptor ); - if ( renderTarget.isRenderTarget3D ) { + textureViews.push( { + view: textureView, + resolveTarget: undefined, + depthSlice: undefined + } ); - sliceIndex = renderContext.activeCubeFace; + } - viewDescriptor.baseArrayLayer = 0; - viewDescriptor.dimension = GPUTextureViewDimension.ThreeD; - viewDescriptor.depthOrArrayLayers = textures[ i ].image.depth; + } else if ( textureData.externalTexture === true ) { - } else if ( renderTarget.isRenderTarget && textures[ i ].image.depth > 1 ) { + // External XR texture without view descriptors OR non-array camera output pass + // Create a simple 2D view for the correct layer based on activeCubeFace + const textureView = textureData.texture.createView( { + dimension: '2d', + baseArrayLayer: renderContext.activeCubeFace, + arrayLayerCount: 1 + } ); - if ( isRenderCameraDepthArray === true ) { + textureViews.push( { + view: textureView, + resolveTarget: undefined, + depthSlice: undefined + } ); - const cameras = renderContext.camera.cameras; - for ( let layer = 0; layer < cameras.length; layer ++ ) { + } else { - const layerViewDescriptor = { - ...viewDescriptor, - baseArrayLayer: layer, - arrayLayerCount: 1, - dimension: GPUTextureViewDimension.TwoD - }; - const textureView = textureData.texture.createView( layerViewDescriptor ); - textureViews.push( { - view: textureView, - resolveTarget: undefined, - depthSlice: undefined - } ); + const viewDescriptor = { + label: `colorAttachment_${ i }`, + baseMipLevel: renderContext.activeMipmapLevel, + mipLevelCount: 1, + baseArrayLayer: renderContext.activeCubeFace, + arrayLayerCount: 1, + dimension: GPUTextureViewDimension.TwoD + }; - } + if ( renderTarget.isRenderTarget3D ) { - } else { + sliceIndex = renderContext.activeCubeFace; - viewDescriptor.dimension = GPUTextureViewDimension.TwoDArray; + viewDescriptor.baseArrayLayer = 0; + viewDescriptor.dimension = GPUTextureViewDimension.ThreeD; viewDescriptor.depthOrArrayLayers = textures[ i ].image.depth; + } else if ( renderTarget.isRenderTarget && textures[ i ].image.depth > 1 ) { + + if ( isRenderCameraDepthArray === true ) { + + const cameras = renderContext.camera.cameras; + for ( let layer = 0; layer < cameras.length; layer ++ ) { + + const layerViewDescriptor = { + ...viewDescriptor, + baseArrayLayer: layer, + arrayLayerCount: 1, + dimension: GPUTextureViewDimension.TwoD + }; + const textureView = textureData.texture.createView( layerViewDescriptor ); + textureViews.push( { + view: textureView, + resolveTarget: undefined, + depthSlice: undefined + } ); + + } + + } else { + + viewDescriptor.dimension = GPUTextureViewDimension.TwoDArray; + viewDescriptor.depthOrArrayLayers = textures[ i ].image.depth; + + } + } - } + if ( isRenderCameraDepthArray !== 
true ) { - if ( isRenderCameraDepthArray !== true ) { + const textureView = textureData.texture.createView( viewDescriptor ); - const textureView = textureData.texture.createView( viewDescriptor ); + let view, resolveTarget; - let view, resolveTarget; + if ( textureData.msaaTexture !== undefined ) { - if ( textureData.msaaTexture !== undefined ) { + view = textureData.msaaTexture.createView(); + resolveTarget = textureView; - view = textureData.msaaTexture.createView(); - resolveTarget = textureView; + } else { - } else { + view = textureView; + resolveTarget = undefined; - view = textureView; - resolveTarget = undefined; + } - } + textureViews.push( { + view, + resolveTarget, + depthSlice: sliceIndex + } ); - textureViews.push( { - view, - resolveTarget, - depthSlice: sliceIndex - } ); + } } @@ -533,16 +655,40 @@ class WebGPUBackend extends Backend { if ( renderContext.depth ) { const depthTextureData = this.get( renderContext.depthTexture ); - const options = {}; - if ( renderContext.depthTexture.isArrayTexture || renderContext.depthTexture.isCubeTexture ) { - options.dimension = GPUTextureViewDimension.TwoD; - options.arrayLayerCount = 1; - options.baseArrayLayer = renderContext.activeCubeFace; + // Handle external XR depth textures + if ( depthTextureData.externalTexture === true ) { - } + // XR depth texture - create a simple 2D view or use view descriptors + if ( depthTextureData.xrViewDescriptors && depthTextureData.xrViewDescriptors.length > 0 ) { - descriptorBase.depthStencilView = depthTextureData.texture.createView( options ); + // Use the first view descriptor for depth (shared across views) + descriptorBase.depthStencilView = depthTextureData.texture.createView( depthTextureData.xrViewDescriptors[ 0 ] ); + + } else { + + descriptorBase.depthStencilView = depthTextureData.texture.createView( { + dimension: '2d', + baseArrayLayer: 0, + arrayLayerCount: 1 + } ); + + } + + } else { + + const options = {}; + if ( renderContext.depthTexture.isArrayTexture || renderContext.depthTexture.isCubeTexture ) { + + options.dimension = GPUTextureViewDimension.TwoD; + options.arrayLayerCount = 1; + options.baseArrayLayer = renderContext.activeCubeFace; + + } + + descriptorBase.depthStencilView = depthTextureData.texture.createView( options ); + + } } @@ -710,7 +856,9 @@ class WebGPUBackend extends Backend { } - depthStencilAttachment.depthStoreOp = GPUStoreOp.Store; + // Discard depth if the render target doesn't need to resolve it (e.g., XR framebuffers) + const resolveDepth = renderContext.renderTarget === null || renderContext.renderTarget.resolveDepthBuffer !== false; + depthStencilAttachment.depthStoreOp = resolveDepth ? GPUStoreOp.Store : GPUStoreOp.Discard; } @@ -727,7 +875,9 @@ class WebGPUBackend extends Backend { } - depthStencilAttachment.stencilStoreOp = GPUStoreOp.Store; + // Discard stencil if the render target doesn't need to resolve it (e.g., XR framebuffers) + const resolveStencil = renderContext.renderTarget === null || renderContext.renderTarget.resolveStencilBuffer !== false; + depthStencilAttachment.stencilStoreOp = resolveStencil ? 
GPUStoreOp.Store : GPUStoreOp.Discard; } @@ -1706,7 +1856,8 @@ class WebGPUBackend extends Backend { let pass = renderContextData.currentPass; let sets = renderContextData.currentSets; - if ( renderContextData.bundleEncoders ) { + const isBundleEncoder = renderContextData.bundleEncoders !== undefined; + if ( isBundleEncoder ) { const bundleEncoder = renderContextData.bundleEncoders[ i ]; const bundleSets = renderContextData.bundleSets[ i ]; @@ -1717,7 +1868,8 @@ class WebGPUBackend extends Backend { - if ( vp ) { + // GPURenderBundleEncoder does not support setViewport, only GPURenderPassEncoder does + if ( vp && ! isBundleEncoder ) { pass.setViewport( Math.floor( vp.x * pixelRatio ), diff --git a/src/renderers/webgpu/nodes/WGSLNodeBuilder.js b/src/renderers/webgpu/nodes/WGSLNodeBuilder.js index 22511d2415462b..659967b3a8a582 100644 --- a/src/renderers/webgpu/nodes/WGSLNodeBuilder.js +++ b/src/renderers/webgpu/nodes/WGSLNodeBuilder.js @@ -774,6 +774,8 @@ class WGSLNodeBuilder extends NodeBuilder { /** * Generates the WGSL snippet when sampling textures with a bias to the mip level. + /** + * Generates the WGSL snippet for sampling/loading the given texture with bias. * * @param {Texture} texture - The texture. * @param {string} textureProperty - The name of the texture uniform in the shader. @@ -788,13 +790,27 @@ class WGSLNodeBuilder extends NodeBuilder { if ( shaderStage === 'fragment' ) { - if ( offsetSnippet ) { + if ( depthSnippet ) { - return `textureSampleBias( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ biasSnippet }, ${ offsetSnippet } )`; + if ( offsetSnippet ) { - } + return `textureSampleBias( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ depthSnippet }, ${ biasSnippet }, ${ offsetSnippet } )`; + + } + + return `textureSampleBias( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ depthSnippet }, ${ biasSnippet } )`; + + } else { - return `textureSampleBias( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ biasSnippet } )`; + if ( offsetSnippet ) { + + return `textureSampleBias( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ biasSnippet }, ${ offsetSnippet } )`; + + } + + return `textureSampleBias( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ biasSnippet } )`; + + } } else { @@ -960,7 +976,12 @@ class WGSLNodeBuilder extends NodeBuilder { if ( type === 'texture' || type === 'storageTexture' ) { - if ( node.value.is3DTexture === true ) { + if ( node.value.isArrayTexture === true || node.value.isDataArrayTexture === true || node.value.isCompressedArrayTexture === true ) { + + // Array textures should use regular NodeSampledTexture, not NodeSampledTexture3D + texture = new NodeSampledTexture( uniformNode.name, uniformNode.node, group, access ); + + } else if ( node.value.is3DTexture === true ) { texture = new NodeSampledTexture3D( uniformNode.name, uniformNode.node, group, access ); diff --git a/src/renderers/webgpu/utils/WebGPUBindingUtils.js b/src/renderers/webgpu/utils/WebGPUBindingUtils.js index 3f67a361e479db..db30a528eb53d2 100644 --- a/src/renderers/webgpu/utils/WebGPUBindingUtils.js +++ b/src/renderers/webgpu/utils/WebGPUBindingUtils.js @@ -374,14 +374,16 @@ class WebGPUBindingUtils { dimensionViewGPU = GPUTextureViewDimension.Cube; - } else if ( binding.isSampledTexture3D ) { - - dimensionViewGPU = GPUTextureViewDimension.ThreeD; - } else if ( binding.texture.isArrayTexture || binding.texture.isDataArrayTexture || 
binding.texture.isCompressedArrayTexture ) { + // Check texture's actual isArrayTexture property FIRST, before binding's cached isSampledTexture3D + // This handles cases where the texture's array status is set after shader compilation (e.g., XR textures) dimensionViewGPU = GPUTextureViewDimension.TwoDArray; + } else if ( binding.isSampledTexture3D ) { + + dimensionViewGPU = GPUTextureViewDimension.ThreeD; + } else { dimensionViewGPU = GPUTextureViewDimension.TwoD; diff --git a/src/renderers/webgpu/utils/WebGPUTextureUtils.js b/src/renderers/webgpu/utils/WebGPUTextureUtils.js index 642a3c8da12268..c1a9ae3b20d0e4 100644 --- a/src/renderers/webgpu/utils/WebGPUTextureUtils.js +++ b/src/renderers/webgpu/utils/WebGPUTextureUtils.js @@ -227,6 +227,13 @@ class WebGPUTextureUtils { if ( textureData.initialized ) { + // Skip creation for external XR textures - they are already set up + if ( textureData.externalTexture === true ) { + + return; + + } + throw new Error( 'WebGPUTextureUtils: Texture already initialized.' ); } @@ -322,6 +329,17 @@ class WebGPUTextureUtils { msaaTextureDescriptorGPU.sampleCount = samples; msaaTextureDescriptorGPU.mipLevelCount = 1; // See https://www.w3.org/TR/webgpu/#texture-creation + // MSAA textures cannot be array textures - they must have exactly 1 layer + if ( msaaTextureDescriptorGPU.size.depthOrArrayLayers > 1 ) { + + msaaTextureDescriptorGPU.size = { + width: msaaTextureDescriptorGPU.size.width, + height: msaaTextureDescriptorGPU.size.height, + depthOrArrayLayers: 1 + }; + + } + textureData.msaaTexture = backend.device.createTexture( msaaTextureDescriptorGPU ); }
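
The new `useArrayDepthTexture` option and the depth-texture logic added to Textures.js can be illustrated with a small sketch. The helper below restates the decision from this patch (an array depth texture is used only when the target has more than one layer, MSAA is off, and either `useArrayDepthTexture` or `multiview` is set), and the render target construction mirrors the one XRManager builds for the WebGPU XR path. The `'three'` import path, the fixed 2048×2048 size, and the helper name are assumptions made for illustration only.

import { RenderTarget, LinearFilter } from 'three';

// Restates the depth-layout decision from Textures.js: per-layer depth
// testing wants an array depth texture, but WebGPU cannot multisample one.
function usesArrayDepthTexture( renderTarget, layerCount ) {

	const wantsArrayDepth = renderTarget.useArrayDepthTexture || renderTarget.multiview;

	return layerCount > 1 && renderTarget.samples <= 1 && wantsArrayDepth;

}

// A two-layer, per-eye target similar to the one XRManager creates when the
// session runs on the WebGPU backend (no OVR_multiview2, no MSAA).
const xrTarget = new RenderTarget( 2048, 2048, {
	depth: 2,
	minFilter: LinearFilter,
	magFilter: LinearFilter,
	depthBuffer: true,
	multiview: false,
	useArrayDepthTexture: true,
	samples: 0
} );

console.log( usesArrayDepthTexture( xrTarget, 2 ) ); // true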
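
The frame-buffer-target setup in Renderer.js has to reconcile two constraints: the output render target's sample count should be honoured for XR (which requests samples = 0), but when the renderer itself has MSAA enabled the intermediate framebuffer must use the renderer's samples and cannot be an array texture, because WebGPU has no multisampled array textures. A hedged restatement of that decision as a standalone helper; the function and field names are illustrative, not part of the renderer API.

// Restates the propagation logic added to _getFrameBufferTarget():
// which sample count and texture layout the intermediate framebuffer gets.
function frameBufferLayout( rendererSamples, outputRenderTarget ) {

	if ( outputRenderTarget === null ) {

		return { samples: rendererSamples, isArrayTexture: false };

	}

	return {
		// renderer MSAA wins; otherwise inherit the output target's samples
		samples: rendererSamples > 0 ? rendererSamples : outputRenderTarget.samples,
		// array layout only without MSAA; with MSAA the per-eye path is used
		isArrayTexture: outputRenderTarget.texture.isArrayTexture && rendererSamples === 0
	};

}

console.log( frameBufferLayout( 4, { samples: 0, texture: { isArrayTexture: true } } ) );
// { samples: 4, isArrayTexture: false }  → per-eye rendering path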
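
For the output (tone-mapping) pass, the renderer now iterates over the layers of an array render target instead of relying on multiview. A condensed restatement of that loop, assuming access to the renderer internals this patch touches (`_nodes`, `_activeCubeFace`, `_renderScene`):

// One output-quad draw per array layer: the renderGroup uniform set by
// setOutputLayerIndex() selects the source slice to sample, while
// _activeCubeFace selects the destination layer of the XR render target.
function renderOutputPerLayer( renderer, quad, renderTarget ) {

	const layerCount = renderTarget.texture.image.depth;

	for ( let layer = 0; layer < layerCount; layer ++ ) {

		renderer._nodes.setOutputLayerIndex( layer );
		renderer._activeCubeFace = layer;

		renderer._renderScene( quad, quad.camera, false );

	}

	// reset for subsequent non-XR passes
	renderer._nodes.setOutputLayerIndex( 0 );
	renderer._activeCubeFace = 0;

}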
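
The XRManager changes split into two steps: at session start a projection layer is created through XRGPUBinding, and on every animation frame the per-view sub-images are collected and handed to the backend. A compact sketch of both steps, using only the XRGPUBinding calls that appear in this patch (`createProjectionLayer`, `getPreferredColorFormat`, `getViewSubImage`); reference-space setup and error handling are omitted, and the function names are placeholders.

// Session start: one projection layer shared by all views.
function createWebGPUProjectionLayer( session, device ) {

	const binding = new XRGPUBinding( session, device );

	const projectionLayer = binding.createProjectionLayer( {
		colorFormat: binding.getPreferredColorFormat(),
		depthStencilFormat: 'depth24plus'
	} );

	session.updateRenderState( { layers: [ projectionLayer ] } );

	return { binding, projectionLayer };

}

// Per frame: gather the external GPUTextures and per-view descriptors and
// register them with the backend so the next render pass targets them.
function bindXRSubImages( backend, binding, projectionLayer, views, xrRenderTarget ) {

	const colorTextures = [];
	const viewDescriptors = [];

	for ( const view of views ) {

		const subImage = binding.getViewSubImage( projectionLayer, view );

		colorTextures.push( subImage.colorTexture );
		if ( subImage.getViewDescriptor ) viewDescriptors.push( subImage.getViewDescriptor() );

	}

	// browser-supplied depth textures are not consumed yet (see onAnimationFrame)
	backend.setXRRenderTargetTextures( xrRenderTarget, colorTextures, null, viewDescriptors );

}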
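
In Nodes.getOutputNode(), the array-texture case now differs per backend: WebGL keeps the multiview path driven by gl_ViewID_OVR, while WebGPU samples an explicit layer through a renderGroup uniform that the output pass advances per layer. A TSL sketch of the WebGPU branch; the `'three/tsl'` import path and the local names are assumptions for the example.

import { texture, screenUV, uniform, renderGroup } from 'three/tsl';

// Shared across the output pass; setOutputLayerIndex() updates its value
// before each per-layer quad draw.
const outputLayerIndex = uniform( 0, 'int' ).setGroup( renderGroup );

function buildArrayOutputNode( outputTexture, renderer ) {

	return texture( outputTexture, screenUV )
		.depth( outputLayerIndex )
		.renderOutput( renderer.toneMapping, renderer.currentColorSpace );

}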
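
The WGSLNodeBuilder change extends biased sampling to array textures by emitting the array-layer argument when a depth snippet is present. A minimal restatement of that branching as a plain string builder, with example output in the comments; the snippet names (`map`, `uv`, `bias`, `layerIndex`) are placeholders.

// Mirrors the branching added to the bias-sampling generator: a depth
// (array layer) snippet becomes the third argument of textureSampleBias(),
// placed before the bias and any offset.
function textureSampleBiasWGSL( name, uvSnippet, biasSnippet, depthSnippet = null, offsetSnippet = null ) {

	const args = [ name, `${ name }_sampler`, uvSnippet ];

	if ( depthSnippet !== null ) args.push( depthSnippet );
	args.push( biasSnippet );
	if ( offsetSnippet !== null ) args.push( offsetSnippet );

	return `textureSampleBias( ${ args.join( ', ' ) } )`;

}

console.log( textureSampleBiasWGSL( 'map', 'uv', 'bias' ) );
// textureSampleBias( map, map_sampler, uv, bias )

console.log( textureSampleBiasWGSL( 'map', 'uv', 'bias', 'layerIndex' ) );
// textureSampleBias( map, map_sampler, uv, layerIndex, bias )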