 import { GUI } from 'dat.gui';
+import { mat3, mat4 } from 'wgpu-matrix';
 import fullscreenTexturedQuadWGSL from '../../shaders/fullscreenTexturedQuad.wgsl';
 import sampleExternalTextureWGSL from '../../shaders/sampleExternalTexture.frag.wgsl';
+import sampleExternalTextureAsPanoramaWGSL from './sampleExternalTextureAsPanorama.wgsl';
 import { quitIfWebGPUNotAvailable } from '../util';
 
 const adapter = await navigator.gpu?.requestAdapter();
 const device = await adapter?.requestDevice();
 quitIfWebGPUNotAvailable(adapter, device);
 
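+// Videos the user can pick from. 'cover' videos are scaled to fill the canvas
+// while keeping their aspect ratio; '360' videos are drawn as panoramas
+// through a rotating, draggable camera.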
+const videos = [
+  {
+    name: 'giraffe',
+    url: '../../assets/video/5214261-hd_1920_1080_25fps.mp4',
+    mode: 'cover',
+  },
+  { name: 'lhc', url: '../../assets/video/pano.webm', mode: '360' },
+  {
+    name: 'lake',
+    url: '../../assets/video/Video_360°._Timelapse._Bled_Lake_in_Slovenia..webm.720p.vp9.webm',
+    mode: '360',
+  },
+];
+
 // Set video element
 const video = document.createElement('video');
 video.loop = true;
 video.playsInline = true;
 video.autoplay = true;
 video.muted = true;
-video.src = '../../assets/video/pano.webm';
-await video.play();
+
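+// Guards the render loop so it does not import an external texture from the
+// video element while a new source is still loading.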
+let canReadVideo = false;
+
+async function playVideo(ndx: number) {
+  canReadVideo = false;
+  video.src = videos[ndx].url;
+  await video.play();
+  canReadVideo = true;
+}
+await playVideo(0);
 
 const canvas = document.querySelector('canvas') as HTMLCanvasElement;
 const context = canvas.getContext('webgpu') as GPUCanvasContext;
-const devicePixelRatio = window.devicePixelRatio;
-canvas.width = canvas.clientWidth * devicePixelRatio;
-canvas.height = canvas.clientHeight * devicePixelRatio;
 const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
 
 context.configure({
   device,
   format: presentationFormat,
 });
 
-const pipeline = device.createRenderPipeline({
+const videoCoverPipeline = device.createRenderPipeline({
   layout: 'auto',
   vertex: {
     module: device.createShaderModule({
@@ -50,15 +71,38 @@ const pipeline = device.createRenderPipeline({
   },
 });
 
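+// Pipeline for the '360' videos: the panorama shader draws a fullscreen quad
+// and samples the external texture as a panorama from each pixel's view
+// direction (see sampleExternalTextureAsPanorama.wgsl).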
+const module = device.createShaderModule({
+  code: sampleExternalTextureAsPanoramaWGSL,
+});
+const video360Pipeline = device.createRenderPipeline({
+  layout: 'auto',
+  vertex: { module },
+  fragment: {
+    module,
+    targets: [{ format: presentationFormat }],
+  },
+  primitive: {
+    topology: 'triangle-list',
+  },
+});
+
 const sampler = device.createSampler({
   magFilter: 'linear',
   minFilter: 'linear',
 });
 
+// make buffer big enough for either pipeline
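+// (the 360 path writes a mat4x4f plus a vec2f canvas size; the cover path
+// writes a mat3x3f, which is 48 bytes)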
+const uniformBuffer = device.createBuffer({
+  size: 80,
+  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+});
+
 const params = new URLSearchParams(window.location.search);
 const settings = {
   requestFrame: 'requestAnimationFrame',
   videoSource: params.get('videoSource') || 'videoElement',
+  video: 0,
+  fov: 75,
 };
 
 const gui = new GUI();
@@ -67,12 +111,31 @@ gui.add(settings, 'requestFrame', [
   'requestAnimationFrame',
   'requestVideoFrameCallback',
 ]);
+gui
+  .add(
+    settings,
+    'video',
+    Object.fromEntries(videos.map(({ name }, i) => [name, i]))
+  )
+  .onChange(() => {
+    playVideo(settings.video);
+  });
 
-function frame() {
+let yRotation = 0;
+let xRotation = 0;
+
+function drawVideo() {
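+  // Size the canvas to match its displayed size, clamped to the device's
+  // maximum 2D texture dimension.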
+  const maxSize = device.limits.maxTextureDimension2D;
+  canvas.width = Math.min(Math.max(1, canvas.offsetWidth), maxSize);
+  canvas.height = Math.min(Math.max(1, canvas.offsetHeight), maxSize);
   const externalTextureSource =
     settings.videoSource === 'videoFrame' ? new VideoFrame(video) : video;
 
-  const uniformBindGroup = device.createBindGroup({
+  const mode = videos[settings.video].mode;
+  const pipeline = mode === '360' ? video360Pipeline : videoCoverPipeline;
+  const canvasTexture = context.getCurrentTexture();
+
+  const bindGroup = device.createBindGroup({
     layout: pipeline.getBindGroupLayout(0),
     entries: [
       {
@@ -85,11 +148,60 @@ function frame() {
           source: externalTextureSource,
         }),
       },
+      {
+        binding: 3,
+        resource: uniformBuffer,
+      },
     ],
   });
 
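+  // Write this frame's uniforms: a view-direction projection inverse plus the
+  // canvas size for the 360 path, or a 3x3 UV "cover" matrix otherwise.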
+  if (mode === '360') {
+    // Spin the camera around the y axis and add in the user's x and y rotation.
+    const time = performance.now() * 0.001;
+    const rotation = time * 0.1 + yRotation;
+    const projection = mat4.perspective(
+      (settings.fov * Math.PI) / 180,
+      canvas.clientWidth / canvas.clientHeight,
+      0.5,
+      100
+    );
+
+    // Note: You can use any method you want to compute a view matrix,
+    // just be sure to zero out the translation.
+    const camera = mat4.identity();
+    mat4.rotateY(camera, rotation, camera);
+    mat4.rotateX(camera, xRotation, camera);
+    mat4.setTranslation(camera, [0, 0, 0], camera);
+    const view = mat4.inverse(camera);
+    const viewDirectionProjection = mat4.multiply(projection, view);
+    const viewDirectionProjectionInverse = mat4.inverse(
+      viewDirectionProjection
+    );
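+    // The fragment shader can multiply each pixel's clip-space position by
+    // this inverse to recover a world-space view direction for the panorama lookup.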
+
+    const uniforms = new Float32Array([
+      ...viewDirectionProjectionInverse,
+      canvasTexture.width,
+      canvasTexture.height,
+    ]);
+    device.queue.writeBuffer(uniformBuffer, 0, uniforms);
+  } else {
+    // compute a `cover` matrix for a unit UV quad.
+    const mat = mat3.identity();
+    const videoAspect = video.videoWidth / video.videoHeight;
+    const canvasAspect = canvas.offsetWidth / canvas.offsetHeight;
+    const combinedAspect = videoAspect / canvasAspect;
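+    // Scale the UVs about their center (0.5, 0.5) so the video is cropped to
+    // cover the canvas instead of being stretched.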
+    mat3.translate(mat, [0.5, 0.5], mat);
+    mat3.scale(
+      mat,
+      combinedAspect > 1 ? [1 / combinedAspect, 1] : [1, combinedAspect],
+      mat
+    );
+    mat3.translate(mat, [-0.5, -0.5], mat);
+    device.queue.writeBuffer(uniformBuffer, 0, mat);
+  }
+
   const commandEncoder = device.createCommandEncoder();
-  const textureView = context.getCurrentTexture().createView();
+  const textureView = canvasTexture.createView();
 
   const renderPassDescriptor: GPURenderPassDescriptor = {
     colorAttachments: [
@@ -104,16 +216,21 @@ function frame() {
 
   const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
   passEncoder.setPipeline(pipeline);
-  passEncoder.setBindGroup(0, uniformBindGroup);
+  passEncoder.setBindGroup(0, bindGroup);
   passEncoder.draw(6);
   passEncoder.end();
   device.queue.submit([commandEncoder.finish()]);
 
   if (externalTextureSource instanceof VideoFrame) {
     externalTextureSource.close();
   }
+}
 
-  if (settings.requestFrame == 'requestVideoFrameCallback') {
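+// While a new video is loading (canReadVideo is false), skip drawing and fall
+// back to requestAnimationFrame to keep the loop alive.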
+function frame() {
+  if (canReadVideo) {
+    drawVideo();
+  }
+  if (canReadVideo && settings.requestFrame == 'requestVideoFrameCallback') {
     video.requestVideoFrameCallback(frame);
   } else {
     requestAnimationFrame(frame);
@@ -125,3 +242,39 @@ if (settings.requestFrame == 'requestVideoFrameCallback') {
 } else {
   requestAnimationFrame(frame);
 }
+
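+// Pointer-drag controls so the user can look around in the '360' modes.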
+let startX = 0;
+let startY = 0;
+let startYRotation = 0;
+let startTarget = 0;
+
+const clamp = (value: number, min: number, max: number) => {
+  return Math.max(min, Math.min(max, value));
+};
+
+const drag = (e: PointerEvent) => {
+  const deltaX = e.clientX - startX;
+  const deltaY = e.clientY - startY;
+  yRotation = startYRotation + deltaX * 0.01;
+  xRotation = clamp(
+    startTarget + deltaY * -0.01,
+    -Math.PI * 0.4,
+    Math.PI * 0.4
+  );
+};
+
+const stopDrag = () => {
+  window.removeEventListener('pointermove', drag);
+  window.removeEventListener('pointerup', stopDrag);
+};
+
+const startDrag = (e: PointerEvent) => {
+  window.addEventListener('pointermove', drag);
+  window.addEventListener('pointerup', stopDrag);
+  e.preventDefault();
+  startX = e.clientX;
+  startY = e.clientY;
+  startYRotation = yRotation;
+  startTarget = xRotation;
+};
+canvas.addEventListener('pointerdown', startDrag);