1
1
import { GUI } from 'dat.gui' ;
2
+ import { mat3 , mat4 } from 'wgpu-matrix' ;
2
3
import fullscreenTexturedQuadWGSL from '../../shaders/fullscreenTexturedQuad.wgsl' ;
3
- import sampleExternalTextureWGSL from '../../shaders/sampleExternalTexture.frag.wgsl' ;
4
+ import sampleExternalTextureWGSL from './sampleExternalTexture.frag.wgsl' ;
5
+ import sampleExternalTextureAsPanoramaWGSL from './sampleExternalTextureAsPanorama.wgsl' ;
4
6
import { quitIfWebGPUNotAvailable } from '../util' ;
5
7
6
8
const adapter = await navigator . gpu ?. requestAdapter ( {
@@ -9,28 +11,31 @@ const adapter = await navigator.gpu?.requestAdapter({
9
11
const device = await adapter?.requestDevice();
// Abort with a user-visible message when WebGPU is unavailable.
quitIfWebGPUNotAvailable(adapter, device);
11
13
12
// Catalog of selectable videos. `mode` selects the render path:
// 'cover' draws the frame as a fullscreen quad; '360' treats the video
// as an equirectangular panorama projected around the camera.
const videos = {
  'giraffe (2d)': {
    url: '../../assets/video/5214261-hd_1920_1080_25fps.mp4',
    mode: 'cover',
  },
  'lhc (360)': {
    url: '../../assets/video/pano.webm',
    mode: '360',
  },
  'lake (360)': {
    url: '../../assets/video/Video_360°._Timelapse._Bled_Lake_in_Slovenia..webm.720p.vp9.webm',
    mode: '360',
  },
} as const;
20
28
21
29
const canvas = document.querySelector('canvas') as HTMLCanvasElement;
const context = canvas.getContext('webgpu') as GPUCanvasContext;
// Note: the canvas backing store is (re)sized every frame in drawVideo,
// so no devicePixelRatio-based sizing is done here.
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

context.configure({
  device,
  format: presentationFormat,
});
32
37
33
- const pipeline = device . createRenderPipeline ( {
38
+ const videoCoverPipeline = device . createRenderPipeline ( {
34
39
layout : 'auto' ,
35
40
vertex : {
36
41
module : device . createShaderModule ( {
@@ -52,29 +57,81 @@ const pipeline = device.createRenderPipeline({
52
57
} ,
53
58
} ) ;
54
59
60
+ const module = device . createShaderModule ( {
61
+ code : sampleExternalTextureAsPanoramaWGSL ,
62
+ } ) ;
63
+ const video360Pipeline = device . createRenderPipeline ( {
64
+ layout : 'auto' ,
65
+ vertex : { module } ,
66
+ fragment : {
67
+ module,
68
+ targets : [ { format : presentationFormat } ] ,
69
+ } ,
70
+ primitive : {
71
+ topology : 'triangle-list' ,
72
+ } ,
73
+ } ) ;
74
+
55
75
// Bilinear sampler shared by both pipelines.
const sampler = device.createSampler({
  magFilter: 'linear',
  minFilter: 'linear',
});
59
79
80
// make buffer big enough for either pipeline:
// 360 mode writes mat4x4f (64B) + vec2f (8B) = 72B (80B with padding);
// cover mode writes a mat3x3f (48B) into the same buffer.
const uniformBuffer = device.createBuffer({
  size: (16 + 2 + 2) * 4, // (mat4x4f + vec2f + padding) vs (mat3x3f + padding)
  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});
85
+
86
// Set video element. Muted + playsInline so autoplay is allowed without
// a user gesture; `src` is assigned later by playVideo().
const video = document.createElement('video');
video.loop = true;
video.playsInline = true;
video.autoplay = true;
video.muted = true;

// Gates rendering while a source switch is in flight — presumably to
// avoid importing frames before the new video has started playing.
// NOTE(review): confirm against drawVideo/frame usage.
let canReadVideo = false;
94
+
95
+ async function playVideo ( videoName : keyof typeof videos ) {
96
+ canReadVideo = false ;
97
+ video . src = videos [ videoName ] . url ;
98
+ await video . play ( ) ;
99
+ canReadVideo = true ;
100
+ }
101
+
60
102
// UI-adjustable settings; `videoSource` may be preset via ?videoSource=…
const params = new URLSearchParams(window.location.search);
const settings = {
  requestFrame: 'requestAnimationFrame',
  videoSource: params.get('videoSource') || 'videoElement',
  video: Object.keys(videos)[0] as keyof typeof videos,
};
// Fire-and-forget: rendering is gated on canReadVideo, which playVideo
// sets once playback starts.
playVideo(settings.video);

const gui = new GUI();
gui.add(settings, 'videoSource', ['videoElement', 'videoFrame']);
gui.add(settings, 'requestFrame', [
  'requestAnimationFrame',
  'requestVideoFrameCallback',
]);
gui.add(settings, 'video', Object.keys(videos)).onChange(() => {
  playVideo(settings.video);
});
72
119
73
- function frame ( ) {
120
// Camera orientation (radians) for 360 mode, driven by the pointer-drag
// handlers at the bottom of the file.
let yRotation = 0;
let xRotation = 0;
122
+
123
+ function drawVideo ( ) {
124
+ const maxSize = device . limits . maxTextureDimension2D ;
125
+ canvas . width = Math . min ( Math . max ( 1 , canvas . offsetWidth ) , maxSize ) ;
126
+ canvas . height = Math . min ( Math . max ( 1 , canvas . offsetHeight ) , maxSize ) ;
74
127
const externalTextureSource =
75
128
settings . videoSource === 'videoFrame' ? new VideoFrame ( video ) : video ;
76
129
77
- const uniformBindGroup = device . createBindGroup ( {
130
+ const mode = videos [ settings . video ] . mode ;
131
+ const pipeline = mode === '360' ? video360Pipeline : videoCoverPipeline ;
132
+ const canvasTexture = context . getCurrentTexture ( ) ;
133
+
134
+ const bindGroup = device . createBindGroup ( {
78
135
layout : pipeline . getBindGroupLayout ( 0 ) ,
79
136
entries : [
80
137
{
@@ -87,11 +144,60 @@ function frame() {
87
144
source : externalTextureSource ,
88
145
} ) ,
89
146
} ,
147
+ {
148
+ binding : 3 ,
149
+ resource : { buffer : uniformBuffer } ,
150
+ } ,
90
151
] ,
91
152
} ) ;
92
153
154
+ if ( mode === '360' ) {
155
+ // Spin the camera around the y axis and add in the user's x and y rotation;
156
+ const time = performance . now ( ) * 0.001 ;
157
+ const rotation = time * 0.1 + yRotation ;
158
+ const projection = mat4 . perspective (
159
+ ( 75 * Math . PI ) / 180 ,
160
+ canvas . clientWidth / canvas . clientHeight ,
161
+ 0.5 ,
162
+ 100
163
+ ) ;
164
+
165
+ // Note: You can use any method you want to compute a view matrix,
166
+ // just be sure to zero out the translation.
167
+ const camera = mat4 . identity ( ) ;
168
+ mat4 . rotateY ( camera , rotation , camera ) ;
169
+ mat4 . rotateX ( camera , xRotation , camera ) ;
170
+ mat4 . setTranslation ( camera , [ 0 , 0 , 0 ] , camera ) ;
171
+ const view = mat4 . inverse ( camera ) ;
172
+ const viewDirectionProjection = mat4 . multiply ( projection , view ) ;
173
+ const viewDirectionProjectionInverse = mat4 . inverse (
174
+ viewDirectionProjection
175
+ ) ;
176
+
177
+ const uniforms = new Float32Array ( [
178
+ ...viewDirectionProjectionInverse ,
179
+ canvasTexture . width ,
180
+ canvasTexture . height ,
181
+ ] ) ;
182
+ device . queue . writeBuffer ( uniformBuffer , 0 , uniforms ) ;
183
+ } else {
184
+ // compute a `cover` matrix for a unit UV quad.
185
+ const mat = mat3 . identity ( ) ;
186
+ const videoAspect = video . videoWidth / video . videoHeight ;
187
+ const canvasAspect = canvas . offsetWidth / canvas . offsetHeight ;
188
+ const combinedAspect = videoAspect / canvasAspect ;
189
+ mat3 . translate ( mat , [ 0.5 , 0.5 ] , mat ) ;
190
+ mat3 . scale (
191
+ mat ,
192
+ combinedAspect > 1 ? [ 1 / combinedAspect , 1 ] : [ 1 , combinedAspect ] ,
193
+ mat
194
+ ) ;
195
+ mat3 . translate ( mat , [ - 0.5 , - 0.5 ] , mat ) ;
196
+ device . queue . writeBuffer ( uniformBuffer , 0 , mat ) ;
197
+ }
198
+
93
199
const commandEncoder = device . createCommandEncoder ( ) ;
94
- const textureView = context . getCurrentTexture ( ) . createView ( ) ;
200
+ const textureView = canvasTexture . createView ( ) ;
95
201
96
202
const renderPassDescriptor : GPURenderPassDescriptor = {
97
203
colorAttachments : [
@@ -106,15 +212,20 @@ function frame() {
106
212
107
213
const passEncoder = commandEncoder . beginRenderPass ( renderPassDescriptor ) ;
108
214
passEncoder . setPipeline ( pipeline ) ;
109
- passEncoder . setBindGroup ( 0 , uniformBindGroup ) ;
215
+ passEncoder . setBindGroup ( 0 , bindGroup ) ;
110
216
passEncoder . draw ( 6 ) ;
111
217
passEncoder . end ( ) ;
112
218
device . queue . submit ( [ commandEncoder . finish ( ) ] ) ;
113
219
114
220
if ( externalTextureSource instanceof VideoFrame ) {
115
221
externalTextureSource . close ( ) ;
116
222
}
223
+ }
117
224
225
+ function frame ( ) {
226
+ if ( canReadVideo ) {
227
+ drawVideo ( ) ;
228
+ }
118
229
if ( settings . requestFrame == 'requestVideoFrameCallback' ) {
119
230
video . requestVideoFrameCallback ( frame ) ;
120
231
} else {
@@ -127,3 +238,39 @@ if (settings.requestFrame == 'requestVideoFrameCallback') {
127
238
} else {
128
239
requestAnimationFrame ( frame ) ;
129
240
}
241
+
242
// Pointer-drag bookkeeping: pointer position and camera angles captured
// at pointerdown; `drag` computes absolute rotations from these.
let startX = 0;
let startY = 0;
let startYRotation = 0;
let startTarget = 0;
246
+
247
+ const clamp = ( value : number , min : number , max : number ) => {
248
+ return Math . max ( min , Math . min ( max , value ) ) ;
249
+ } ;
250
+
251
+ const drag = ( e : PointerEvent ) => {
252
+ const deltaX = e . clientX - startX ;
253
+ const deltaY = e . clientY - startY ;
254
+ yRotation = startYRotation + deltaX * 0.01 ;
255
+ xRotation = clamp (
256
+ startTarget + deltaY * - 0.01 ,
257
+ - Math . PI * 0.4 ,
258
+ Math . PI * 0.4
259
+ ) ;
260
+ } ;
261
+
262
// Detach the drag listeners when the pointer is released.
const stopDrag = () => {
  window.removeEventListener('pointermove', drag);
  window.removeEventListener('pointerup', stopDrag);
};
266
+
267
// Begin a camera drag: remember where the pointer and camera started so
// `drag` can apply deltas. Listeners go on window (not the canvas) so the
// drag keeps tracking when the pointer leaves the canvas.
const startDrag = (e: PointerEvent) => {
  window.addEventListener('pointermove', drag);
  window.addEventListener('pointerup', stopDrag);
  e.preventDefault();
  startX = e.clientX;
  startY = e.clientY;
  startYRotation = yRotation;
  startTarget = xRotation;
};
canvas.addEventListener('pointerdown', startDrag);
0 commit comments