Commit 6ae0a5c

Add a Panorama Video Example (#525)
Co-authored-by: François Beaufort <[email protected]>
Parent: fb22c85

9 files changed: +229 -24 lines
New file: attribution for the giraffe video (3 additions):

@@ -0,0 +1,3 @@
+By: [Taryn Elliott](https://www.pexels.com/@taryn-elliott/)
+From: https://www.pexels.com/video/giraffe-walking-in-the-forest-5214261/
+License: https://www.pexels.com/license/

New binary file (2.92 MB): not shown.
New file: attribution for the lake video (3 additions):

@@ -0,0 +1,3 @@
+By [Fabio Casati](https://www.youtube.com/channel/UCTnaAJ2DlSM6jtdUFXtGu8Q)
+From: https://commons.wikimedia.org/wiki/File:Video_360%C2%B0._Timelapse._Bled_Lake_in_Slovenia..webm
+License: [CC BY 3.0](https://creativecommons.org/licenses/by/3.0)

New binary file (2.68 MB): not shown.

sample/videoUploading/index.html (2 additions, 4 deletions):

@@ -21,12 +21,10 @@
 html, body {
   margin: 0; /* remove default margin */
   height: 100%; /* make body fill the browser window */
-  display: flex;
-  place-content: center center;
 }
 canvas {
-  width: 600px;
-  height: 600px;
+  width: 100%;
+  height: 100%;
   max-width: 100%;
   display: block;
 }
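The fixed 600×600 canvas goes away because main.ts (below) now resizes the drawing buffer every frame from the element's layout size, so the CSS just lets the canvas fill the window. The resize pattern in isolation (the helper name is mine, not from the sample):

```ts
// Per-frame resize: match the canvas's drawing buffer to its layout size,
// clamped to the largest 2D texture the device can create.
function resizeToDisplaySize(canvas: HTMLCanvasElement, device: GPUDevice) {
  const maxSize = device.limits.maxTextureDimension2D;
  canvas.width = Math.min(Math.max(1, canvas.offsetWidth), maxSize);
  canvas.height = Math.min(Math.max(1, canvas.offsetHeight), maxSize);
}
```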

sample/videoUploading/main.ts (164 additions, 17 deletions):

@@ -1,6 +1,8 @@
 import { GUI } from 'dat.gui';
+import { mat3, mat4 } from 'wgpu-matrix';
 import fullscreenTexturedQuadWGSL from '../../shaders/fullscreenTexturedQuad.wgsl';
-import sampleExternalTextureWGSL from '../../shaders/sampleExternalTexture.frag.wgsl';
+import sampleExternalTextureWGSL from './sampleExternalTexture.frag.wgsl';
+import sampleExternalTextureAsPanoramaWGSL from './sampleExternalTextureAsPanorama.wgsl';
 import { quitIfWebGPUNotAvailable } from '../util';

 const adapter = await navigator.gpu?.requestAdapter({
@@ -9,28 +11,31 @@ const adapter = await navigator.gpu?.requestAdapter({
 const device = await adapter?.requestDevice();
 quitIfWebGPUNotAvailable(adapter, device);

-// Set video element
-const video = document.createElement('video');
-video.loop = true;
-video.playsInline = true;
-video.autoplay = true;
-video.muted = true;
-video.src = '../../assets/video/pano.webm';
-await video.play();
+const videos = {
+  'giraffe (2d)': {
+    url: '../../assets/video/5214261-hd_1920_1080_25fps.mp4',
+    mode: 'cover',
+  },
+  'lhc (360)': {
+    url: '../../assets/video/pano.webm',
+    mode: '360',
+  },
+  'lake (360)': {
+    url: '../../assets/video/Video_360°._Timelapse._Bled_Lake_in_Slovenia..webm.720p.vp9.webm',
+    mode: '360',
+  },
+} as const;

 const canvas = document.querySelector('canvas') as HTMLCanvasElement;
 const context = canvas.getContext('webgpu') as GPUCanvasContext;
-const devicePixelRatio = window.devicePixelRatio;
-canvas.width = canvas.clientWidth * devicePixelRatio;
-canvas.height = canvas.clientHeight * devicePixelRatio;
 const presentationFormat = navigator.gpu.getPreferredCanvasFormat();

 context.configure({
   device,
   format: presentationFormat,
 });

-const pipeline = device.createRenderPipeline({
+const videoCoverPipeline = device.createRenderPipeline({
   layout: 'auto',
   vertex: {
     module: device.createShaderModule({
@@ -52,29 +57,81 @@ const pipeline = device.createRenderPipeline({
   },
 });

+const module = device.createShaderModule({
+  code: sampleExternalTextureAsPanoramaWGSL,
+});
+const video360Pipeline = device.createRenderPipeline({
+  layout: 'auto',
+  vertex: { module },
+  fragment: {
+    module,
+    targets: [{ format: presentationFormat }],
+  },
+  primitive: {
+    topology: 'triangle-list',
+  },
+});
+
 const sampler = device.createSampler({
   magFilter: 'linear',
   minFilter: 'linear',
 });

+// make buffer big enough for either pipeline
+const uniformBuffer = device.createBuffer({
+  size: (16 + 2 + 2) * 4, // (mat4x4f + vec2f + padding) vs (mat3x3f + padding)
+  usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
+});
+
+// Set video element
+const video = document.createElement('video');
+video.loop = true;
+video.playsInline = true;
+video.autoplay = true;
+video.muted = true;
+
+let canReadVideo = false;
+
+async function playVideo(videoName: keyof typeof videos) {
+  canReadVideo = false;
+  video.src = videos[videoName].url;
+  await video.play();
+  canReadVideo = true;
+}
+
 const params = new URLSearchParams(window.location.search);
 const settings = {
   requestFrame: 'requestAnimationFrame',
   videoSource: params.get('videoSource') || 'videoElement',
+  video: Object.keys(videos)[0] as keyof typeof videos,
 };
+playVideo(settings.video);

 const gui = new GUI();
 gui.add(settings, 'videoSource', ['videoElement', 'videoFrame']);
 gui.add(settings, 'requestFrame', [
   'requestAnimationFrame',
   'requestVideoFrameCallback',
 ]);
+gui.add(settings, 'video', Object.keys(videos)).onChange(() => {
+  playVideo(settings.video);
+});

-function frame() {
+let yRotation = 0;
+let xRotation = 0;
+
+function drawVideo() {
+  const maxSize = device.limits.maxTextureDimension2D;
+  canvas.width = Math.min(Math.max(1, canvas.offsetWidth), maxSize);
+  canvas.height = Math.min(Math.max(1, canvas.offsetHeight), maxSize);
   const externalTextureSource =
     settings.videoSource === 'videoFrame' ? new VideoFrame(video) : video;

-  const uniformBindGroup = device.createBindGroup({
+  const mode = videos[settings.video].mode;
+  const pipeline = mode === '360' ? video360Pipeline : videoCoverPipeline;
+  const canvasTexture = context.getCurrentTexture();
+
+  const bindGroup = device.createBindGroup({
     layout: pipeline.getBindGroupLayout(0),
     entries: [
       {
@@ -87,11 +144,60 @@ function frame() {
           source: externalTextureSource,
         }),
       },
+      {
+        binding: 3,
+        resource: { buffer: uniformBuffer },
+      },
     ],
   });

+  if (mode === '360') {
+    // Spin the camera around the y axis and add in the user's x and y rotation;
+    const time = performance.now() * 0.001;
+    const rotation = time * 0.1 + yRotation;
+    const projection = mat4.perspective(
+      (75 * Math.PI) / 180,
+      canvas.clientWidth / canvas.clientHeight,
+      0.5,
+      100
+    );
+
+    // Note: You can use any method you want to compute a view matrix,
+    // just be sure to zero out the translation.
+    const camera = mat4.identity();
+    mat4.rotateY(camera, rotation, camera);
+    mat4.rotateX(camera, xRotation, camera);
+    mat4.setTranslation(camera, [0, 0, 0], camera);
+    const view = mat4.inverse(camera);
+    const viewDirectionProjection = mat4.multiply(projection, view);
+    const viewDirectionProjectionInverse = mat4.inverse(
+      viewDirectionProjection
+    );
+
+    const uniforms = new Float32Array([
+      ...viewDirectionProjectionInverse,
+      canvasTexture.width,
+      canvasTexture.height,
+    ]);
+    device.queue.writeBuffer(uniformBuffer, 0, uniforms);
+  } else {
+    // compute a `cover` matrix for a unit UV quad.
+    const mat = mat3.identity();
+    const videoAspect = video.videoWidth / video.videoHeight;
+    const canvasAspect = canvas.offsetWidth / canvas.offsetHeight;
+    const combinedAspect = videoAspect / canvasAspect;
+    mat3.translate(mat, [0.5, 0.5], mat);
+    mat3.scale(
+      mat,
+      combinedAspect > 1 ? [1 / combinedAspect, 1] : [1, combinedAspect],
+      mat
+    );
+    mat3.translate(mat, [-0.5, -0.5], mat);
+    device.queue.writeBuffer(uniformBuffer, 0, mat);
+  }
+
   const commandEncoder = device.createCommandEncoder();
-  const textureView = context.getCurrentTexture().createView();
+  const textureView = canvasTexture.createView();

   const renderPassDescriptor: GPURenderPassDescriptor = {
     colorAttachments: [
@@ -106,15 +212,20 @@ function frame()

   const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
   passEncoder.setPipeline(pipeline);
-  passEncoder.setBindGroup(0, uniformBindGroup);
+  passEncoder.setBindGroup(0, bindGroup);
   passEncoder.draw(6);
   passEncoder.end();
   device.queue.submit([commandEncoder.finish()]);

   if (externalTextureSource instanceof VideoFrame) {
     externalTextureSource.close();
   }
+}

+function frame() {
+  if (canReadVideo) {
+    drawVideo();
+  }
   if (settings.requestFrame == 'requestVideoFrameCallback') {
     video.requestVideoFrameCallback(frame);
   } else {
@@ -127,3 +238,39 @@ if (settings.requestFrame == 'requestVideoFrameCallback') {
 } else {
   requestAnimationFrame(frame);
 }
+
+let startX = 0;
+let startY = 0;
+let startYRotation = 0;
+let startTarget = 0;
+
+const clamp = (value: number, min: number, max: number) => {
+  return Math.max(min, Math.min(max, value));
+};
+
+const drag = (e: PointerEvent) => {
+  const deltaX = e.clientX - startX;
+  const deltaY = e.clientY - startY;
+  yRotation = startYRotation + deltaX * 0.01;
+  xRotation = clamp(
+    startTarget + deltaY * -0.01,
+    -Math.PI * 0.4,
+    Math.PI * 0.4
+  );
+};
+
+const stopDrag = () => {
+  window.removeEventListener('pointermove', drag);
+  window.removeEventListener('pointerup', stopDrag);
+};
+
+const startDrag = (e: PointerEvent) => {
+  window.addEventListener('pointermove', drag);
+  window.addEventListener('pointerup', stopDrag);
+  e.preventDefault();
+  startX = e.clientX;
+  startY = e.clientY;
+  startYRotation = yRotation;
+  startTarget = xRotation;
+};
+canvas.addEventListener('pointerdown', startDrag);
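A note on the shared uniform buffer above: in WGSL's uniform address space a mat4x4f takes 64 bytes and the trailing vec2f takes 8, with the struct size rounded up to a 16-byte multiple, which is the 80 bytes that `(16 + 2 + 2) * 4` evaluates to; a mat3x3f is stored as three vec4-aligned columns (48 bytes), so one buffer covers both pipelines. A sketch of that arithmetic (constant names are mine, not from the sample):

```ts
// WGSL uniform-layout sizes, in bytes.
const MAT4X4F = 16 * 4; // 64: four vec4f columns
const VEC2F = 2 * 4;    // 8
const PAD = 2 * 4;      // 8: struct size rounds up to a multiple of 16
const panoramaUniforms = MAT4X4F + VEC2F + PAD; // 80 = (16 + 2 + 2) * 4

const MAT3X3F = 3 * 4 * 4; // 48: three columns, each padded to a vec4f
console.log(panoramaUniforms >= MAT3X3F); // true: one buffer serves both
```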

sample/videoUploading/meta.ts (8 additions, 2 deletions):

@@ -1,10 +1,16 @@
 export default {
   name: 'Video Uploading',
-  description: 'This example shows how to upload video frame to WebGPU.',
+  description: `\
+This example shows how to upload video frame to WebGPU.
+giraffe by [Taryn Elliott](https://www.pexels.com/video/giraffe-walking-in-the-forest-5214261/).
+lhc by [unknown](https://foo.com).
+lake by [Fabio Casati](https://commons.wikimedia.org/wiki/File:Video_360%C2%B0._Timelapse._Bled_Lake_in_Slovenia..webm), [CC BY 3.0](https://creativecommons.org/licenses/by/3.0)
+`,
   filename: __DIRNAME__,
   sources: [
     { path: 'main.ts' },
     { path: '../../shaders/fullscreenTexturedQuad.wgsl' },
-    { path: '../../shaders/sampleExternalTexture.frag.wgsl' },
+    { path: './sampleExternalTexture.frag.wgsl' },
+    { path: './sampleExternalTextureAsPanorama.wgsl' },
   ],
 };
sample/videoUploading/sampleExternalTexture.frag.wgsl (3 additions, 1 deletion):

@@ -1,7 +1,9 @@
 @group(0) @binding(1) var mySampler: sampler;
 @group(0) @binding(2) var myTexture: texture_external;
+@group(0) @binding(3) var<uniform> myMatrix: mat3x3f;

 @fragment
 fn main(@location(0) fragUV : vec2f) -> @location(0) vec4f {
-  return textureSampleBaseClampToEdge(myTexture, mySampler, fragUV);
+  let uv = (myMatrix * vec3f(fragUV, 1.0)).xy;
+  return textureSampleBaseClampToEdge(myTexture, mySampler, uv);
 }
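The new mat3x3f uniform lets the fragment shader remap the quad's UVs, and main.ts builds it as translate(0.5) · scale · translate(-0.5): a scale about the UV center that emulates CSS `object-fit: cover`, cropping whichever axis of the video overflows the canvas. A standalone sketch of the same construction (the aspect values and the transformUV helper are mine, for illustration):

```ts
import { mat3 } from 'wgpu-matrix';

// Example: a 16:9 video shown on a square canvas.
const combinedAspect = (16 / 9) / 1; // videoAspect / canvasAspect

// Same construction as in main.ts: scale UVs about their center (0.5, 0.5).
const mat = mat3.identity();
mat3.translate(mat, [0.5, 0.5], mat);
mat3.scale(
  mat,
  combinedAspect > 1 ? [1 / combinedAspect, 1] : [1, combinedAspect],
  mat
);
mat3.translate(mat, [-0.5, -0.5], mat);

// wgpu-matrix stores a mat3 as 12 floats: 3 columns, each padded to a vec4f.
function transformUV(m: Float32Array, x: number, y: number) {
  return [m[0] * x + m[4] * y + m[8], m[1] * x + m[5] * y + m[9]];
}

// Corner UVs land inside the video, so its left/right edges get cropped:
console.log(transformUV(mat, 0, 0)); // ≈ [0.22, 0]
console.log(transformUV(mat, 1, 1)); // ≈ [0.78, 1]
```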
sample/videoUploading/sampleExternalTextureAsPanorama.wgsl (new file, 46 additions):

@@ -0,0 +1,46 @@
+struct Uniforms {
+  viewDirectionProjectionInverse: mat4x4f,
+  targetSize: vec2f,
+};
+
+struct VSOutput {
+  @builtin(position) position: vec4f,
+  @location(0) uv: vec2f,
+};
+
+@vertex
+fn vs(@builtin(vertex_index) vertexIndex: u32) -> VSOutput {
+  let pos = array(
+    vec2f(-1, -1),
+    vec2f(-1, 3),
+    vec2f( 3, -1),
+  );
+
+  let xy = pos[vertexIndex];
+  return VSOutput(
+    vec4f(xy, 0.0, 1.0),
+    xy * vec2f(0.5, -0.5) + vec2f(0.5)
+  );
+}
+
+@group(0) @binding(1) var panoramaSampler: sampler;
+@group(0) @binding(2) var panoramaTexture: texture_external;
+@group(0) @binding(3) var<uniform> uniforms: Uniforms;
+
+const PI = radians(180.0);
+@fragment
+fn main(@builtin(position) position: vec4f) -> @location(0) vec4f {
+  let pos = position.xy / uniforms.targetSize * 2.0 - 1.0;
+  let t = uniforms.viewDirectionProjectionInverse * vec4f(pos, 0, 1);
+  let dir = normalize(t.xyz / t.w);
+
+  let longitude = atan2(dir.z, dir.x);
+  let latitude = asin(dir.y / length(dir));
+
+  let uv = vec2f(
+    longitude / (2.0 * PI) + 0.5,
+    latitude / PI + 0.5,
+  );
+
+  return textureSampleBaseClampToEdge(panoramaTexture, panoramaSampler, uv);
+}
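The fragment shader is a standard equirectangular lookup: it unprojects each pixel's clip-space position through viewDirectionProjectionInverse into a world-space view direction, then converts that direction to longitude/latitude and finally to UV. The direction-to-UV step, mirrored in TypeScript as a reference (the function is mine, not part of the sample):

```ts
// Map a normalized direction to equirectangular UV, mirroring the WGSL above.
function dirToEquirectUV(dir: [number, number, number]): [number, number] {
  const [x, y, z] = dir;
  const longitude = Math.atan2(z, x); // -PI..PI, angle around the y axis
  const latitude = Math.asin(y);      // -PI/2..PI/2 (dir must be normalized)
  return [
    longitude / (2 * Math.PI) + 0.5,  // u wraps once around the panorama
    latitude / Math.PI + 0.5,         // v runs bottom (0) to top (1)
  ];
}

// Looking along +x lands at the center of the panorama image.
console.log(dirToEquirectUV([1, 0, 0])); // [0.5, 0.5]
```

Also worth noting: the vertex stage uses the single full-screen-triangle trick, with three vertices at (-1,-1), (-1,3), and (3,-1) covering all of clip space, so no vertex buffer is needed.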
