Skip to content

Commit b250f99

Browse files
committed
Add a Panorama to Video Example
1 parent b28ddf7 commit b250f99

9 files changed

+231
-17
lines changed
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
By: [Taryn Elliott](https://www.pexels.com/@taryn-elliott/)
2+
From: https://www.pexels.com/video/giraffe-walking-in-the-forest-5214261/
3+
License: https://www.pexels.com/license/
Binary file not shown.
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
By: [Fabio Casati](https://www.youtube.com/channel/UCTnaAJ2DlSM6jtdUFXtGu8Q)
2+
From: https://commons.wikimedia.org/wiki/File:Video_360%C2%B0._Timelapse._Bled_Lake_in_Slovenia..webm
3+
License: [CC BY 3.0](https://creativecommons.org/licenses/by/3.0)

sample/videoUploading/index.html

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,12 +21,10 @@
2121
html, body {
2222
margin: 0; /* remove default margin */
2323
height: 100%; /* make body fill the browser window */
24-
display: flex;
25-
place-content: center center;
2624
}
2725
canvas {
28-
width: 600px;
29-
height: 600px;
26+
width: 100%;
27+
height: 100%;
3028
max-width: 100%;
3129
display: block;
3230
}

sample/videoUploading/main.ts

Lines changed: 164 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,34 +1,55 @@
11
import { GUI } from 'dat.gui';
2+
import { mat3, mat4 } from 'wgpu-matrix';
23
import fullscreenTexturedQuadWGSL from '../../shaders/fullscreenTexturedQuad.wgsl';
34
import sampleExternalTextureWGSL from '../../shaders/sampleExternalTexture.frag.wgsl';
5+
import sampleExternalTextureAsPanoramaWGSL from './sampleExternalTextureAsPanorama.wgsl';
46
import { quitIfWebGPUNotAvailable } from '../util';
57

68
const adapter = await navigator.gpu?.requestAdapter();
79
const device = await adapter?.requestDevice();
810
quitIfWebGPUNotAvailable(adapter, device);
911

12+
const videos = [
13+
{
14+
name: 'giraffe',
15+
url: '../../assets/video/5214261-hd_1920_1080_25fps.mp4',
16+
mode: 'cover',
17+
},
18+
{ nane: 'lhc', url: '../../assets/video/pano.webm', mode: '360' },
19+
{
20+
name: 'lake',
21+
url: '../../assets/video/Video_360°._Timelapse._Bled_Lake_in_Slovenia..webm.720p.vp9.webm',
22+
mode: '360',
23+
},
24+
];
25+
1026
// Set video element
1127
const video = document.createElement('video');
1228
video.loop = true;
1329
video.playsInline = true;
1430
video.autoplay = true;
1531
video.muted = true;
16-
video.src = '../../assets/video/pano.webm';
17-
await video.play();
32+
33+
let canReadVideo = false;
34+
35+
async function playVideo(ndx: number) {
36+
canReadVideo = false;
37+
video.src = videos[ndx].url;
38+
await video.play();
39+
canReadVideo = true;
40+
}
41+
await playVideo(0);
1842

1943
const canvas = document.querySelector('canvas') as HTMLCanvasElement;
2044
const context = canvas.getContext('webgpu') as GPUCanvasContext;
21-
const devicePixelRatio = window.devicePixelRatio;
22-
canvas.width = canvas.clientWidth * devicePixelRatio;
23-
canvas.height = canvas.clientHeight * devicePixelRatio;
2445
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
2546

2647
context.configure({
2748
device,
2849
format: presentationFormat,
2950
});
3051

31-
const pipeline = device.createRenderPipeline({
52+
const videoCoverPipeline = device.createRenderPipeline({
3253
layout: 'auto',
3354
vertex: {
3455
module: device.createShaderModule({
@@ -50,15 +71,38 @@ const pipeline = device.createRenderPipeline({
5071
},
5172
});
5273

74+
const module = device.createShaderModule({
75+
code: sampleExternalTextureAsPanoramaWGSL,
76+
});
77+
const video360Pipeline = device.createRenderPipeline({
78+
layout: 'auto',
79+
vertex: { module },
80+
fragment: {
81+
module,
82+
targets: [{ format: presentationFormat }],
83+
},
84+
primitive: {
85+
topology: 'triangle-list',
86+
},
87+
});
88+
5389
const sampler = device.createSampler({
5490
magFilter: 'linear',
5591
minFilter: 'linear',
5692
});
5793

94+
// make buffer big enough for either pipeline
95+
const uniformBuffer = device.createBuffer({
96+
size: 80,
97+
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
98+
});
99+
58100
const params = new URLSearchParams(window.location.search);
59101
const settings = {
60102
requestFrame: 'requestAnimationFrame',
61103
videoSource: params.get('videoSource') || 'videoElement',
104+
video: 0,
105+
fov: 75,
62106
};
63107

64108
const gui = new GUI();
@@ -67,12 +111,31 @@ gui.add(settings, 'requestFrame', [
67111
'requestAnimationFrame',
68112
'requestVideoFrameCallback',
69113
]);
114+
gui
115+
.add(
116+
settings,
117+
'video',
118+
Object.fromEntries(videos.map(({ name }, i) => [name, i]))
119+
)
120+
.onChange(() => {
121+
playVideo(settings.video);
122+
});
70123

71-
function frame() {
124+
let yRotation = 0;
125+
let xRotation = 0;
126+
127+
function drawVideo() {
128+
const maxSize = device.limits.maxTextureDimension2D;
129+
canvas.width = Math.min(Math.max(1, canvas.offsetWidth), maxSize);
130+
canvas.height = Math.min(Math.max(1, canvas.offsetHeight), maxSize);
72131
const externalTextureSource =
73132
settings.videoSource === 'videoFrame' ? new VideoFrame(video) : video;
74133

75-
const uniformBindGroup = device.createBindGroup({
134+
const mode = videos[settings.video].mode;
135+
const pipeline = mode === '360' ? video360Pipeline : videoCoverPipeline;
136+
const canvasTexture = context.getCurrentTexture();
137+
138+
const bindGroup = device.createBindGroup({
76139
layout: pipeline.getBindGroupLayout(0),
77140
entries: [
78141
{
@@ -85,11 +148,60 @@ function frame() {
85148
source: externalTextureSource,
86149
}),
87150
},
151+
{
152+
binding: 3,
153+
resource: uniformBuffer,
154+
},
88155
],
89156
});
90157

158+
if (mode === '360') {
159+
// Spin the camera around the y axis and add in the user's x and y rotation;
160+
const time = performance.now() * 0.001;
161+
const rotation = time * 0.1 + yRotation;
162+
const projection = mat4.perspective(
163+
(settings.fov * Math.PI) / 180,
164+
canvas.clientWidth / canvas.clientHeight,
165+
0.5,
166+
100
167+
);
168+
169+
// Note: You can use any method you want to compute a view matrix,
170+
// just be sure to zero out the translation.
171+
const camera = mat4.identity();
172+
mat4.rotateY(camera, rotation, camera);
173+
mat4.rotateX(camera, xRotation, camera);
174+
mat4.setTranslation(camera, [0, 0, 0], camera);
175+
const view = mat4.inverse(camera);
176+
const viewDirectionProjection = mat4.multiply(projection, view);
177+
const viewDirectionProjectionInverse = mat4.inverse(
178+
viewDirectionProjection
179+
);
180+
181+
const uniforms = new Float32Array([
182+
...viewDirectionProjectionInverse,
183+
canvasTexture.width,
184+
canvasTexture.height,
185+
]);
186+
device.queue.writeBuffer(uniformBuffer, 0, uniforms);
187+
} else {
188+
// compute a `cover` matrix for a unit UV quad.
189+
const mat = mat3.identity();
190+
const videoAspect = video.videoWidth / video.videoHeight;
191+
const canvasAspect = canvas.offsetWidth / canvas.offsetHeight;
192+
const combinedAspect = videoAspect / canvasAspect;
193+
mat3.translate(mat, [0.5, 0.5], mat);
194+
mat3.scale(
195+
mat,
196+
combinedAspect > 1 ? [1 / combinedAspect, 1] : [1, combinedAspect],
197+
mat
198+
);
199+
mat3.translate(mat, [-0.5, -0.5], mat);
200+
device.queue.writeBuffer(uniformBuffer, 0, mat);
201+
}
202+
91203
const commandEncoder = device.createCommandEncoder();
92-
const textureView = context.getCurrentTexture().createView();
204+
const textureView = canvasTexture.createView();
93205

94206
const renderPassDescriptor: GPURenderPassDescriptor = {
95207
colorAttachments: [
@@ -104,16 +216,21 @@ function frame() {
104216

105217
const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
106218
passEncoder.setPipeline(pipeline);
107-
passEncoder.setBindGroup(0, uniformBindGroup);
219+
passEncoder.setBindGroup(0, bindGroup);
108220
passEncoder.draw(6);
109221
passEncoder.end();
110222
device.queue.submit([commandEncoder.finish()]);
111223

112224
if (externalTextureSource instanceof VideoFrame) {
113225
externalTextureSource.close();
114226
}
227+
}
115228

116-
if (settings.requestFrame == 'requestVideoFrameCallback') {
229+
function frame() {
230+
if (canReadVideo) {
231+
drawVideo();
232+
}
233+
if (canReadVideo && settings.requestFrame == 'requestVideoFrameCallback') {
117234
video.requestVideoFrameCallback(frame);
118235
} else {
119236
requestAnimationFrame(frame);
@@ -125,3 +242,39 @@ if (settings.requestFrame == 'requestVideoFrameCallback') {
125242
} else {
126243
requestAnimationFrame(frame);
127244
}
245+
246+
let startX = 0;
247+
let startY = 0;
248+
let startYRotation = 0;
249+
let startTarget = 0;
250+
251+
const clamp = (value: number, min: number, max: number) => {
252+
return Math.max(min, Math.min(max, value));
253+
};
254+
255+
const drag = (e: PointerEvent) => {
256+
const deltaX = e.clientX - startX;
257+
const deltaY = e.clientY - startY;
258+
yRotation = startYRotation + deltaX * 0.01;
259+
xRotation = clamp(
260+
startTarget + deltaY * -0.01,
261+
-Math.PI * 0.4,
262+
Math.PI * 0.4
263+
);
264+
};
265+
266+
const stopDrag = () => {
267+
window.removeEventListener('pointermove', drag);
268+
window.removeEventListener('pointerup', stopDrag);
269+
};
270+
271+
const startDrag = (e: PointerEvent) => {
272+
window.addEventListener('pointermove', drag);
273+
window.addEventListener('pointerup', stopDrag);
274+
e.preventDefault();
275+
startX = e.clientX;
276+
startY = e.clientY;
277+
startYRotation = yRotation;
278+
startTarget = xRotation;
279+
};
280+
canvas.addEventListener('pointerdown', startDrag);

sample/videoUploading/meta.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,16 @@
11
export default {
22
name: 'Video Uploading',
3-
description: 'This example shows how to upload video frame to WebGPU.',
3+
description: `\
4+
This example shows how to upload video frame to WebGPU.
5+
giraffe by [Taryn Elliott](https://www.pexels.com/video/giraffe-walking-in-the-forest-5214261/).
6+
lhc by [unknown](https://foo.com).
7+
lake by [Fabio Casati](https://commons.wikimedia.org/wiki/File:Video_360%C2%B0._Timelapse._Bled_Lake_in_Slovenia..webm), [CC BY 3.0](https://creativecommons.org/licenses/by/3.0)
8+
`,
49
filename: __DIRNAME__,
510
sources: [
611
{ path: 'main.ts' },
712
{ path: '../../shaders/fullscreenTexturedQuad.wgsl' },
813
{ path: '../../shaders/sampleExternalTexture.frag.wgsl' },
14+
{ path: './sampleExternalTextureAsPanorama.wgsl' },
915
],
1016
};
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
// Renders an equirectangular (360°) panorama video. Each screen pixel is
// unprojected to a world-space view direction using the inverse of the
// view-projection matrix, then that direction is mapped to
// longitude/latitude texture coordinates.

struct Uniforms {
  viewDirectionProjectionInverse: mat4x4f,
  targetSize: vec2f,
};

struct VSOutput {
  @builtin(position) position: vec4f,
  @location(0) uv: vec2f,
};

@vertex
fn vs(@builtin(vertex_index) vertexIndex: u32) -> VSOutput {
  // One oversized triangle that covers all of clip space; everything
  // outside [-1, 1] is clipped away, so no quad/index buffer is needed.
  let corners = array(
    vec2f(-1, -1),
    vec2f(-1, 3),
    vec2f( 3, -1),
  );

  let corner = corners[vertexIndex];
  return VSOutput(
    vec4f(corner, 0.0, 1.0),
    corner * vec2f(0.5, -0.5) + vec2f(0.5)
  );
}

@group(0) @binding(1) var panoramaSampler: sampler;
@group(0) @binding(2) var panoramaTexture: texture_external;
@group(0) @binding(3) var<uniform> uniforms: Uniforms;

const PI = radians(180.0);

@fragment
fn main(@builtin(position) position: vec4f) -> @location(0) vec4f {
  // Pixel coordinates -> normalized device coordinates in [-1, 1].
  let ndc = position.xy / uniforms.targetSize * 2.0 - 1.0;
  // Unproject to get this pixel's view direction (w-divide for perspective).
  let unprojected = uniforms.viewDirectionProjectionInverse * vec4f(ndc, 0, 1);
  let dir = normalize(unprojected.xyz / unprojected.w);

  // Direction -> spherical coordinates. The length() divide keeps the
  // asin argument within [-1, 1] despite floating-point rounding.
  let longitude = atan2(dir.z, dir.x);
  let latitude = asin(dir.y / length(dir));

  // Spherical coordinates -> equirectangular UV in [0, 1].
  let uv = vec2f(
    longitude / (2.0 * PI) + 0.5,
    latitude / PI + 0.5,
  );

  return textureSampleBaseClampToEdge(panoramaTexture, panoramaSampler, uv);
}
Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,12 @@
11
// Samples an external (video) texture with UVs transformed by a 3x3
// matrix (used by the 'cover' mode to scale/crop the video to the
// canvas). UVs that land outside [0, 1] are drawn solid red as a
// visual guard.
@group(0) @binding(1) var mySampler: sampler;
@group(0) @binding(2) var myTexture: texture_external;
@group(0) @binding(3) var<uniform> myMatrix: mat3x3f;

@fragment
fn main(@location(0) fragUV : vec2f) -> @location(0) vec4f {
  // Apply the affine UV transform in homogeneous 2D coordinates.
  let uv = (myMatrix * vec3f(fragUV, 1.0)).xy;
  let outOfRange = any(uv < vec2f(0.0)) || any(uv > vec2f(1.0));
  if (outOfRange) {
    return vec4f(1.0, 0.0, 0.0, 1.0);
  }
  return textureSampleBaseClampToEdge(myTexture, mySampler, uv);
}

0 commit comments

Comments
 (0)