0
# WebGPU Rendering
1
2
Three.js WebGPU renderer provides next-generation GPU rendering capabilities with compute shaders, advanced graphics features, and improved performance. WebGPU offers more direct access to modern GPU capabilities compared to WebGL, enabling advanced rendering techniques and general-purpose GPU computation.
3
4
**Import:** WebGPU functionality is available via the dedicated WebGPU build:
5
6
```javascript
7
import { WebGPURenderer } from 'three/webgpu';
8
import {
9
StorageInstancedBufferAttribute,
10
StorageBufferAttribute,
11
WebGPUCoordinateSystem
12
} from 'three/webgpu';
13
```
14
15
## Capabilities
16
17
### WebGPU Renderer
18
19
Advanced GPU renderer with compute capabilities and modern graphics pipeline support.
20
21
```javascript { .api }
22
/**
23
* WebGPU-based renderer with compute shader support and advanced rendering features
24
*/
25
class WebGPURenderer extends Renderer {
26
/**
27
* Create WebGPU renderer
28
* @param parameters - Renderer configuration options
29
*/
30
constructor(parameters?: WebGPURendererParameters);
31
32
/** HTML canvas element for rendering */
33
domElement: HTMLCanvasElement;
34
35
/** Read-only flag identifying this object as a WebGPURenderer */
isWebGPURenderer: true;
37
38
/** Coordinate system used (WebGPU vs WebGL differences) */
39
coordinateSystem: typeof WebGPUCoordinateSystem;
40
41
/**
42
* Initialize WebGPU context and resources
43
* @returns Promise that resolves when initialization is complete
44
*/
45
init(): Promise<WebGPURenderer>;
46
47
/**
48
* Render scene with camera
49
* @param scene - Scene to render
50
* @param camera - Camera defining viewpoint
51
* @returns This renderer for chaining
52
*/
53
render(scene: Object3D, camera: Camera): this;
54
55
/**
* Set the active render target
* @param renderTarget - Target to render to, or null to render to the canvas
* @param activeCubeFace - Cube face index when rendering to a cube render target
* @param activeMipmapLevel - Mipmap level to render to
*/
setRenderTarget(renderTarget: RenderTarget | null, activeCubeFace?: number, activeMipmapLevel?: number): void;
63
64
/**
65
* Execute compute shader operations
66
* @param computeNodes - Array of compute nodes to execute
67
* @returns Promise that resolves when compute is complete
68
*/
69
compute(computeNodes: Node[]): Promise<void>;
70
71
/**
72
* Execute compute shader operations asynchronously
73
* @param computeNodes - Array of compute nodes to execute
74
* @returns Promise that resolves when compute is complete
75
*/
76
computeAsync(computeNodes: Node[]): Promise<void>;
77
78
/**
79
* Set size of renderer and canvas
80
* @param width - Width in pixels
81
* @param height - Height in pixels
82
* @param updateStyle - Whether to update CSS styles
83
*/
84
setSize(width: number, height: number, updateStyle?: boolean): void;
85
86
/**
87
* Set device pixel ratio for high-DPI displays
88
* @param value - Device pixel ratio (window.devicePixelRatio)
89
*/
90
setPixelRatio(value: number): void;
91
92
/**
93
* Set viewport within canvas
94
* @param x - X offset in pixels
95
* @param y - Y offset in pixels
96
* @param width - Viewport width in pixels
97
* @param height - Viewport height in pixels
98
*/
99
setViewport(x: number, y: number, width: number, height: number): void;
100
101
/**
102
* Set scissor test region
103
* @param x - X offset in pixels
104
* @param y - Y offset in pixels
105
* @param width - Scissor width in pixels
106
* @param height - Scissor height in pixels
107
*/
108
setScissor(x: number, y: number, width: number, height: number): void;
109
110
/**
111
* Enable or disable scissor test
112
* @param boolean - Whether scissor test is enabled
113
*/
114
setScissorTest(boolean: boolean): void;
115
116
/**
117
* Set clear color for background
118
* @param color - Clear color
119
* @param alpha - Alpha value for clear color
120
*/
121
setClearColor(color: ColorRepresentation, alpha?: number): void;
122
123
/**
124
* Set clear alpha value
125
* @param alpha - Alpha value (0-1)
126
*/
127
setClearAlpha(alpha: number): void;
128
129
/**
130
* Clear render buffers
131
* @param color - Whether to clear color buffer
132
* @param depth - Whether to clear depth buffer
133
* @param stencil - Whether to clear stencil buffer
134
*/
135
clear(color?: boolean, depth?: boolean, stencil?: boolean): void;
136
137
/**
138
* Dispose of WebGPU resources
139
*/
140
dispose(): void;
141
142
/**
143
* Get current render target
144
* @returns Active render target or null
145
*/
146
getRenderTarget(): RenderTarget | null;
147
148
/**
149
* Get WebGPU device capabilities
150
* @returns Object describing supported features and limits
151
*/
152
getCapabilities(): WebGPUCapabilities;
153
154
/**
155
* Get rendering context information
156
* @returns WebGPU context information
157
*/
158
getContext(): GPUCanvasContext;
159
160
/**
* Copy the current framebuffer contents into a texture
* @param texture - Destination texture to copy into
* @param position - Optional origin within the framebuffer to copy from
* @param level - Mipmap level to copy into
*/
copyFramebufferToTexture(texture: Texture, position?: Vector2, level?: number): void;
167
168
/**
* Copy data from one texture to another
* @param position - Destination position to copy to
* @param srcTexture - Source texture
* @param dstTexture - Destination texture
* @param level - Mipmap level
*/
copyTextureToTexture(position: Vector2, srcTexture: Texture, dstTexture: Texture, level?: number): void;
175
}
176
177
interface WebGPURendererParameters {
178
/** Canvas element to render to */
179
canvas?: HTMLCanvasElement;
180
181
/** GPU device to use (auto-selected if not specified) */
182
device?: GPUDevice;
183
184
/** GPU adapter to use (auto-selected if not specified) */
185
adapter?: GPUAdapter;
186
187
/** Antialiasing configuration */
188
antialias?: boolean;
189
190
/** Whether canvas has alpha channel */
191
alpha?: boolean;
192
193
/** Whether to premultiply alpha */
194
premultipliedAlpha?: boolean;
195
196
/** Whether to preserve drawing buffer */
197
preserveDrawingBuffer?: boolean;
198
199
/** Power preference for GPU selection */
200
powerPreference?: 'low-power' | 'high-performance';
201
202
/** Required features for WebGPU device */
203
requiredFeatures?: Iterable<GPUFeatureName>;
204
205
/** Required limits for WebGPU device */
206
requiredLimits?: Record<string, GPUSize64>;
207
208
/** Fallback adapter options */
209
fallbackAdapters?: GPURequestAdapterOptions[];
210
}
211
212
interface WebGPUCapabilities {
213
/** Whether WebGPU is supported */
214
isWebGPU: boolean;
215
216
/** Maximum number of texture units */
maxTextures: number;
218
219
/** Maximum vertex attributes */
220
maxVertexAttributes: number;
221
222
/** Maximum vertex uniform vectors */
223
maxVertexUniforms: number;
224
225
/** Maximum fragment uniform vectors */
226
maxFragmentUniforms: number;
227
228
/** Maximum samples for multisampling */
229
maxSamples: number;
230
231
/** Supported texture formats */
232
textureFormats: Set<string>;
233
234
/** Device features */
235
features: Set<string>;
236
237
/** Device limits */
238
limits: GPUSupportedLimits;
239
}
240
```
241
242
**Usage Example:**
243
244
```javascript
245
import { WebGPURenderer } from 'three/webgpu';
246
import * as THREE from 'three';
247
248
async function initWebGPU() {
249
// Check WebGPU support
250
if (!navigator.gpu) {
251
throw new Error('WebGPU not supported');
252
}
253
254
// Create renderer
255
const renderer = new WebGPURenderer({
256
antialias: true,
257
requiredFeatures: ['timestamp-query'],
258
powerPreference: 'high-performance'
259
});
260
261
// Initialize WebGPU context
262
await renderer.init();
263
264
// Set up canvas
265
renderer.setSize(window.innerWidth, window.innerHeight);
266
renderer.setPixelRatio(window.devicePixelRatio);
267
document.body.appendChild(renderer.domElement);
268
269
// Create scene
270
const scene = new THREE.Scene();
271
const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
272
273
// Render loop
274
function animate() {
275
requestAnimationFrame(animate);
276
renderer.render(scene, camera);
277
}
278
animate();
279
}
280
281
initWebGPU().catch(console.error);
282
```
283
284
### Compute Shaders
285
286
WebGPU enables general-purpose GPU computation through compute shaders for parallel processing tasks.
287
288
```javascript { .api }
289
/**
290
* Node-based compute shader system for GPU computation
291
*/
292
interface ComputeNode extends Node {
293
/** Number of invocations per compute workgroup */
workgroupSize: number;
295
296
/** Compute shader function */
297
computeNode: Node;
298
299
/**
300
* Execute compute shader
301
* @param renderer - WebGPU renderer
302
* @returns Promise resolving when compute is complete
303
*/
304
compute(renderer: WebGPURenderer): Promise<void>;
305
}
306
307
/**
308
* Create compute node for GPU computation
309
* @param computeShader - Compute shader function as node
310
* @param count - Number of compute invocations
311
* @param workgroupSize - Size of compute workgroup
312
* @returns Compute node ready for execution
313
*/
314
declare function compute(computeShader: Node, count: number, workgroupSize?: number): ComputeNode;
315
316
/**
317
* Storage buffer for compute shader data exchange
318
*/
319
class StorageBufferNode extends BufferNode {
320
/**
321
* Create storage buffer node
322
* @param value - Initial buffer data
323
* @param bufferType - Type of storage buffer
324
* @param bufferCount - Number of elements
325
*/
326
constructor(value: ArrayLike<number>, bufferType?: string, bufferCount?: number);
327
328
/** Buffer access mode */
329
access: 'read' | 'write' | 'read_write';
330
}
331
```
332
333
**Usage Example:**
334
335
```javascript
336
import { WebGPURenderer, compute, storage, uniform } from 'three/webgpu';
337
import { wgsl } from 'three/tsl';
338
339
async function runCompute() {
340
const renderer = new WebGPURenderer();
341
await renderer.init();
342
343
// Create storage buffers for input/output data (one vec4<f32> element each)
const inputBuffer = storage(new Float32Array([1, 2, 3, 4]), 'vec4<f32>', 1);
const outputBuffer = storage(new Float32Array(4), 'vec4<f32>', 1);
346
347
// Define compute shader using WGSL
348
const computeShader = wgsl(`
349
@compute @workgroup_size(1)
350
fn main(@builtin(global_invocation_id) global_id : vec3<u32>) {
351
let index = global_id.x;
352
outputBuffer[index] = inputBuffer[index] * 2.0;
353
}
354
`);
355
356
// Create compute node
357
const computeNode = compute(computeShader, 1);
358
359
// Execute compute shader
360
await renderer.compute([computeNode]);
361
362
// Read results
363
const results = await renderer.readStorageBuffer(outputBuffer);
364
console.log('Compute results:', results);
365
}
366
```
367
368
### Storage Attributes
369
370
Enhanced buffer attributes for compute shaders and advanced rendering techniques.
371
372
```javascript { .api }
373
/**
374
* Storage buffer attribute for compute shader data exchange
375
*/
376
class StorageBufferAttribute extends BufferAttribute {
377
/**
378
* Create storage buffer attribute
379
* @param array - Typed array containing attribute data
380
* @param itemSize - Number of components per vertex
381
* @param offset - Byte offset into buffer
382
* @param normalized - Whether to normalize integer values
383
*/
384
constructor(array: TypedArray, itemSize: number, offset?: number, normalized?: boolean);
385
386
/** Buffer usage flags for WebGPU */
387
usage: GPUBufferUsageFlags;
388
389
/** Read-only flag identifying this as a storage buffer attribute */
isStorageBufferAttribute: true;
391
}
392
393
/**
394
* Instanced storage buffer attribute for compute-driven instancing
395
*/
396
class StorageInstancedBufferAttribute extends StorageBufferAttribute {
397
/**
398
* Create storage instanced buffer attribute
399
* @param array - Typed array containing instance data
400
* @param itemSize - Number of components per instance
401
* @param meshPerAttribute - Number of meshes per attribute (for multidraw)
402
* @param offset - Byte offset into buffer
403
* @param normalized - Whether to normalize integer values
404
*/
405
constructor(
406
array: TypedArray,
407
itemSize: number,
408
meshPerAttribute?: number,
409
offset?: number,
410
normalized?: boolean
411
);
412
413
/** Number of meshes per attribute */
414
meshPerAttribute: number;
415
416
/** Whether this is an instanced attribute */
417
isInstancedBufferAttribute: true;
418
}
419
```
420
421
### WebGPU Specific Features
422
423
Advanced features unique to WebGPU renderer implementation.
424
425
```javascript { .api }
426
/**
427
* Query system for GPU timing and performance metrics
428
*/
429
class TimestampQuery {
430
/** Query name/identifier */
431
name: string;
432
433
/** Whether query is active */
434
active: boolean;
435
436
/**
437
* Get query results in nanoseconds
438
* @returns GPU time in nanoseconds or null if not ready
439
*/
440
getResult(): number | null;
441
442
/**
443
* Reset query for reuse
444
*/
445
reset(): void;
446
}
447
448
/**
449
* Create timestamp query for GPU timing
450
* @param name - Query identifier
451
* @returns Timestamp query object
452
*/
453
declare function createTimestampQuery(name: string): TimestampQuery;
454
455
/**
456
* WebGPU render bundle for optimized draw call recording
457
*/
458
interface RenderBundle {
459
/** Bundle identifier */
460
id: string;
461
462
/** Recorded draw calls */
463
drawCalls: DrawCall[];
464
465
/**
466
* Execute bundle on GPU
467
* @param renderer - WebGPU renderer
468
*/
469
execute(renderer: WebGPURenderer): void;
470
}
471
472
/**
473
* Create render bundle for draw call batching
474
* @param scene - Scene to record
475
* @param camera - Camera for rendering
476
* @returns Render bundle with recorded draw calls
477
*/
478
declare function createRenderBundle(scene: Object3D, camera: Camera): RenderBundle;
479
```
480
481
### Node System Integration
482
483
WebGPU renderer integrates deeply with Three.js node system for advanced material effects.
484
485
```javascript { .api }
486
/**
487
* WebGPU-specific node types for advanced rendering
488
*/
489
490
/**
491
* Compute texture node for procedural texture generation
492
*/
493
class ComputeTextureNode extends TempNode {
494
/**
495
* Create compute texture node
496
* @param computeShader - Compute shader for texture generation
497
* @param width - Texture width
498
* @param height - Texture height
499
* @param format - Texture format
500
*/
501
constructor(computeShader: Node, width: number, height: number, format?: PixelFormat);
502
503
/** Texture dimensions */
504
width: number;
505
height: number;
506
507
/** Texture format */
508
format: PixelFormat;
509
}
510
511
/**
512
* GPU particle system using compute shaders
513
*/
514
class GPUParticleSystem extends Object3D {
515
/**
516
* Create GPU particle system
517
* @param maxParticles - Maximum number of particles
518
* @param computeShader - Particle update compute shader
519
*/
520
constructor(maxParticles: number, computeShader: Node);
521
522
/** Maximum particle count */
523
maxParticles: number;
524
525
/** Current active particle count */
526
particleCount: number;
527
528
/** Particle position buffer */
529
positionBuffer: StorageBufferAttribute;
530
531
/** Particle velocity buffer */
532
velocityBuffer: StorageBufferAttribute;
533
534
/**
535
* Update particle system
536
* @param deltaTime - Time delta for simulation
537
*/
538
update(deltaTime: number): void;
539
540
/**
541
* Reset all particles
542
*/
543
reset(): void;
544
545
/**
546
* Emit new particles
547
* @param count - Number of particles to emit
548
* @param position - Emission position
549
* @param velocity - Initial velocity
550
*/
551
emit(count: number, position: Vector3, velocity: Vector3): void;
552
}
553
```
554
555
## Types
556
557
```javascript { .api }
558
// WebGPU-specific type definitions
559
interface GPUDevice extends EventTarget {
560
features: GPUSupportedFeatures;
561
limits: GPUSupportedLimits;
562
queue: GPUQueue;
563
createBuffer(descriptor: GPUBufferDescriptor): GPUBuffer;
564
createTexture(descriptor: GPUTextureDescriptor): GPUTexture;
565
createSampler(descriptor?: GPUSamplerDescriptor): GPUSampler;
566
createShaderModule(descriptor: GPUShaderModuleDescriptor): GPUShaderModule;
567
createComputePipeline(descriptor: GPUComputePipelineDescriptor): GPUComputePipeline;
568
createRenderPipeline(descriptor: GPURenderPipelineDescriptor): GPURenderPipeline;
569
}
570
571
interface GPUAdapter {
572
features: GPUSupportedFeatures;
573
limits: GPUSupportedLimits;
574
info: GPUAdapterInfo;
575
requestDevice(descriptor?: GPUDeviceDescriptor): Promise<GPUDevice>;
576
}
577
578
type GPUBufferUsageFlags = number;
579
type GPUTextureUsageFlags = number;
580
type GPUShaderStageFlags = number;
581
582
// WebGPU coordinate system constant
583
declare const WebGPUCoordinateSystem: 2001;
584
```
585
586
## Usage Examples
587
588
**Advanced Compute-Based Particle System:**
589
590
```javascript
591
import { WebGPURenderer, compute, storage, uniform } from 'three/webgpu';
592
import { wgsl, vec3, float } from 'three/tsl';
593
import * as THREE from 'three';
594
595
class ComputeParticleSystem {
596
constructor(particleCount = 10000) {
this.particleCount = particleCount;
// init() is async and cannot be awaited from a constructor —
// callers must invoke `await instance.init()` before use.
}
600
601
async init() {
602
this.renderer = new WebGPURenderer();
603
await this.renderer.init();
604
605
// Create particle data buffers
606
this.positions = new Float32Array(this.particleCount * 3);
607
this.velocities = new Float32Array(this.particleCount * 3);
608
609
// Initialize with random positions and velocities
610
for (let i = 0; i < this.particleCount; i++) {
611
const i3 = i * 3;
612
this.positions[i3] = (Math.random() - 0.5) * 10;
613
this.positions[i3 + 1] = (Math.random() - 0.5) * 10;
614
this.positions[i3 + 2] = (Math.random() - 0.5) * 10;
615
616
this.velocities[i3] = (Math.random() - 0.5) * 2;
617
this.velocities[i3 + 1] = (Math.random() - 0.5) * 2;
618
this.velocities[i3 + 2] = (Math.random() - 0.5) * 2;
619
}
620
621
// Create storage buffers
622
this.positionBuffer = storage(this.positions, 'vec3<f32>');
623
this.velocityBuffer = storage(this.velocities, 'vec3<f32>');
624
625
// Simulation parameters
626
this.timeUniform = uniform(0);
627
this.deltaTimeUniform = uniform(0.016);
628
this.gravityUniform = uniform(vec3(0, -9.81, 0));
629
630
// Create compute shader for particle simulation
631
this.updateShader = wgsl(`
632
@group(0) @binding(0) var<storage, read_write> positions: array<vec3<f32>>;
633
@group(0) @binding(1) var<storage, read_write> velocities: array<vec3<f32>>;
634
@group(1) @binding(0) var<uniform> deltaTime: f32;
635
@group(1) @binding(1) var<uniform> gravity: vec3<f32>;
636
637
@compute @workgroup_size(64)
638
fn main(@builtin(global_invocation_id) global_id: vec3<u32>) {
639
let index = global_id.x;
640
if (index >= ${this.particleCount}u) { return; }
641
642
// Apply gravity
643
velocities[index] += gravity * deltaTime;
644
645
// Update position
646
positions[index] += velocities[index] * deltaTime;
647
648
// Simple ground collision
649
if (positions[index].y < -5.0) {
650
positions[index].y = -5.0;
651
velocities[index].y = abs(velocities[index].y) * 0.8;
652
}
653
654
// Simple boundary collision
655
for (var i = 0; i < 3; i++) {
656
if (abs(positions[index][i]) > 10.0) {
657
positions[index][i] = sign(positions[index][i]) * 10.0;
658
velocities[index][i] *= -0.9;
659
}
660
}
661
}
662
`);
663
664
// Create compute node
665
const workgroupSize = 64;
666
const workgroupCount = Math.ceil(this.particleCount / workgroupSize);
667
this.computeNode = compute(this.updateShader, workgroupCount);
668
669
this.setupRenderObjects();
670
}
671
672
setupRenderObjects() {
673
// Create geometry for particles
674
this.geometry = new THREE.BufferGeometry();
675
this.geometry.setAttribute('position', new THREE.BufferAttribute(this.positions, 3));
676
677
// Create particle material
678
this.material = new THREE.PointsMaterial({
679
color: 0x88ccff,
680
size: 0.1,
681
sizeAttenuation: true
682
});
683
684
// Create points object
685
this.points = new THREE.Points(this.geometry, this.material);
686
}
687
688
async update(deltaTime) {
689
// Update uniforms
690
this.deltaTimeUniform.value = deltaTime;
691
692
// Run compute shader
693
await this.renderer.compute([this.computeNode]);
694
695
// Update geometry with new positions
696
this.geometry.attributes.position.needsUpdate = true;
697
}
698
699
addToScene(scene) {
700
scene.add(this.points);
701
}
702
}
703
704
// Usage
705
async function createParticleDemo() {
706
const scene = new THREE.Scene();
707
const camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);
708
camera.position.set(0, 0, 15);
709
710
const particleSystem = new ComputeParticleSystem(50000);
711
await particleSystem.init();
712
particleSystem.addToScene(scene);
713
714
const clock = new THREE.Clock();
715
716
function animate() {
717
const deltaTime = clock.getDelta();
718
719
particleSystem.update(deltaTime);
720
particleSystem.renderer.render(scene, camera);
721
722
requestAnimationFrame(animate);
723
}
724
725
animate();
726
}
727
```