Skip to content

Commit 027b6dd

Browse files
authored
Merge pull request #2148 from Kitware/webgpu_coincident_offset
feat(WebGPU): use inverted depth and improve edges
2 parents b92ad54 + 4afe77e commit 027b6dd

File tree

13 files changed

+194
-46
lines changed

13 files changed

+194
-46
lines changed

Sources/Rendering/WebGPU/BufferManager/index.js

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -548,12 +548,12 @@ function vtkWebGPUBufferManager(publicAPI, model) {
548548

549549
// prettier-ignore
550550
const array = new Float32Array([
551-
-1.0, -1.0, 1.0,
552-
1.0, -1.0, 1.0,
553-
1.0, 1.0, 1.0,
554-
-1.0, -1.0, 1.0,
555-
1.0, 1.0, 1.0,
556-
-1.0, 1.0, 1.0,
551+
-1.0, -1.0, 0.0,
552+
1.0, -1.0, 0.0,
553+
1.0, 1.0, 0.0,
554+
-1.0, -1.0, 0.0,
555+
1.0, 1.0, 0.0,
556+
-1.0, 1.0, 0.0,
557557
]);
558558
model.fullScreenQuadBuffer.createAndWrite(array, GPUBufferUsage.VERTEX);
559559
model.fullScreenQuadBuffer.setStrideInBytes(12);

Sources/Rendering/WebGPU/Camera/index.js

Lines changed: 68 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,67 @@ function vtkWebGPUCamera(publicAPI, model) {
1313
// Set our className
1414
model.classHierarchy.push('vtkWebGPUCamera');
1515

16+
publicAPI.getProjectionMatrix = (outMat, aspect, cRange, windowCenter) => {
17+
mat4.identity(outMat);
18+
if (model.renderable.getParallelProjection()) {
19+
// set up a rectangular parallelepiped
20+
const parallelScale = model.renderable.getParallelScale();
21+
const width = parallelScale * aspect;
22+
const height = parallelScale;
23+
24+
const xmin = (windowCenter[0] - 1.0) * width;
25+
const xmax = (windowCenter[0] + 1.0) * width;
26+
const ymin = (windowCenter[1] - 1.0) * height;
27+
const ymax = (windowCenter[1] + 1.0) * height;
28+
29+
const xr = 1.0 / (xmax - xmin);
30+
const yr = 1.0 / (ymax - ymin);
31+
outMat[0] = 2.0 * xr;
32+
outMat[5] = 2.0 * yr;
33+
outMat[10] = 1.0 / (cRange[1] - cRange[0]);
34+
outMat[12] = (xmax + xmin) * xr;
35+
outMat[13] = (ymax + ymin) * yr;
36+
outMat[14] = cRange[1] / (cRange[1] - cRange[0]);
37+
} else {
38+
const tmp = Math.tan((Math.PI * model.renderable.getViewAngle()) / 360.0);
39+
let width;
40+
let height;
41+
if (model.renderable.getUseHorizontalViewAngle() === true) {
42+
width = cRange[0] * tmp;
43+
height = (cRange[0] * tmp) / aspect;
44+
} else {
45+
width = cRange[0] * tmp * aspect;
46+
height = cRange[0] * tmp;
47+
}
48+
49+
const xmin = (windowCenter[0] - 1.0) * width;
50+
const xmax = (windowCenter[0] + 1.0) * width;
51+
const ymin = (windowCenter[1] - 1.0) * height;
52+
const ymax = (windowCenter[1] + 1.0) * height;
53+
54+
outMat[0] = (2.0 * cRange[0]) / (xmax - xmin);
55+
outMat[5] = (2.0 * cRange[0]) / (ymax - ymin);
56+
outMat[12] = (xmin + xmax) / (xmax - xmin);
57+
outMat[13] = (ymin + ymax) / (ymax - ymin);
58+
outMat[10] = 0.0;
59+
outMat[14] = cRange[0];
60+
outMat[11] = -1.0;
61+
outMat[15] = 0.0;
62+
}
63+
};
64+
65+
publicAPI.convertToOpenGLDepth = (val) => {
66+
if (model.renderable.getParallelProjection()) {
67+
return 1.0 - val;
68+
}
69+
const cRange = model.renderable.getClippingRangeByReference();
70+
let zval = -cRange[0] / val;
71+
zval =
72+
(cRange[0] + cRange[1]) / (cRange[1] - cRange[0]) +
73+
(2.0 * cRange[0] * cRange[1]) / (zval * (cRange[1] - cRange[0]));
74+
return 0.5 * zval + 0.5;
75+
};
76+
1677
publicAPI.getKeyMatrices = (webGPURenderer) => {
1778
// has the camera changed?
1879
const ren = webGPURenderer.getRenderable();
@@ -44,14 +105,13 @@ function vtkWebGPUCamera(publicAPI, model) {
44105

45106
const aspectRatio = webGPURenderer.getAspectRatio();
46107

47-
const vcpc = model.renderable.getProjectionMatrix(aspectRatio, -1, 1);
48-
mat4.transpose(model.keyMatrices.vcpc, vcpc);
49-
50-
// adjust due to WebGPU using a different coordinate system in Z
51-
model.keyMatrices.vcpc[2] = 0.5 * vcpc[8] + 0.5 * vcpc[12];
52-
model.keyMatrices.vcpc[6] = 0.5 * vcpc[9] + 0.5 * vcpc[13];
53-
model.keyMatrices.vcpc[10] = 0.5 * vcpc[10] + 0.5 * vcpc[14];
54-
model.keyMatrices.vcpc[14] = 0.5 * vcpc[11] + 0.5 * vcpc[15];
108+
const cRange = model.renderable.getClippingRangeByReference();
109+
publicAPI.getProjectionMatrix(
110+
model.keyMatrices.vcpc,
111+
aspectRatio,
112+
cRange,
113+
model.renderable.getWindowCenterByReference()
114+
);
55115

56116
mat4.multiply(
57117
model.keyMatrices.scpc,

Sources/Rendering/WebGPU/Glyph3DMapper/index.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ function vtkWebGPUGlyph3DMapper(publicAPI, model) {
7171
publicAPI.replaceShaderSelect = (hash, pipeline, vertexInput) => {
7272
if (hash.includes('sel')) {
7373
const vDesc = pipeline.getShaderDescription('vertex');
74-
vDesc.addOutput('u32', 'compositeID');
74+
vDesc.addOutput('u32', 'compositeID', 'flat');
7575
let code = vDesc.getCode();
7676
code = vtkWebGPUShaderCache.substitute(code, '//VTK::Select::Impl', [
7777
' output.compositeID = input.instanceIndex;',

Sources/Rendering/WebGPU/HardwareSelectionPass/index.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ function vtkWebGPUHardwareSelectionPass(publicAPI, model) {
9898
primitive: { cullMode: 'none' },
9999
depthStencil: {
100100
depthWriteEnabled: true,
101-
depthCompare: 'less',
101+
depthCompare: 'greater',
102102
format: 'depth32float',
103103
},
104104
fragment: {

Sources/Rendering/WebGPU/HardwareSelector/index.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,7 @@ function getPixelInformationWithData(
7272
buffdata.zbufferBufferWidth +
7373
inDisplayPosition[0];
7474
info.zValue = buffdata.depthValues[offset];
75+
info.zValue = buffdata.webGPURenderer.convertToOpenGLDepth(info.zValue);
7576
info.displayPosition = inDisplayPosition;
7677
}
7778
return info;

Sources/Rendering/WebGPU/OrderIndependentTranslucentPass/index.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -176,7 +176,7 @@ function vtkWebGPUOrderIndependentTranslucentPass(publicAPI, model) {
176176
primitive: { cullMode: 'none' },
177177
depthStencil: {
178178
depthWriteEnabled: false,
179-
depthCompare: 'less',
179+
depthCompare: 'greater',
180180
format: 'depth32float',
181181
},
182182
fragment: {

Sources/Rendering/WebGPU/PolyDataMapper/index.js

Lines changed: 20 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,8 @@ fn main(
104104
105105
if (computedColor.a == 0.0) { discard; };
106106
107+
//VTK::Position::Impl
108+
107109
//VTK::RenderEncoder::Impl
108110
return output;
109111
}
@@ -214,9 +216,23 @@ function vtkWebGPUPolyDataMapper(publicAPI, model) {
214216
const vDesc = pipeline.getShaderDescription('vertex');
215217
vDesc.addBuiltinOutput('vec4<f32>', '[[builtin(position)]] Position');
216218
let code = vDesc.getCode();
217-
code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [
218-
' output.Position = rendererUBO.SCPCMatrix*mapperUBO.BCSCMatrix*vertexBC;',
219-
]).result;
219+
if (isEdges(hash)) {
220+
vDesc.addBuiltinInput('u32', '[[builtin(instance_index)]] instanceIndex');
221+
// widen the edge
222+
code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [
223+
' var tmpPos: vec4<f32> = rendererUBO.SCPCMatrix*mapperUBO.BCSCMatrix*vertexBC;',
224+
' var tmpPos2: vec3<f32> = tmpPos.xyz / tmpPos.w;',
225+
' tmpPos2.x = tmpPos2.x + 1.4*(f32(input.instanceIndex % 2u) - 0.5)/rendererUBO.viewportSize.x;',
226+
' tmpPos2.y = tmpPos2.y + 1.4*(f32(input.instanceIndex / 2u) - 0.5)/rendererUBO.viewportSize.y;',
227+
' tmpPos2.z = tmpPos2.z + 0.00001;', // could become a setting
228+
' output.Position = vec4<f32>(tmpPos2.xyz * tmpPos.w, tmpPos.w);',
229+
]).result;
230+
} else {
231+
code = vtkWebGPUShaderCache.substitute(code, '//VTK::Position::Impl', [
232+
' output.Position = rendererUBO.SCPCMatrix*mapperUBO.BCSCMatrix*vertexBC;',
233+
]).result;
234+
}
235+
220236
vDesc.setCode(code);
221237
};
222238

@@ -712,7 +728,7 @@ function vtkWebGPUPolyDataMapper(publicAPI, model) {
712728
);
713729

714730
primHelper.setWebGPURenderer(model.WebGPURenderer);
715-
primHelper.setNumberOfInstances(1);
731+
primHelper.setNumberOfInstances(4);
716732
const vbo = primHelper.getVertexInput().getBuffer('vertexBC');
717733
primHelper.setNumberOfVertices(
718734
vbo.getSizeInBytes() / vbo.getStrideInBytes()

Sources/Rendering/WebGPU/README.md

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -128,3 +128,43 @@ Note that none of the classes in the WebGPU directory are meant to be accessed d
128128
The volume renderer in WebGPU starts in the ForwardPass, which if it detects volumes invokes a volume pass. The volume pass requests bounding boxes from all volumes and renders them, along with the opaque polygonal depth buffer to create min and max ray depth textures. These textures are bounds for each fragment's ray casting. Then the VolumePassFSQ gets invoked with these two bounding textures to actually perform the ray casting of the voxels between the min and max.
129129

130130
The ray casting is done for all volumes at once and the VolumePassFSQ class is where all the complexity and work is done.
131+
132+
133+
## Zbuffer implementation and calculations
134+
135+
The depth buffer is stored as a 32bit float and ranges from 1.0 to 0.0. The distance to the near clipping plane is by far the largest factor determining the accuracy of the zbuffer. The farther out you can place the near plane the better. See https://zero-radiance.github.io/post/z-buffer/ for a more detailed analysis of why we use this approach.
136+
137+
### Orthographic
138+
139+
For orthographic projections the zbuffer ranges from 1.0 at the near plane to 0.0 at the far plane. The depth value in both the vertex and fragment shader is given as
140+
141+
```position.z = (zVC + f)/(f - n)```
142+
143+
within the fragment shader you can get the z value (in view coordinates)
144+
145+
```zVC = position.z * (far - near) - far```
146+
147+
The depth values are linear in depth.
148+
149+
### Perspective
150+
151+
For perspective we use a reverse infinite far clip projection which ranges from 1.0 at the near plane to 0.0 at infinity. The depth value in the vertex shader is
152+
153+
```position.z = near```
154+
```position.w = -zVC```
155+
156+
and in the fragment after division by w as
157+
158+
```position.z = -near / zVC```
159+
160+
within the shader you can get the z value (in view coordinates)
161+
162+
```zVC = -near / position.z```
163+
164+
The depth values are not linear in depth.
165+
166+
You can offset geometry by a factor cF ranging from 0.0 to 1.0 using the following formula
167+
168+
```z' = 1.0 - (1.0 - cF)*(1.0 - z)```
169+
```z' = z + cF - cF*z```
170+
```z' = (1.0 - cF)*z + cF```

Sources/Rendering/WebGPU/RenderEncoder/index.js

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ export function extend(publicAPI, model, initialValues = {}) {
141141
],
142142
depthStencilAttachment: {
143143
view: undefined,
144-
depthLoadValue: 1.0,
144+
depthLoadValue: 0.0,
145145
depthStoreOp: 'store',
146146
stencilLoadValue: 0,
147147
stencilStoreOp: 'store',
@@ -164,7 +164,7 @@ export function extend(publicAPI, model, initialValues = {}) {
164164
primitive: { cullMode: 'none' },
165165
depthStencil: {
166166
depthWriteEnabled: true,
167-
depthCompare: 'less-equal',
167+
depthCompare: 'greater-equal',
168168
format: 'depth32float',
169169
},
170170
fragment: {

Sources/Rendering/WebGPU/Renderer/index.js

Lines changed: 21 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -51,11 +51,15 @@ function vtkWebGPURenderer(publicAPI, model) {
5151
return;
5252
}
5353

54+
model.camera = model.renderable.getActiveCamera();
55+
5456
publicAPI.updateLights();
5557
publicAPI.prepareNodes();
56-
publicAPI.addMissingNode(model.renderable.getActiveCamera());
58+
publicAPI.addMissingNode(model.camera);
5759
publicAPI.addMissingNodes(model.renderable.getViewPropsWithNestedProps());
5860
publicAPI.removeUnusedNodes();
61+
62+
model.webgpuCamera = publicAPI.getViewNodeFor(model.camera);
5963
publicAPI.updateStabilizedMatrix();
6064
}
6165
};
@@ -87,10 +91,9 @@ function vtkWebGPURenderer(publicAPI, model) {
8791
// when the center of the view frustum moves a lot
8892
// we recenter it. The center of the view frustum is roughly
8993
// camPos + dirOfProj*(far + near)*0.5
90-
const cam = model.renderable.getActiveCamera();
91-
const clipRange = cam.getClippingRange();
92-
const pos = cam.getPositionByReference();
93-
const dop = cam.getDirectionOfProjectionByReference();
94+
const clipRange = model.camera.getClippingRange();
95+
const pos = model.camera.getPositionByReference();
96+
const dop = model.camera.getDirectionOfProjectionByReference();
9497
const center = [];
9598
const offset = [];
9699
vec3.scale(offset, dop, 0.5 * (clipRange[0] + clipRange[1]));
@@ -137,23 +140,27 @@ function vtkWebGPURenderer(publicAPI, model) {
137140
publicAPI.updateUBO = () => {
138141
// make sure the data is up to date
139142
// has the camera changed?
140-
const cam = model.renderable.getActiveCamera();
141-
const webgpuCamera = publicAPI.getViewNodeFor(cam);
142143
const utime = model.UBO.getSendTime();
143144
if (
144145
model.parent.getMTime() > utime ||
145146
publicAPI.getMTime() > utime ||
146-
cam.getMTime() > utime ||
147+
model.camera.getMTime() > utime ||
147148
model.renderable.getMTime() > utime
148149
) {
149-
const keyMats = webgpuCamera.getKeyMatrices(publicAPI);
150+
const keyMats = model.webgpuCamera.getKeyMatrices(publicAPI);
150151
model.UBO.setArray('WCVCMatrix', keyMats.wcvc);
151152
model.UBO.setArray('SCPCMatrix', keyMats.scpc);
152153
model.UBO.setArray('PCSCMatrix', keyMats.pcsc);
153154
model.UBO.setArray('SCVCMatrix', keyMats.scvc);
154155
model.UBO.setArray('VCPCMatrix', keyMats.vcpc);
155156
model.UBO.setArray('WCVCNormals', keyMats.normalMatrix);
156-
model.UBO.setValue('cameraParallel', cam.getParallelProjection());
157+
158+
const tsize = publicAPI.getYInvertedTiledSizeAndOrigin();
159+
model.UBO.setArray('viewportSize', [tsize.usize, tsize.vsize]);
160+
model.UBO.setValue(
161+
'cameraParallel',
162+
model.camera.getParallelProjection()
163+
);
157164

158165
const device = model.parent.getDevice();
159166
model.UBO.sendIfNeeded(device);
@@ -299,6 +306,9 @@ function vtkWebGPURenderer(publicAPI, model) {
299306
);
300307
};
301308

309+
publicAPI.convertToOpenGLDepth = (val) =>
310+
model.webgpuCamera.convertToOpenGLDepth(val);
311+
302312
publicAPI.getYInvertedTiledSizeAndOrigin = () => {
303313
const res = publicAPI.getTiledSizeAndOrigin();
304314
const size = model.parent.getSizeByReference();
@@ -393,6 +403,7 @@ export function extend(publicAPI, model, initialValues = {}) {
393403
model.UBO.addEntry('SCVCMatrix', 'mat4x4<f32>');
394404
model.UBO.addEntry('VCPCMatrix', 'mat4x4<f32>');
395405
model.UBO.addEntry('WCVCNormals', 'mat4x4<f32>');
406+
model.UBO.addEntry('viewportSize', 'vec2<f32>');
396407
model.UBO.addEntry('cameraParallel', 'u32');
397408

398409
model.bindGroup = vtkWebGPUBindGroup.newInstance();

0 commit comments

Comments
 (0)