
Commit a158b2f

Merge pull request #12893 from CesiumGS/fix-pnts-draco-loading
Fix pnts draco loading
2 parents 17647cb + 713dbb7 commit a158b2f

File tree

7 files changed (+112, -50 lines)


CHANGES.md

Lines changed: 1 addition & 0 deletions
@@ -11,6 +11,7 @@
 - Fix flickering artifact in Gaussian splat models caused by incorrect sorting results. [#12662](https://github.com/CesiumGS/cesium/issues/12662)
 - Improved performance and reduced memory usage of `Event` class. [#12896](https://github.com/CesiumGS/cesium/pull/12896)
 - Fixes vertical misalignment of glyphs in labels with small fonts [#8474](https://github.com/CesiumGS/cesium/issues/8474)
+- Prevent runtime errors for certain forms of invalid PNTS files [#12872](https://github.com/CesiumGS/cesium/issues/12872)

 #### Additions :tada:

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+# PointCloudDracoInvalid
+
+A test related to https://github.com/CesiumGS/cesium/issues/12872 :
+
+For a certain period of time (2 years), and under certain conditions, the
+point cloud tiler generated invalid PNTS files when draco compression
+was enabled. The batch table could define certain properties as binary
+body references, without a batch table binary being present, and without
+the `3DTILES_draco_point_compression` extension object being present
+in the batch table JSON.
+
+The PNTS file here is one PNTS file from the data set that was linked
+in the issue (with details in the issue description). This is to
+ensure that CesiumJS gracefully handles this case, without crashing.
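
To make the failure mode described above concrete, here is a minimal sketch of the kind of batch table JSON such an invalid file carries. The property names and values are hypothetical and only illustrate the pattern; they are not taken from the actual test file.

// Hypothetical batch table JSON of an invalid draco-compressed PNTS file.
// Both properties are declared as binary body references (they carry a
// byteOffset), yet the file contains no batch table binary, and the batch
// table JSON has no 3DTILES_draco_point_compression extension object whose
// "properties" mapping would resolve them from the draco-compressed data.
const invalidBatchTableJson = {
  Intensity: {
    byteOffset: 0,
    componentType: "UNSIGNED_BYTE",
    type: "SCALAR",
  },
  Classification: {
    byteOffset: 4,
    componentType: "UNSIGNED_BYTE",
    type: "SCALAR",
  },
};

A well-formed file would either include a batch table binary that these byteOffset values point into, or list the property names in the batch table's 3DTILES_draco_point_compression extension so that they are decoded from the draco-compressed data instead.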
Binary file not shown (the newly added pointCloudDracoInvalid.pnts test file).
Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
+{
+  "asset": {
+    "version": "1.0"
+  },
+  "extensionsRequired": ["3DTILES_draco_point_compression"],
+  "extensionsUsed": ["3DTILES_draco_point_compression"],
+  "geometricError": 1.7320508075688772,
+  "root": {
+    "boundingVolume": {
+      "box": [0.5, 0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5]
+    },
+    "content": { "uri": "pointCloudDracoInvalid.pnts" },
+    "geometricError": 0.0,
+    "refine": "ADD"
+  }
+}

packages/engine/Source/Scene/BatchTableHierarchy.js

Lines changed: 2 additions & 50 deletions
@@ -6,7 +6,7 @@ import ComponentDatatype from "../Core/ComponentDatatype.js";
 import defined from "../Core/defined.js";
 import DeveloperError from "../Core/DeveloperError.js";
 import getBinaryAccessor from "./getBinaryAccessor.js";
-import RuntimeError from "../Core/RuntimeError.js";
+import Cesium3DTileBatchTable from "./Cesium3DTileBatchTable.js";

 /**
  * Object for handling the <code>3DTILES_batch_table_hierarchy</code> extension
@@ -127,7 +127,7 @@ function initialize(hierarchy, hierarchyJson, binaryBody) {
   for (i = 0; i < classesLength; ++i) {
     const classInstancesLength = classes[i].length;
     const properties = classes[i].instances;
-    const binaryProperties = getBinaryProperties(
+    const binaryProperties = Cesium3DTileBatchTable.getBinaryProperties(
       classInstancesLength,
       properties,
       binaryBody,
@@ -154,54 +154,6 @@ function initialize(hierarchy, hierarchyJson, binaryBody) {
   hierarchy._byteLength = byteLength;
 }

-function getBinaryProperties(featuresLength, properties, binaryBody) {
-  let binaryProperties;
-  for (const name in properties) {
-    if (properties.hasOwnProperty(name)) {
-      const property = properties[name];
-      const byteOffset = property.byteOffset;
-      if (defined(byteOffset)) {
-        // This is a binary property
-        const componentType = property.componentType;
-        const type = property.type;
-        if (!defined(componentType)) {
-          throw new RuntimeError("componentType is required.");
-        }
-        if (!defined(type)) {
-          throw new RuntimeError("type is required.");
-        }
-        if (!defined(binaryBody)) {
-          throw new RuntimeError(
-            `Property ${name} requires a batch table binary.`,
-          );
-        }
-
-        const binaryAccessor = getBinaryAccessor(property);
-        const componentCount = binaryAccessor.componentsPerAttribute;
-        const classType = binaryAccessor.classType;
-        const typedArray = binaryAccessor.createArrayBufferView(
-          binaryBody.buffer,
-          binaryBody.byteOffset + byteOffset,
-          featuresLength,
-        );
-
-        if (!defined(binaryProperties)) {
-          binaryProperties = {};
-        }
-
-        // Store any information needed to access the binary data, including the typed array,
-        // componentCount (e.g. a VEC4 would be 4), and the type used to pack and unpack (e.g. Cartesian4).
-        binaryProperties[name] = {
-          typedArray: typedArray,
-          componentCount: componentCount,
-          type: classType,
-        };
-      }
-    }
-  }
-  return binaryProperties;
-}
-
 function countBinaryPropertyMemory(binaryProperties) {
   let byteLength = 0;
   for (const name in binaryProperties) {

packages/engine/Source/Scene/PntsParser.js

Lines changed: 65 additions & 0 deletions
@@ -5,6 +5,7 @@ import combine from "../Core/combine.js";
 import ComponentDatatype from "../Core/ComponentDatatype.js";
 import defined from "../Core/defined.js";
 import getJsonFromTypedArray from "../Core/getJsonFromTypedArray.js";
+import oneTimeWarning from "../Core/oneTimeWarning.js";
 import RuntimeError from "../Core/RuntimeError.js";
 import AttributeType from "./AttributeType.js";
 import Cesium3DTileFeatureTable from "./Cesium3DTileFeatureTable.js";
@@ -185,9 +186,73 @@ PntsParser.parse = function (arrayBuffer, byteOffset) {
     parsedContent.batchTableBinary = batchTableBinary;
   }

+  // Handle the case that binary body references are contained in the
+  // batch table without a batch table binary being present
+  removeInvalidBinaryBodyReferences(parsedContent);
+
   return parsedContent;
 };

+/**
+ * Remove all invalid binary body references from the batch table
+ * JSON of the given parsed content.
+ *
+ * This is a workaround for gracefully handling the invalid PNTS
+ * files that may have been created by the point cloud tiler.
+ * See https://github.com/CesiumGS/cesium/issues/12872
+ *
+ * When the batch table JSON is undefined, nothing will be done.
+ * When the batch table binary is defined, nothing will be done
+ * (assuming that any binary body references are valid - this is
+ * not checked here).
+ *
+ * Otherwise, this will remove all binary body references from the
+ * batch table JSON that are not resolved from draco via the
+ * `parsedContent.draco.batchTableProperties`.
+ *
+ * If any (invalid) binary body reference is found (and removed),
+ * a one-time warning will be printed.
+ *
+ * @param {object} parsedContent The parsed content
+ */
+function removeInvalidBinaryBodyReferences(parsedContent) {
+  const batchTableJson = parsedContent.batchTableJson;
+  if (!defined(batchTableJson)) {
+    return;
+  }
+  const batchTableBinary = parsedContent.batchTableBinary;
+  if (defined(batchTableBinary)) {
+    return;
+  }
+  const dracoBatchTablePropertyNames = Object.keys(
+    parsedContent.draco?.batchTableProperties ?? {},
+  );
+
+  // Collect the names of all binary body references (identified
+  // by the property having a `byteOffset`) that have not been
+  // resolved via the parsedContent.draco.batchTableProperties
+  const invalidBinaryBodyReferenceNames = [];
+  for (const name of Object.keys(batchTableJson)) {
+    const property = batchTableJson[name];
+    const byteOffset = property.byteOffset;
+    if (defined(byteOffset)) {
+      if (!dracoBatchTablePropertyNames.includes(name)) {
+        invalidBinaryBodyReferenceNames.push(name);
+      }
+    }
+  }
+
+  // If there have been invalid binary body references, print
+  // a one-time warning for each of them, and delete them.
+  for (const name of invalidBinaryBodyReferenceNames) {
+    oneTimeWarning(
+      `PntsParser-invalidBinaryBodyReference`,
+      `The point cloud data contained a binary property ${name} that could not be resolved - skipping`,
+    );
+    delete batchTableJson[name];
+  }
+}
+
 function parseDracoProperties(featureTable, batchTableJson) {
   const featureTableJson = featureTable.json;
   let dracoBuffer;
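
As a concrete illustration of the behavior documented in the comment above, the following standalone sketch restates the check and shows its effect on a hypothetical parsedContent object. The property name and the helper name dropUnresolvableBinaryBodyReferences are made up for illustration; the actual implementation is the removeInvalidBinaryBodyReferences function in the diff, which uses Cesium's defined() and oneTimeWarning() helpers.

// Hypothetical parsed content of an affected PNTS file: one property is a
// binary body reference (it has a byteOffset), but there is no batch table
// binary and no draco-resolved batch table property with that name.
const parsedContent = {
  batchTableJson: {
    Intensity: { byteOffset: 0, componentType: "UNSIGNED_BYTE", type: "SCALAR" },
  },
  batchTableBinary: undefined,
  draco: undefined,
};

// Standalone restatement of the logic above, using plain checks and
// console.warn instead of Cesium's defined() and oneTimeWarning().
function dropUnresolvableBinaryBodyReferences(content) {
  const json = content.batchTableJson;
  if (!json || content.batchTableBinary) {
    return;
  }
  const dracoNames = Object.keys(content.draco?.batchTableProperties ?? {});
  for (const name of Object.keys(json)) {
    if (json[name].byteOffset !== undefined && !dracoNames.includes(name)) {
      console.warn(`Binary property ${name} could not be resolved - skipping`);
      delete json[name];
    }
  }
}

dropUnresolvableBinaryBodyReferences(parsedContent);
console.log(parsedContent.batchTableJson); // {} - later stages see an empty batch table

This matches what the new spec below asserts: the tile content still loads, and its structural metadata ends up empty.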

packages/engine/Specs/Scene/Model/PntsLoaderSpec.js

Lines changed: 14 additions & 0 deletions
@@ -48,6 +48,8 @@ describe(
       "./Data/Cesium3DTiles/PointCloud/PointCloudDracoPartial/pointCloudDracoPartial.pnts";
     const pointCloudDracoBatchedUrl =
       "./Data/Cesium3DTiles/PointCloud/PointCloudDracoBatched/pointCloudDracoBatched.pnts";
+    const pointCloudDracoInvalidUrl =
+      "./Data/Cesium3DTiles/PointCloud/PointCloudDracoInvalid/pointCloudDracoInvalid.pnts";
     const pointCloudWGS84Url =
       "./Data/Cesium3DTiles/PointCloud/PointCloudWGS84/pointCloudWGS84.pnts";
     const pointCloudBatchedUrl =
@@ -538,6 +540,18 @@ describe(
       });
     });

+    it("loads PointCloudDracoInvalid without crashing", async function () {
+      // Test for https://github.com/CesiumGS/cesium/issues/12872:
+      // PNTS files that are invalid due to a missing batch table
+      // binary and draco compression extension object should
+      // load. (The metadata is expected to be empty here)
+      const loader = await loadPnts(pointCloudDracoInvalidUrl);
+      const components = loader.components;
+      expect(components).toBeDefined();
+      const isBatched = false;
+      expectMetadata(components.structuralMetadata, {}, isBatched);
+    });
+
     it("loads PointCloudWGS84", function () {
       return loadPnts(pointCloudWGS84Url).then(function (loader) {
         const components = loader.components;
