Commit 7bd322d

enhance: Entity runs validate after marking circular reference point (#3133)
1 parent 2c19204 commit 7bd322d

File tree

5 files changed: +42, -16 lines changed

5 files changed

+42
-16
lines changed

.changeset/wet-mirrors-visit.md

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+---
+'@data-client/endpoint': patch
+---
+
+Validate after marking circular reference loops
+
+This should not change any behavior, as validate should be deterministic: if it fails
+it will fail again, and failure means throwing, which exits the whole stack.
+This improves code grouping. (And possibly a cache locality improvement, though I didn't check.)
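
The determinism argument above can be illustrated with a small sketch. This is not code from @data-client/endpoint; visitEntity, Visited, and the validate callback are hypothetical stand-ins, shown only to make the point that a validation failure throws and unwinds the whole traversal, so nothing pushed into the visited tracker just before the throw is ever observed.

// Sketch only: hypothetical names, not the library implementation.
type Visited = Record<string, Record<string, object[]>>;

function visitEntity(
  entity: { id: string },
  validate: (e: object) => string | undefined,
  visited: Visited,
  entityType = 'User',
): string {
  visited[entityType] ??= {};
  visited[entityType][entity.id] ??= [];
  // Mark the circular-reference tracker first...
  visited[entityType][entity.id].push(entity);
  // ...then validate. A deterministic validate that fails here would also have
  // failed before the push; either way the throw unwinds the whole call stack
  // and `visited` is abandoned along with it.
  const errorMessage = validate(entity);
  if (errorMessage) throw new Error(errorMessage);
  return entity.id;
}

try {
  visitEntity({ id: '1' }, () => 'this always fails', {});
} catch (e) {
  console.log((e as Error).message); // "this always fails"
}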

packages/endpoint/src/schemas/EntitySchema.ts

Lines changed: 4 additions & 3 deletions
@@ -300,8 +300,9 @@ export default function EntitySchema<TBase extends Constructor>(
       } else {
         id = `${id}`;
       }
-      const entityType = this.key;

+      /* Circular reference short-circuiter */
+      const entityType = this.key;
       if (!(entityType in visitedEntities)) {
         visitedEntities[entityType] = {};
       }
@@ -313,11 +314,11 @@
       ) {
         return id;
       }
+      visitedEntities[entityType][id].push(input);
+
       const errorMessage = this.validate(processedEntity);
       throwValidationError(errorMessage);

-      visitedEntities[entityType][id].push(input);
-
       Object.keys(this.schema).forEach(key => {
         if (Object.hasOwn(processedEntity, key)) {
           processedEntity[key] = visit(
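
For readers following the hunk above, here is a simplified, self-contained model of the circular-reference short-circuiter. It is illustrative only (shortCircuit and VisitedEntities are not names from the package): an input already recorded under a given entity type and id triggers an early return, which is what stops infinite recursion on self-referencing data; this commit simply moves the validate() call to run after the input is recorded.

// Sketch only: hypothetical helper, not EntitySchema itself.
type VisitedEntities = Record<string, Record<string, unknown[]>>;

function shortCircuit(
  entityType: string,
  id: string,
  input: unknown,
  visitedEntities: VisitedEntities,
): boolean {
  if (!(entityType in visitedEntities)) {
    visitedEntities[entityType] = {};
  }
  if (!(id in visitedEntities[entityType])) {
    visitedEntities[entityType][id] = [];
  }
  // Already saw this exact object under this type/id: stop recursing.
  if (visitedEntities[entityType][id].some(entity => entity === input)) {
    return true;
  }
  // Record the input before any further processing; in the real schema,
  // validate() now runs after this point.
  visitedEntities[entityType][id].push(input);
  return false;
}

const visited: VisitedEntities = {};
const user: { id: string; friends: unknown[] } = { id: '123', friends: [] };
user.friends.push(user);
console.log(shortCircuit('User', '123', user, visited)); // false: first visit
console.log(shortCircuit('User', '123', user, visited)); // true: circular re-entry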

packages/normalizr/src/__tests__/__snapshots__/index.test.js.snap

Lines changed: 2 additions & 0 deletions
@@ -538,6 +538,8 @@ exports[`normalize normalizes entities with circular references 1`] = `
 }
 `;

+exports[`normalize normalizes entities with circular references that fails validation 1`] = `"this always fails"`;
+
 exports[`normalize normalizes nested entities 1`] = `
 {
   "entities": {

packages/normalizr/src/__tests__/index.test.js

Lines changed: 14 additions & 0 deletions
@@ -216,6 +216,20 @@ describe('normalize', () => {
     expect(normalize(input, User)).toMatchSnapshot();
   });

+  test('normalizes entities with circular references that fails validation', () => {
+    class User extends IDEntity {
+      static validate(processedEntity) {
+        return 'this always fails';
+      }
+    }
+    User.schema = { friends: [User] };
+
+    const input = { id: '123', friends: [] };
+    input.friends.push(input);
+
+    expect(() => normalize(input, User)).toThrowErrorMatchingSnapshot();
+  });
+
   test('normalizes nested entities', () => {
     class User extends IDEntity {}
     class Comment extends IDEntity {

packages/normalizr/src/normalize.ts

Lines changed: 13 additions & 13 deletions
@@ -59,9 +59,9 @@ const addEntities =
   (
     newEntities: Record<string, any>,
     newIndexes: Record<string, any>,
-    storeEntities: Record<string, any>,
-    storeIndexes: Record<string, any>,
-    storeEntityMeta: {
+    entitiesCopy: Record<string, any>,
+    indexesCopy: Record<string, any>,
+    entityMetaCopy: {
       [entityKey: string]: {
         [pk: string]: {
           date: number;
@@ -78,8 +78,8 @@ const addEntities =
     if (!(schemaKey in newEntities)) {
       newEntities[schemaKey] = {};
       // we will be editing these, so we need to clone them first
-      storeEntities[schemaKey] = { ...storeEntities[schemaKey] };
-      storeEntityMeta[schemaKey] = { ...storeEntityMeta[schemaKey] };
+      entitiesCopy[schemaKey] = { ...entitiesCopy[schemaKey] };
+      entityMetaCopy[schemaKey] = { ...entityMetaCopy[schemaKey] };
     }

     const existingEntity = newEntities[schemaKey][id];
@@ -89,49 +89,49 @@
         processedEntity,
       );
     } else {
-      const inStoreEntity = storeEntities[schemaKey][id];
+      const inStoreEntity = entitiesCopy[schemaKey][id];
       let inStoreMeta: {
         date: number;
         expiresAt: number;
         fetchedAt: number;
       };
       // this case we already have this entity in store
-      if (inStoreEntity && (inStoreMeta = storeEntityMeta[schemaKey][id])) {
+      if (inStoreEntity && (inStoreMeta = entityMetaCopy[schemaKey][id])) {
         newEntities[schemaKey][id] = schema.mergeWithStore(
           inStoreMeta,
           actionMeta,
           inStoreEntity,
           processedEntity,
         );
-        storeEntityMeta[schemaKey][id] = schema.mergeMetaWithStore(
+        entityMetaCopy[schemaKey][id] = schema.mergeMetaWithStore(
           inStoreMeta,
           actionMeta,
           inStoreEntity,
           processedEntity,
         );
       } else {
         newEntities[schemaKey][id] = processedEntity;
-        storeEntityMeta[schemaKey][id] = actionMeta;
+        entityMetaCopy[schemaKey][id] = actionMeta;
       }
     }

     // update index
     if (schema.indexes) {
       if (!(schemaKey in newIndexes)) {
         newIndexes[schemaKey] = {};
-        storeIndexes[schemaKey] = { ...storeIndexes[schemaKey] };
+        indexesCopy[schemaKey] = { ...indexesCopy[schemaKey] };
       }
       handleIndexes(
         id,
         schema.indexes,
         newIndexes[schemaKey],
-        storeIndexes[schemaKey],
+        indexesCopy[schemaKey],
         newEntities[schemaKey][id],
-        storeEntities[schemaKey],
+        entitiesCopy[schemaKey],
       );
     }
     // set this after index updates so we know what indexes to remove from
-    storeEntities[schemaKey][id] = newEntities[schemaKey][id];
+    entitiesCopy[schemaKey][id] = newEntities[schemaKey][id];
   };

 function handleIndexes(
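
The changes in this file are a pure rename, but the new parameter names document an invariant worth spelling out: addEntities receives shallow copies of the store tables and clones each schema key once before writing to it, so the objects already in the store are never mutated. A minimal sketch of that copy-on-first-write pattern follows; writeEntity and EntityTable are hypothetical names, not exports of @data-client/normalizr.

// Sketch only: hypothetical helper demonstrating copy-on-first-write.
type EntityTable = Record<string, Record<string, unknown>>;

function writeEntity(
  entitiesCopy: EntityTable,
  clonedKeys: Set<string>,
  schemaKey: string,
  id: string,
  processedEntity: unknown,
): void {
  if (!clonedKeys.has(schemaKey)) {
    clonedKeys.add(schemaKey);
    // we will be editing these, so we need to clone them first
    entitiesCopy[schemaKey] = { ...entitiesCopy[schemaKey] };
  }
  entitiesCopy[schemaKey][id] = processedEntity;
}

const store: EntityTable = { User: { '1': { id: '1', name: 'old' } } };
const entitiesCopy: EntityTable = { ...store };
writeEntity(entitiesCopy, new Set(), 'User', '1', { id: '1', name: 'new' });
console.log(store.User['1']); // still { id: '1', name: 'old' }: the store is untouched
console.log(entitiesCopy.User['1']); // { id: '1', name: 'new' }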

0 commit comments
