Commit ff2473f

still needs a cleanup step
but now partial snapshots work...
1 parent b7083a4 commit ff2473f

File tree

1 file changed (+96, -55 lines)

packages/core/src/world/world.ts

Lines changed: 96 additions & 55 deletions
@@ -371,22 +371,14 @@ export function createWorld(
 	const entityMasks = ctx.entityMasks.map((gen) => [...gen]);
 	const traitData: WorldSnapshot['traitData'] = [];
 	let traits = [] as (TraitInstance | undefined)[]
+	let entityIdTraits = null as number[] | null

 	if (args.length === 1 && args[0] === '*') {
 		traits = allInstances;
 	} else {
 		world.query(...args).useTraitInstances((traitInstances, entities)=>{
-			// if we want to exclude non entity matches
-			/// (for better query semantics)
-			//// should we remap ids?
-			///// does load() destroy everything previous?
-			////// can you do a partial snapshot?
-			// in this example you also get the full store back
-			/// TraitInstance > Store but same idea
-			/// but looping over the matching entities is what constrains it
-			//// to the matching entities (query semantics)
-			//// https://github.com/pmndrs/koota?tab=readme-ov-file#modifying-trait-stores-directly
 			traits = traitInstances;
+			entityIdTraits = entities.map((entity)=>entity.id());
 		})
 	}
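
Note: the hunk above records which entities actually matched the query (entityIdTraits), and the serialization hunk below uses those ids to null out every store row that falls outside the match. A minimal standalone sketch of that masking idea follows, assuming only that store index equals entity id, as the diff does. maskColumn is a hypothetical helper, not code from this commit, and the Set is one way the later entityIdTraits.includes(i) checks could be tightened during the cleanup pass the commit message mentions.

// Hypothetical sketch (not in the commit): restrict a dense SoA column to the
// rows owned by the matched entity ids. A Set makes the membership test O(1),
// whereas the diff currently calls entityIdTraits.includes(i) per row.
function maskColumn<T>(
	column: (T | null | undefined)[],
	matchedIds: number[] | null // null means the '*' (full) snapshot
): (T | null | undefined)[] {
	if (!matchedIds) return column;
	const idSet = new Set(matchedIds);
	return column.map((value, index) =>
		value === undefined || value === null
			? value // empty rows stay empty
			: idSet.has(index)
				? value // row belongs to a matched entity: keep it
				: null // row outside the partial snapshot: drop it
	);
}

// maskColumn([10, 20, 30], [0, 2]) -> [10, null, 30]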

@@ -402,66 +394,97 @@ export function createWorld(
 			const schema = trait.schema as Record<string, any>;
 			const serializers = traitCtx.options?.serialize as Record<string, Function> | undefined;

-			const data = Object.keys(schema).map((key) => {
+			const soaData = Object.keys(schema).map((key) => {
 				let dataArray = (store as Record<string, unknown[]>)[key].slice(
 					0,
 					entityIndex.maxId
 				);
-
-				// Check for per-column serializer
 				const serializer = serializers?.[key];
-				if (serializer) {
-					dataArray = dataArray.map((v) => (v === undefined || v === null) ? v : serializer(v));
+				if (serializer || entityIdTraits) {
+					for (let i = 0; i < dataArray.length; i++){
+						const data = dataArray[i]
+						if(data === undefined || data === null){
+							continue;
+						}
+						if(entityIdTraits){
+							if(!entityIdTraits.includes(i)){
+								dataArray[i] = null
+								continue;
+							}
+						}
+						if(serializer){
+							dataArray[i] = serializer(data)
+							continue;
+						}
+					}
+				}
+				if(typeof schema[key] === 'function'){
+					return JSON.stringify(dataArray)
 				}
-
 				return dataArray;
 			});
-			traitData.push({ id: trait.id, type: 'soa', data });
+			traitData.push({ id: trait.id, type: 'soa', data: soaData });
 		} else if (type === 'aos') {
-			let data = (store as any[]).slice(0, entityIndex.maxId);
+			let aosData = (store as any[]).slice(0, entityIndex.maxId);

 			const serializer = traitCtx.options?.serialize as Function | undefined;
 			// Check for custom serializer
-			if (serializer) {
-				data = data.map((v) => v ? serializer(v) : v);
+			if (serializer || entityIdTraits) {
+				for (let i = 0; i < aosData.length; i++){
+					const data = aosData[i]
+					if(data === undefined || data === null){
+						continue;
+					}
+					if(entityIdTraits){
+						if(!entityIdTraits.includes(i)){
+							aosData[i] = null
+							continue;
+						}
+					}
+					if(serializer){
+						aosData[i] = serializer(data)
+						continue;
+					}
+				}
 			}

 			traitData.push({
 				id: trait.id,
 				type: 'aos',
-				data: JSON.stringify(data),
+				data: JSON.stringify(aosData),
 			});
 		}
 	});
-
-	// Gather Relation Topology
 	const relations: WorldSnapshot['relations'] = [];
-	traits.forEach((instance) => {
-		if (!instance || !instance.relationTargets) return;
-		const { trait, relationTargets } = instance;
-		const traitCtx = trait[$internal];
-		const relation = traitCtx.relation;
-		if (!relation) return;
-
-		const isExclusive = relation[$internal].exclusive;
-		if (isExclusive) {
-			// Exclusive: Array of Entity IDs (or undefined)
-			const data = (relationTargets as (number | undefined)[]).slice(
-				0,
-				entityIndex.maxId
-			);
-			relations.push({ id: trait.id, type: 'exclusive', data });
-		} else {
-			// Non-Exclusive: Array of Array of Entity IDs
-			// We use JSON here to handle the nested variable-length arrays easily
-			const rawData = (relationTargets as number[][]).slice(0, entityIndex.maxId);
-			relations.push({
-				id: trait.id,
-				type: 'relation',
-				data: JSON.stringify(rawData),
-			});
-		}
-	});
+	if (args.length === 1 && args[0] === '*') {
+		// Gather Relation Topology
+		traits.forEach((instance) => {
+			if (!instance || !instance.relationTargets) return;
+			const { trait, relationTargets } = instance;
+			const traitCtx = trait[$internal];
+			const relation = traitCtx.relation;
+			if (!relation) return;
+
+			const isExclusive = relation[$internal].exclusive;
+			if (isExclusive) {
+				// Exclusive: Array of Entity IDs (or undefined)
+				const data = (relationTargets as (number | undefined)[]).slice(
+					0,
+					entityIndex.maxId
+				);
+				relations.push({ id: trait.id, type: 'exclusive', data });
+			} else {
+				// Non-Exclusive: Array of Array of Entity IDs
+				// We use JSON here to handle the nested variable-length arrays easily
+				const rawData = (relationTargets as number[][]).slice(0, entityIndex.maxId);
+				relations.push({
+					id: trait.id,
+					type: 'relation',
+					data: JSON.stringify(rawData),
+				});
+			}
+		});
+	}

 	return {
 		worldId: id,
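
Note: the SoA and AoS branches in this hunk run the same per-row loop (skip empty rows, null out rows outside entityIdTraits, apply the optional serializer), so one plausible shape for the cleanup step flagged in the commit message is hoisting that loop into a shared helper. This is a sketch only; filterRowsForSnapshot and its signature are assumptions, not code from this repository.

// Hypothetical shared helper (an assumption, not from the commit): the same
// filter/serialize pass that the SoA and AoS branches above each perform inline.
type RowSerializer = (value: unknown) => unknown;

function filterRowsForSnapshot(
	rows: unknown[],
	matchedIds: Set<number> | null, // null for the '*' (full) snapshot
	serialize?: RowSerializer
): unknown[] {
	return rows.map((value, index) => {
		if (value === undefined || value === null) return value; // nothing stored for this entity
		if (matchedIds && !matchedIds.has(index)) return null; // row outside the query match
		return serialize ? serialize(value) : value;
	});
}

// Both branches could then reduce to something like:
//   const soaData = filterRowsForSnapshot(dataArray, idSet, serializers?.[key]);
//   const aosData = filterRowsForSnapshot(rawStore, idSet, serializer);
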
@@ -527,27 +550,45 @@ export function createWorld(

 				if (dataArray !== undefined) {
 					const deserializer = deserializers?.[key];
+					if(typeof val === 'function'){
+						dataArray = JSON.parse(dataArray)
+					}
+					const currentStore = store[key];
 					if (deserializer) {
 						// For hydration, we retrieve the current value from the store to pass to deserialize
-						const currentStore = store[key];
 						dataArray = dataArray.map((v: any, idx: number) =>
-							(v === undefined || v === null) ? v : deserializer(v, currentStore[idx])
+							// support partial snapshot
+							(v === undefined || v === null) ? currentStore[idx] ?? v : deserializer(v, currentStore[idx])
+						);
+					}
+					else{
+						dataArray = dataArray.map((v: any, idx: number) =>
+							// support partial snapshot
+							(v === undefined || v === null) ? currentStore[idx] ?? v : v
 						);
 					}
 					store[key] = dataArray;
 				}
 			}
 		} else {
 			const store = instance.store as any[];
-			let deserialized = JSON.parse(t.data);
+			const parsedDataArray = JSON.parse(t.data);
 			const deserializer = traitCtx.options?.deserialize as Function | undefined;
-
+			let deserialized = []
+
 			if (deserializer) {
-				const newStore = deserialized.map((v: any, idx: number) =>
-					v ? deserializer(v, store[idx]) : v
+				const newStore = parsedDataArray.map((v: any, idx: number) =>
+					// support partial snapshot
+					v ? deserializer(v, store[idx]) : store[idx] ?? v
 				);
 				deserialized = newStore;
 			}
+			else{
+				deserialized = parsedDataArray.map((v: any, idx: number) =>
+					// support partial snapshot
+					(v === undefined || v === null) ? store[idx] ?? v : v
+				);
+			}
 			store.length = 0;
 			store.push(...deserialized);
 		}
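
Note: on the load side, the new "?? currentStore[idx]" and "?? store[idx]" fallbacks are what make partial snapshots non-destructive: a null or undefined cell means the row was outside the snapshot, so the value already in the store is kept instead of being wiped. Below is a rough sketch of that per-row merge rule as the SoA path applies it (the AoS deserializer branch tests plain truthiness instead); mergeRow is an illustrative name, not an API in this file.

// Hypothetical illustration of the per-row hydration rule used above.
type RowDeserializer = (value: unknown, current: unknown) => unknown;

function mergeRow(
	snapshotValue: unknown,
	currentValue: unknown,
	deserialize?: RowDeserializer
): unknown {
	if (snapshotValue === undefined || snapshotValue === null) {
		// Row was not captured by the partial snapshot: keep what the world already has.
		return currentValue ?? snapshotValue;
	}
	// Row was captured: it overwrites, optionally through the trait's deserializer.
	return deserialize ? deserialize(snapshotValue, currentValue) : snapshotValue;
}

// mergeRow(null, 42)        -> 42   (row untouched by the partial snapshot)
// mergeRow(7, 42)           -> 7    (row present in the snapshot wins)
// mergeRow(null, undefined) -> null (nothing on either side)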
