
Commit bdab8c4

fix: updatePartInstancesSegmentIds: take into account the case where multiple segments have been merged into one.
Also ensure that the bulkWrite uses a DB index.
1 parent 63a1d17 commit bdab8c4
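
For context, a minimal, standalone TypeScript sketch (not code from this repository; the segment ids are hypothetical) of why the single-valued fromSegmentId rule lost information when several segments were merged into one target segment, and how the new fromSegmentIds array keeps every source segment:

type SegmentId = string

// Old rule shape: only one source segment could be remembered per target segment
const oldRules = new Map<SegmentId, { fromSegmentId: SegmentId | null }>()
// New rule shape: every source segment is remembered per target segment
const newRules = new Map<SegmentId, { fromSegmentIds: SegmentId[] }>()

// Hypothetical ingest result: segA and segB have both been merged into segMerged
const renamedSegments: Array<[SegmentId, SegmentId]> = [
	['segA', 'segMerged'],
	['segB', 'segMerged'],
]

for (const [fromSegmentId, toSegmentId] of renamedSegments) {
	const oldRule = oldRules.get(toSegmentId) ?? { fromSegmentId: null }
	oldRules.set(toSegmentId, oldRule)
	oldRule.fromSegmentId = fromSegmentId // second iteration overwrites 'segA'

	const newRule = newRules.get(toSegmentId) ?? { fromSegmentIds: [] }
	newRules.set(toSegmentId, newRule)
	newRule.fromSegmentIds.push(fromSegmentId) // both sources are kept
}

console.log(oldRules.get('segMerged')) // { fromSegmentId: 'segB' }  -> 'segA' was lost
console.log(newRules.get('segMerged')) // { fromSegmentIds: ['segA', 'segB'] }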

1 file changed: 61 additions, 27 deletions

packages/job-worker/src/ingest/commit.ts

Lines changed: 61 additions & 27 deletions
@@ -43,6 +43,7 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
 import { DatabasePersistedModel } from '../modelBase'
 import { updateSegmentIdsForAdlibbedPartInstances } from './commit/updateSegmentIdsForAdlibbedPartInstances'
 import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError'
+import { AnyBulkWriteOperation } from 'mongodb'

 export type BeforePartMapItem = { id: PartId; rank: number }
 export type BeforeIngestOperationPartMap = ReadonlyMap<SegmentId, Array<BeforePartMapItem>>
@@ -301,26 +302,32 @@ async function updatePartInstancesSegmentIds(
 	renamedSegments: ReadonlyMap<SegmentId, SegmentId> | null,
 	beforePartMap: BeforeIngestOperationPartMap
 ) {
-	// A set of rules which can be translated to mongo queries for PartInstances to update
+	/**
+	 * Maps new SegmentId ->
+	 * A set of rules which can be translated to mongo queries for PartInstances to update
+	 */
 	const renameRules = new Map<
 		SegmentId,
 		{
+			/** Parts that have been moved to the new SegmentId */
 			partIds: PartId[]
-			fromSegmentId: SegmentId | null
+			/** Segments that have been renamed to the new SegmentId */
+			fromSegmentIds: SegmentId[]
 		}
 	>()

 	// Add whole segment renames to the set of rules
 	if (renamedSegments) {
 		for (const [fromSegmentId, toSegmentId] of renamedSegments) {
-			const rule = renameRules.get(toSegmentId) ?? { partIds: [], fromSegmentId: null }
+			const rule = renameRules.get(toSegmentId) ?? { partIds: [], fromSegmentIds: [] }
 			renameRules.set(toSegmentId, rule)

-			rule.fromSegmentId = fromSegmentId
+			rule.fromSegmentIds.push(fromSegmentId)
 		}
 	}

-	// Reverse the structure
+	// Reverse the Map structure
+	/** Maps Part -> SegmentId-of-the-part-before-ingest-changes */
 	const beforePartSegmentIdMap = new Map<PartId, SegmentId>()
 	for (const [segmentId, partItems] of beforePartMap.entries()) {
 		for (const partItem of partItems) {
@@ -331,8 +338,11 @@ async function updatePartInstancesSegmentIds(
 	// Some parts may have gotten a different segmentId to the base rule, so track those seperately in the rules
 	for (const partModel of ingestModel.getAllOrderedParts()) {
 		const oldSegmentId = beforePartSegmentIdMap.get(partModel.part._id)
+
 		if (oldSegmentId && oldSegmentId !== partModel.part.segmentId) {
-			const rule = renameRules.get(partModel.part.segmentId) ?? { partIds: [], fromSegmentId: null }
+			// The part has moved to another segment, add a rule to update its corresponding PartInstances:
+
+			const rule = renameRules.get(partModel.part.segmentId) ?? { partIds: [], fromSegmentIds: [] }
 			renameRules.set(partModel.part.segmentId, rule)

 			rule.partIds.push(partModel.part._id)
@@ -341,30 +351,52 @@ async function updatePartInstancesSegmentIds(

 	// Perform a mongo update to modify the PartInstances
 	if (renameRules.size > 0) {
-		await context.directCollections.PartInstances.bulkWrite(
-			Array.from(renameRules.entries()).map(([newSegmentId, rule]) => ({
-				updateMany: {
-					filter: {
-						$or: _.compact([
-							rule.fromSegmentId
-								? {
-										segmentId: rule.fromSegmentId,
-								  }
-								: undefined,
-							{
-								'part._id': { $in: rule.partIds },
+		const rulesInOrder = Array.from(renameRules.entries()).sort((a, b) => {
+			// Ensure that the ones with partIds are processed last,
+			// as that should take precedence.
+
+			if (a[1].partIds.length && !b[1].partIds.length) return 1
+			if (!a[1].partIds.length && b[1].partIds.length) return -1
+			return 0
+		})
+
+		const writeOps: AnyBulkWriteOperation<DBPartInstance>[] = []
+
+		for (const [newSegmentId, rule] of rulesInOrder) {
+			if (rule.fromSegmentIds.length) {
+				writeOps.push({
+					updateMany: {
+						filter: {
+							rundownId: ingestModel.rundownId,
+							segmentId: { $in: rule.fromSegmentIds },
+						},
+						update: {
+							$set: {
+								segmentId: newSegmentId,
+								'part.segmentId': newSegmentId,
 							},
-						]),
+						},
 					},
-					update: {
-						$set: {
-							segmentId: newSegmentId,
-							'part.segmentId': newSegmentId,
+				})
+			}
+			if (rule.partIds.length) {
+				writeOps.push({
+					updateMany: {
+						filter: {
+							rundownId: ingestModel.rundownId,
+							'part._id': { $in: rule.partIds },
+						},
+						update: {
+							$set: {
+								segmentId: newSegmentId,
+								'part.segmentId': newSegmentId,
+							},
 						},
 					},
-				},
-			}))
-		)
+				})
+			}
+		}
+		if (writeOps.length) await context.directCollections.PartInstances.bulkWrite(writeOps)
 	}
 }

@@ -691,8 +723,10 @@ async function removeSegments(
 	for (const segment of ingestModel.getAllSegments()) {
 		const segmentId = segment.segment._id
 		if (segment.segment.isHidden) {
+			// Blueprints want to hide the Segment
+
 			if (!canRemoveSegment(previousPartInstance, currentPartInstance, nextPartInstance, segmentId)) {
-				// Protect live segment from being hidden
+				// The Segment is live, so we need to protect it from being hidden
 				logger.warn(`Cannot hide live segment ${segmentId}, it will be orphaned`)
 				switch (segment.segment.orphaned) {
 					case SegmentOrphanedReason.DELETED:
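
To illustrate the second half of the commit message (keeping the bulkWrite filters indexable by always including rundownId), here is a hedged sketch of the write operations the new loop produces for a merge scenario. The document shape, collection name, database name and connection URI below are assumptions made for the example, not values taken from the repository; only the filter/update shape mirrors the diff above.

import { AnyBulkWriteOperation, MongoClient } from 'mongodb'

// Hypothetical, trimmed-down document shape; the real DBPartInstance has more fields.
interface PartInstanceDoc {
	_id: string
	rundownId: string
	segmentId: string
	part: { _id: string; segmentId: string }
}

// Build one updateMany per rule. Every filter starts with rundownId so the query can be
// served by an index whose first key is rundownId (assumed to exist on the collection,
// which is what "ensure that the bulkWrite uses a DB index" appears to refer to).
function buildWriteOps(
	rundownId: string,
	newSegmentId: string,
	fromSegmentIds: string[],
	partIds: string[]
): AnyBulkWriteOperation<PartInstanceDoc>[] {
	const ops: AnyBulkWriteOperation<PartInstanceDoc>[] = []
	if (fromSegmentIds.length) {
		// Segment-level rule: re-point every PartInstance that sat in one of the merged segments
		ops.push({
			updateMany: {
				filter: { rundownId, segmentId: { $in: fromSegmentIds } },
				update: { $set: { segmentId: newSegmentId, 'part.segmentId': newSegmentId } },
			},
		})
	}
	if (partIds.length) {
		// Part-level rule, pushed last: bulkWrite is ordered by default, so this mirrors the
		// diff's sort rule that part-level updates run last and take precedence.
		ops.push({
			updateMany: {
				filter: { rundownId, 'part._id': { $in: partIds } },
				update: { $set: { segmentId: newSegmentId, 'part.segmentId': newSegmentId } },
			},
		})
	}
	return ops
}

// Example usage against a local MongoDB instance (placeholder URI and names):
async function run(): Promise<void> {
	const client = await MongoClient.connect('mongodb://localhost:27017')
	try {
		const partInstances = client.db('sofie').collection<PartInstanceDoc>('partInstances')
		const ops = buildWriteOps('rundown0', 'segMerged', ['segA', 'segB'], ['partX'])
		if (ops.length) await partInstances.bulkWrite(ops)
	} finally {
		await client.close()
	}
}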
