diff --git a/.github/workflows/node.yaml b/.github/workflows/node.yaml index c067d58149..ae2351eba0 100644 --- a/.github/workflows/node.yaml +++ b/.github/workflows/node.yaml @@ -502,16 +502,19 @@ jobs: - node-version: 22.x package-name: job-worker send-coverage: true - # No tests for the gateways yet + # No tests for some gateways yet # - node-version: 22.x # package-name: playout-gateway - # - node-version: 22.x - # package-name: mos-gateway + # send-coverage: true + - node-version: 22.x + package-name: mos-gateway + send-coverage: true - node-version: 22.x package-name: live-status-gateway send-coverage: true - node-version: 22.x package-name: webui + send-coverage: true # manual meteor-lib as it only needs a couple of versions - node-version: 22.x package-name: meteor-lib diff --git a/meteor/__mocks__/helpers/database.ts b/meteor/__mocks__/helpers/database.ts index e19ea399e3..8ac29117ec 100644 --- a/meteor/__mocks__/helpers/database.ts +++ b/meteor/__mocks__/helpers/database.ts @@ -476,6 +476,7 @@ export async function setupMockShowStyleBlueprint( rundown, globalAdLibPieces: [], globalActions: [], + globalPieces: [], baseline: { timelineObjects: [] }, } }, diff --git a/meteor/server/api/__tests__/cleanup.test.ts b/meteor/server/api/__tests__/cleanup.test.ts index 65dbda5949..c69a4928b2 100644 --- a/meteor/server/api/__tests__/cleanup.test.ts +++ b/meteor/server/api/__tests__/cleanup.test.ts @@ -193,7 +193,7 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) { startSegmentId: segmentId, timelineObjectsString: '' as any, } - const pieceId = await Pieces.mutableCollection.insertAsync(piece) + await Pieces.mutableCollection.insertAsync(piece) await AdLibActions.mutableCollection.insertAsync({ _id: getRandomId(), @@ -256,22 +256,12 @@ async function setDefaultDatatoDB(env: DefaultEnvironment, now: number) { }) const packageId = await ExpectedPackages.mutableCollection.insertAsync({ _id: getRandomId(), - blueprintPackageId: '', - // 
@ts-expect-error bucketId is not a part of all ExpectedPackageDBs - bucketId, - content: {} as any, - contentVersionHash: '', - created: 0, - fromPieceType: '' as any, - layers: [], - pieceId, - rundownId, - segmentId, - sideEffect: {} as any, studioId, - sources: {} as any, - type: '' as any, - version: {} as any, + rundownId, + bucketId: null, + created: 0, + package: {} as any, + ingestSources: [] as any, }) await ExpectedPackageWorkStatuses.insertAsync({ _id: getRandomId(), diff --git a/meteor/server/api/deviceTriggers/TagsService.ts b/meteor/server/api/deviceTriggers/TagsService.ts index 6983c3d888..0ec3e53df7 100644 --- a/meteor/server/api/deviceTriggers/TagsService.ts +++ b/meteor/server/api/deviceTriggers/TagsService.ts @@ -4,12 +4,14 @@ import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceIns import { PieceInstanceFields, ContentCache } from './reactiveContentCacheForPieceInstances' import { SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { + createPartCurrentTimes, PieceInstanceWithTimings, processAndPrunePieceInstanceTimings, } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { IWrappedAdLib } from '@sofie-automation/meteor-lib/dist/triggers/actionFilterChainCompilers' import { areSetsEqual, doSetsIntersect } from '@sofie-automation/corelib/dist/lib' +import { getCurrentTime } from '../../lib/lib' export class TagsService { protected onAirPiecesTags: Set = new Set() @@ -130,12 +132,11 @@ export class TagsService { ): PieceInstanceWithTimings[] { // Approximate when 'now' is in the PartInstance, so that any adlibbed Pieces will be timed roughly correctly const partStarted = partInstanceTimings?.plannedStartedPlayback - const nowInPart = partStarted === undefined ? 
0 : Date.now() - partStarted return processAndPrunePieceInstanceTimings( sourceLayers, pieceInstances as PieceInstance[], - nowInPart, + createPartCurrentTimes(getCurrentTime(), partStarted), false, false ) diff --git a/meteor/server/api/ingest/debug.ts b/meteor/server/api/ingest/debug.ts index 4645a26450..160d62070b 100644 --- a/meteor/server/api/ingest/debug.ts +++ b/meteor/server/api/ingest/debug.ts @@ -8,7 +8,6 @@ import { QueueStudioJob } from '../../worker/worker' import { StudioJobs } from '@sofie-automation/corelib/dist/worker/studio' import { RundownPlaylistId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { MeteorDebugMethods } from '../../methods' -import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' MeteorDebugMethods({ /** @@ -47,31 +46,4 @@ MeteorDebugMethods({ segmentExternalId: segment.externalId, }) }, - /** - * Regenerate all the expected packages for all rundowns in the system. - * Additionally it will recreate any expectedMediaItems and expectedPlayoutItems. 
- * This shouldn't be necessary as ingest will do this for each rundown as part of its workflow - */ - debug_recreateExpectedPackages: async () => { - const rundowns = (await Rundowns.findFetchAsync( - {}, - { - projection: { - _id: 1, - studioId: 1, - source: 1, - }, - } - )) as Array> - - await Promise.all( - rundowns - .filter((rundown) => rundown.source.type !== 'snapshot') - .map(async (rundown) => - runIngestOperation(rundown.studioId, IngestJobs.ExpectedPackagesRegenerate, { - rundownId: rundown._id, - }) - ) - ) - }, }) diff --git a/meteor/server/api/ingest/packageInfo.ts b/meteor/server/api/ingest/packageInfo.ts index d3d52ccaf6..6ad883897e 100644 --- a/meteor/server/api/ingest/packageInfo.ts +++ b/meteor/server/api/ingest/packageInfo.ts @@ -1,10 +1,7 @@ import { - ExpectedPackageDBFromBucketAdLib, - ExpectedPackageDBFromBucketAdLibAction, - ExpectedPackageDBFromStudioBaselineObjects, ExpectedPackageDBType, - ExpectedPackageFromRundown, - ExpectedPackageFromRundownBaseline, + ExpectedPackageDB, + ExpectedPackageIngestSourceBucket, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos' import { ExpectedPackages, Rundowns } from '../../collections' @@ -28,51 +25,59 @@ export async function onUpdatedPackageInfo(packageId: ExpectedPackageId, _doc: P return } - if (pkg.listenToPackageInfoUpdates) { - switch (pkg.fromPieceType) { - case ExpectedPackageDBType.PIECE: - case ExpectedPackageDBType.ADLIB_PIECE: - case ExpectedPackageDBType.ADLIB_ACTION: - case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: - case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: - case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: - onUpdatedPackageInfoForRundownDebounce(pkg) - break - case ExpectedPackageDBType.BUCKET_ADLIB: - case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: - onUpdatedPackageInfoForBucketItemDebounce(pkg) - break - case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: - 
onUpdatedPackageInfoForStudioBaselineDebounce(pkg) - break - default: - assertNever(pkg) - break + if (pkg.package.listenToPackageInfoUpdates) { + for (const source of pkg.ingestSources) { + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + onUpdatedPackageInfoForRundownDebounce(pkg) + break + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + onUpdatedPackageInfoForBucketItemDebounce(pkg, source) + break + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + onUpdatedPackageInfoForStudioBaselineDebounce(pkg) + break + default: + assertNever(source) + break + } } } } const pendingRundownPackageUpdates = new Map>() -function onUpdatedPackageInfoForRundownDebounce(pkg: ExpectedPackageFromRundown | ExpectedPackageFromRundownBaseline) { - const existingEntry = pendingRundownPackageUpdates.get(pkg.rundownId) +function onUpdatedPackageInfoForRundownDebounce(pkg: ExpectedPackageDB) { + if (!pkg.rundownId) { + logger.error(`Updating ExpectedPackage "${pkg._id}" for Rundown "${pkg.rundownId}" not possible`) + return + } + + const rundownId = pkg.rundownId + + const existingEntry = pendingRundownPackageUpdates.get(rundownId) if (existingEntry) { // already queued, add to the batch existingEntry.push(pkg._id) } else { - pendingRundownPackageUpdates.set(pkg.rundownId, [pkg._id]) + pendingRundownPackageUpdates.set(rundownId, [pkg._id]) } // TODO: Scaling - this won't batch correctly if package manager directs calls to multiple instances lazyIgnore( - `onUpdatedPackageInfoForRundown_${pkg.rundownId}`, + `onUpdatedPackageInfoForRundown_${rundownId}`, () => { - const packageIds = pendingRundownPackageUpdates.get(pkg.rundownId) + const 
packageIds = pendingRundownPackageUpdates.get(rundownId) if (packageIds) { - pendingRundownPackageUpdates.delete(pkg.rundownId) - onUpdatedPackageInfoForRundown(pkg.rundownId, packageIds).catch((e) => { - logger.error( - `Updating ExpectedPackages for Rundown "${pkg.rundownId}" failed: ${stringifyError(e)}` - ) + pendingRundownPackageUpdates.delete(rundownId) + onUpdatedPackageInfoForRundown(rundownId, packageIds).catch((e) => { + logger.error(`Updating ExpectedPackages for Rundown "${rundownId}" failed: ${stringifyError(e)}`) }) } }, @@ -107,19 +112,24 @@ async function onUpdatedPackageInfoForRundown( }) } -function onUpdatedPackageInfoForBucketItemDebounce( - pkg: ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction -) { +function onUpdatedPackageInfoForBucketItemDebounce(pkg: ExpectedPackageDB, source: ExpectedPackageIngestSourceBucket) { + if (!pkg.bucketId) { + logger.error(`Updating ExpectedPackage "${pkg._id}" for Bucket "${pkg.bucketId}" not possible`) + return + } + + const bucketId = pkg.bucketId + lazyIgnore( - `onUpdatedPackageInfoForBucket_${pkg.studioId}_${pkg.bucketId}_${pkg.pieceExternalId}`, + `onUpdatedPackageInfoForBucket_${pkg.studioId}_${bucketId}_${source.pieceExternalId}`, () => { runIngestOperation(pkg.studioId, IngestJobs.BucketItemRegenerate, { - bucketId: pkg.bucketId, - externalId: pkg.pieceExternalId, + bucketId: bucketId, + externalId: source.pieceExternalId, }).catch((err) => { logger.error( - `Updating ExpectedPackages for Bucket "${pkg.bucketId}" Item "${ - pkg.pieceExternalId + `Updating ExpectedPackages for Bucket "${bucketId}" Item "${ + source.pieceExternalId }" failed: ${stringifyError(err)}` ) }) @@ -128,7 +138,7 @@ function onUpdatedPackageInfoForBucketItemDebounce( ) } -function onUpdatedPackageInfoForStudioBaselineDebounce(pkg: ExpectedPackageDBFromStudioBaselineObjects) { +function onUpdatedPackageInfoForStudioBaselineDebounce(pkg: ExpectedPackageDB) { lazyIgnore( 
`onUpdatedPackageInfoForStudioBaseline_${pkg.studioId}`, () => { diff --git a/meteor/server/api/integration/expectedPackages.ts b/meteor/server/api/integration/expectedPackages.ts index 89861c55c0..619b619dec 100644 --- a/meteor/server/api/integration/expectedPackages.ts +++ b/meteor/server/api/integration/expectedPackages.ts @@ -33,6 +33,7 @@ import { } from '../../collections' import { logger } from '../../logging' import _ from 'underscore' +import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' export namespace PackageManagerIntegration { export async function updateExpectedPackageWorkStatuses( @@ -98,9 +99,17 @@ export namespace PackageManagerIntegration { const fromPackageIds = workStatus.fromPackages.map((p) => p.id) if (fromPackageIds.length) { ps.push( - ExpectedPackages.findOneAsync({ - _id: { $in: fromPackageIds }, - }).then((expPackage) => { + ExpectedPackages.findOneAsync( + { + _id: { $in: fromPackageIds }, + }, + { + projection: { + _id: 1, + studioId: 1, + }, + } + ).then((expPackage: Pick | undefined) => { if (!expPackage) throw new Meteor.Error(404, `ExpectedPackages "${fromPackageIds}" not found`) diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index d052ac3a8c..ada882e44c 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -396,6 +396,7 @@ export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): Comple enableBuckets: apiStudioSettings.enableBuckets ?? true, // Backwards compatible enableEvaluationForm: apiStudioSettings.enableEvaluationForm ?? 
true, // Backwards compatible mockPieceContentStatus: apiStudioSettings.mockPieceContentStatus, + rundownGlobalPiecesPrepareTime: apiStudioSettings.rundownGlobalPiecesPrepareTime, } } @@ -423,6 +424,7 @@ export function APIStudioSettingsFrom(settings: IStudioSettings): Complete | DBInterface['_id'], callbacks: PromisifyCallbacks>, - options?: Omit, 'fields'> + findOptions?: Omit, 'fields'>, + callbackOptions?: { nonMutatingCallbacks?: boolean | undefined } ): Promise /** diff --git a/meteor/server/collections/implementations/asyncCollection.ts b/meteor/server/collections/implementations/asyncCollection.ts index 52bb47eca6..7a4349c26a 100644 --- a/meteor/server/collections/implementations/asyncCollection.ts +++ b/meteor/server/collections/implementations/asyncCollection.ts @@ -141,7 +141,8 @@ export class WrappedAsyncMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, - options?: FindOptions + findOptions?: FindOptions, + callbackOptions?: { nonMutatingCallbacks?: boolean | undefined } ): Promise { const span = profiler.startSpan(`MongoCollection.${this.name}.observeChanges`) if (span) { @@ -152,8 +153,8 @@ export class WrappedAsyncMongoCollection( diff --git a/meteor/server/lib/rest/v1/studios.ts b/meteor/server/lib/rest/v1/studios.ts index 54c6208e9a..12ee848adc 100644 --- a/meteor/server/lib/rest/v1/studios.ts +++ b/meteor/server/lib/rest/v1/studios.ts @@ -224,4 +224,5 @@ export interface APIStudioSettings { enableBuckets?: boolean enableEvaluationForm?: boolean mockPieceContentStatus?: boolean + rundownGlobalPiecesPrepareTime?: number } diff --git a/meteor/server/migration/1_50_0.ts b/meteor/server/migration/1_50_0.ts index 3a17fd2f82..53258ccfc9 100644 --- a/meteor/server/migration/1_50_0.ts +++ b/meteor/server/migration/1_50_0.ts @@ -33,7 +33,6 @@ import { JSONBlobStringify, JSONSchema, TSR } from '@sofie-automation/blueprints import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' import { PartId } from 
'@sofie-automation/shared-lib/dist/core/model/Ids' import { protectString } from '@sofie-automation/shared-lib/dist/lib/protectedString' -import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { AdLibActionId, BucketAdLibActionId, @@ -44,6 +43,7 @@ import { import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' +import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52' // Release 50 @@ -161,9 +161,9 @@ const oldDeviceTypeToNewMapping = { } const EXPECTED_PACKAGE_TYPES_ADDED_PART_ID = [ - ExpectedPackageDBType.PIECE, - ExpectedPackageDBType.ADLIB_PIECE, - ExpectedPackageDBType.ADLIB_ACTION, + PackagesPreR53.ExpectedPackageDBType.PIECE, + PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE, + PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION, ] export const addSteps = addMigrationSteps('1.50.0', [ @@ -877,10 +877,10 @@ export const addSteps = addMigrationSteps('1.50.0', [ return false }, migrate: async () => { - const objects = await ExpectedPackages.findFetchAsync({ + const objects = (await ExpectedPackages.findFetchAsync({ fromPieceType: { $in: EXPECTED_PACKAGE_TYPES_ADDED_PART_ID as any }, // Force the types, as the query does not match due to the interfaces partId: { $exists: false }, - }) + })) as unknown as Array const neededPieceIds: Array< PieceId | AdLibActionId | RundownBaselineAdLibActionId | BucketAdLibId | BucketAdLibActionId @@ -926,7 +926,7 @@ export const addSteps = addMigrationSteps('1.50.0', [ PartId >() for (const piece of pieces) { - partIdLookup.set(piece._id, piece.startPartId) + if (piece.startPartId) partIdLookup.set(piece._id, piece.startPartId) } for (const adlib of adlibPieces) { if (adlib.partId) partIdLookup.set(adlib._id, adlib.partId) diff --git 
a/meteor/server/migration/X_X_X.ts b/meteor/server/migration/X_X_X.ts index eb315e9ec4..9197b1a79b 100644 --- a/meteor/server/migration/X_X_X.ts +++ b/meteor/server/migration/X_X_X.ts @@ -1,5 +1,13 @@ import { addMigrationSteps } from './databaseMigration' import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' +import { ExpectedPackages } from '../collections' +import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52' +import { + ExpectedPackageDB, + ExpectedPackageIngestSource, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { assertNever, Complete } from '../lib/tempLib' +import { BucketId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' /* * ************************************************************************************** @@ -13,4 +21,114 @@ import { CURRENT_SYSTEM_VERSION } from './currentSystemVersion' export const addSteps = addMigrationSteps(CURRENT_SYSTEM_VERSION, [ // Add your migration here + + { + id: `convert ExpectedPackages to new format`, + canBeRunAutomatically: true, + validate: async () => { + const packages = await ExpectedPackages.findFetchAsync({ + fromPieceType: { $exists: true }, + }) + + if (packages.length > 0) { + return 'ExpectedPackages must be converted to new format' + } + + return false + }, + migrate: async () => { + const packages = (await ExpectedPackages.findFetchAsync({ + fromPieceType: { $exists: true }, + })) as unknown as PackagesPreR53.ExpectedPackageDB[] + + for (const pkg of packages) { + let rundownId: RundownId | null = null + let bucketId: BucketId | null = null + let ingestSource: ExpectedPackageIngestSource | undefined + + switch (pkg.fromPieceType) { + case PackagesPreR53.ExpectedPackageDBType.PIECE: + case PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE: + rundownId = pkg.rundownId + ingestSource = { + fromPieceType: pkg.fromPieceType, + pieceId: pkg.pieceId, + partId: pkg.partId, + segmentId: pkg.segmentId, + } + 
break + case PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION: + rundownId = pkg.rundownId + ingestSource = { + fromPieceType: pkg.fromPieceType, + pieceId: pkg.pieceId, + partId: pkg.partId, + segmentId: pkg.segmentId, + } + break + case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + rundownId = pkg.rundownId + ingestSource = { + fromPieceType: pkg.fromPieceType, + pieceId: pkg.pieceId, + } + break + case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + rundownId = pkg.rundownId + ingestSource = { + fromPieceType: pkg.fromPieceType, + pieceId: pkg.pieceId, + } + break + case PackagesPreR53.ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + rundownId = pkg.rundownId + ingestSource = { + fromPieceType: pkg.fromPieceType, + } + break + case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB: + bucketId = pkg.bucketId + ingestSource = { + fromPieceType: pkg.fromPieceType, + pieceId: pkg.pieceId, + pieceExternalId: pkg.pieceExternalId, + } + break + case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + bucketId = pkg.bucketId + ingestSource = { + fromPieceType: pkg.fromPieceType, + pieceId: pkg.pieceId, + pieceExternalId: pkg.pieceExternalId, + } + break + case PackagesPreR53.ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + ingestSource = { + fromPieceType: pkg.fromPieceType, + } + break + default: + assertNever(pkg) + break + } + + await ExpectedPackages.mutableCollection.removeAsync(pkg._id) + + if (ingestSource) { + await ExpectedPackages.mutableCollection.insertAsync({ + _id: pkg._id, // Preserve the old id to ensure references aren't broken. 
This will be 'corrected' upon first ingest operation + studioId: pkg.studioId, + rundownId: rundownId, + bucketId: bucketId, + package: { + ...(pkg as any), // Some fields should be pruned off this, but this is fine + _id: pkg.blueprintPackageId, + }, + created: pkg.created, + ingestSources: [ingestSource], + } satisfies Complete) + } + } + }, + }, ]) diff --git a/meteor/server/publications/_publications.ts b/meteor/server/publications/_publications.ts index 8bcb30b0b1..64a027a279 100644 --- a/meteor/server/publications/_publications.ts +++ b/meteor/server/publications/_publications.ts @@ -3,6 +3,7 @@ import './lib/lib' import './buckets' import './blueprintUpgradeStatus/publication' +import './ingestStatus/publication' import './packageManager/expectedPackages/publication' import './packageManager/packageContainers' import './packageManager/playoutContext' diff --git a/meteor/server/publications/ingestStatus/createIngestRundownStatus.ts b/meteor/server/publications/ingestStatus/createIngestRundownStatus.ts new file mode 100644 index 0000000000..b90c14b12f --- /dev/null +++ b/meteor/server/publications/ingestStatus/createIngestRundownStatus.ts @@ -0,0 +1,191 @@ +import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import { + IngestRundownStatus, + IngestPartPlaybackStatus, + IngestRundownActiveStatus, + IngestPartStatus, + IngestPartNotifyItemReady, +} from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import type { ReadonlyDeep } from 'type-fest' +import _ from 'underscore' +import type { ContentCache, PartCompact, PartInstanceCompact, PlaylistCompact } from './reactiveContentCache' +import { ReactiveCacheCollection } from '../lib/ReactiveCacheCollection' +import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' + +export function createIngestRundownStatus( + cache: ReadonlyDeep, + rundownId: RundownId 
+): IngestRundownStatus | null { + const rundown = cache.Rundowns.findOne(rundownId) + if (!rundown) return null + + const newDoc: IngestRundownStatus = { + _id: rundownId, + externalId: rundown.externalId, + + active: IngestRundownActiveStatus.INACTIVE, + + segments: [], + } + + const playlist = cache.Playlists.findOne({ + _id: rundown.playlistId, + activationId: { $exists: true }, + }) + + if (playlist) { + newDoc.active = playlist.rehearsal ? IngestRundownActiveStatus.REHEARSAL : IngestRundownActiveStatus.ACTIVE + } + + const nrcsSegments = cache.NrcsIngestData.find({ rundownId, type: NrcsIngestCacheType.SEGMENT }).fetch() + for (const nrcsSegment of nrcsSegments) { + const nrcsParts = cache.NrcsIngestData.find({ + rundownId, + segmentId: nrcsSegment.segmentId, + type: NrcsIngestCacheType.PART, + }).fetch() + + newDoc.segments.push({ + externalId: nrcsSegment.data.externalId, + parts: _.compact( + nrcsParts.map((nrcsPart) => { + if (!nrcsPart.partId || !nrcsPart.segmentId) return null + + const parts = cache.Parts.find({ + rundownId: rundownId, + $or: [ + { + externalId: nrcsPart.data.externalId, + ingestNotifyPartExternalId: { $exists: false }, + }, + { + ingestNotifyPartExternalId: nrcsPart.data.externalId, + }, + ], + }).fetch() + const partInstances = findPartInstancesForIngestPart( + playlist, + rundownId, + cache.PartInstances, + nrcsPart.data.externalId + ) + + return createIngestPartStatus(playlist, partInstances, parts, nrcsPart.data.externalId) + }) + ), + }) + } + + return newDoc +} + +function findPartInstancesForIngestPart( + playlist: PlaylistCompact | undefined, + rundownId: RundownId, + partInstancesCache: ReadonlyDeep>, + partExternalId: string +) { + const result: Record = {} + if (!playlist) return result + + const candidatePartInstances = partInstancesCache + .find({ + rundownId: rundownId, + $or: [ + { + 'part.externalId': partExternalId, + 'part.ingestNotifyPartExternalId': { $exists: false }, + }, + { + 'part.ingestNotifyPartExternalId': 
partExternalId, + }, + ], + }) + .fetch() + + for (const partInstance of candidatePartInstances) { + if (partInstance.rundownId !== rundownId) continue + // Ignore the next partinstance + if (partInstance._id === playlist.nextPartInfo?.partInstanceId) continue + + const partId = unprotectString(partInstance.part._id) + + // The current part instance is the most important + if (partInstance._id === playlist.currentPartInfo?.partInstanceId) { + result[partId] = partInstance + continue + } + + // Take the part with the highest takeCount + const existingEntry = result[partId] + if (!existingEntry || existingEntry.takeCount < partInstance.takeCount) { + result[partId] = partInstance + } + } + + return result +} + +function createIngestPartStatus( + playlist: PlaylistCompact | undefined, + partInstances: Record, + parts: PartCompact[], + ingestPartExternalId: string +): IngestPartStatus { + // Determine the playback status from the PartInstance + let playbackStatus = IngestPartPlaybackStatus.UNKNOWN + + let isReady: boolean | null = null // Start off as null, the first value will make this true or false + + const itemsReady: IngestPartNotifyItemReady[] = [] + + const updateStatusWithPart = (part: PartCompact) => { + // If the part affects the ready status, update it + if (typeof part.ingestNotifyPartReady === 'boolean') { + isReady = (isReady ?? 
true) && part.ingestNotifyPartReady + } + + // Include the items + if (part.ingestNotifyItemsReady) { + itemsReady.push(...part.ingestNotifyItemsReady) + } + } + + // Loop through the partInstances, starting off the state + if (playlist) { + for (const partInstance of Object.values(partInstances)) { + if (!partInstance) continue + + if (partInstance.part.shouldNotifyCurrentPlayingPart) { + const isCurrentPartInstance = playlist.currentPartInfo?.partInstanceId === partInstance._id + + if (isCurrentPartInstance) { + // If the current, it is playing + playbackStatus = IngestPartPlaybackStatus.PLAY + } else if (playbackStatus === IngestPartPlaybackStatus.UNKNOWN) { + // If not the current, but has been played, it is stopped + playbackStatus = IngestPartPlaybackStatus.STOP + } + } + + updateStatusWithPart(partInstance.part) + } + } + + for (const part of parts) { + // Check if the part has already been handled by a partInstance + if (partInstances[unprotectString(part._id)]) continue + + updateStatusWithPart(part) + } + + return { + externalId: ingestPartExternalId, + + isReady: isReady, + itemsReady: itemsReady, + + playbackStatus, + } +} diff --git a/meteor/server/publications/ingestStatus/publication.ts b/meteor/server/publications/ingestStatus/publication.ts new file mode 100644 index 0000000000..a28dd3844f --- /dev/null +++ b/meteor/server/publications/ingestStatus/publication.ts @@ -0,0 +1,217 @@ +import { PeripheralDeviceId, RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { ReadonlyDeep } from 'type-fest' +import { + CustomPublish, + CustomPublishCollection, + meteorCustomPublish, + setUpCollectionOptimizedObserver, + SetupObserversResult, + TriggerUpdate, +} from '../../lib/customPublication' +import { logger } from '../../logging' +import { ContentCache, createReactiveContentCache } from './reactiveContentCache' +import { RundownsObserver } from '../lib/rundownsObserver' +import { RundownContentObserver } from 
'./rundownContentObserver' +import { + PeripheralDevicePubSub, + PeripheralDevicePubSubCollectionsNames, +} from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { checkAccessAndGetPeripheralDevice } from '../../security/check' +import { check } from '../../lib/check' +import { IngestRundownStatus } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import { protectString } from '@sofie-automation/corelib/dist/protectedString' +import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import { createIngestRundownStatus } from './createIngestRundownStatus' +import { assertConnectionHasOneOfPermissions } from '../../security/auth' +import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' + +interface IngestRundownStatusArgs { + readonly deviceId: PeripheralDeviceId +} + +export interface IngestRundownStatusState { + contentCache: ReadonlyDeep +} + +interface IngestRundownStatusUpdateProps { + newCache: ContentCache + + invalidateRundownIds: RundownId[] + invalidatePlaylistIds: RundownPlaylistId[] +} + +async function setupIngestRundownStatusPublicationObservers( + args: ReadonlyDeep, + triggerUpdate: TriggerUpdate +): Promise { + const rundownsObserver = await RundownsObserver.createForPeripheralDevice(args.deviceId, async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`, rundownIds) + + // TODO - can this be done cheaper? 
+ const cache = createReactiveContentCache(rundownIds) + + // Push update + triggerUpdate({ newCache: cache }) + + const contentObserver = await RundownContentObserver.create(rundownIds, cache) + + const innerQueries = [ + cache.Playlists.find({}).observeChanges( + { + added: (docId) => triggerUpdate({ invalidatePlaylistIds: [protectString(docId)] }), + changed: (docId) => triggerUpdate({ invalidatePlaylistIds: [protectString(docId)] }), + removed: (docId) => triggerUpdate({ invalidatePlaylistIds: [protectString(docId)] }), + }, + { nonMutatingCallbacks: true } + ), + cache.Rundowns.find({}).observeChanges( + { + added: (docId) => { + triggerUpdate({ invalidateRundownIds: [protectString(docId)] }) + contentObserver.checkPlaylistIds() + }, + changed: (docId) => { + triggerUpdate({ invalidateRundownIds: [protectString(docId)] }) + contentObserver.checkPlaylistIds() + }, + removed: (docId) => { + triggerUpdate({ invalidateRundownIds: [protectString(docId)] }) + contentObserver.checkPlaylistIds() + }, + }, + { nonMutatingCallbacks: true } + ), + cache.Parts.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }), + changed: (doc, oldDoc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId, oldDoc.rundownId] }), + removed: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }), + }), + cache.PartInstances.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }), + changed: (doc, oldDoc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId, oldDoc.rundownId] }), + removed: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }), + }), + cache.NrcsIngestData.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }), + changed: (doc, oldDoc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId, oldDoc.rundownId] }), + removed: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }), + }), + ] + + return () => { + 
contentObserver.dispose() + + for (const query of innerQueries) { + query.stop() + } + } + }) + + // Set up observers: + return [rundownsObserver] +} + +async function manipulateIngestRundownStatusPublicationData( + _args: IngestRundownStatusArgs, + state: Partial, + collection: CustomPublishCollection, + updateProps: Partial> | undefined +): Promise { + // Prepare data for publication: + + if (updateProps?.newCache !== undefined) { + state.contentCache = updateProps.newCache ?? undefined + } + + if (!state.contentCache) { + // Remove all the notes + collection.remove(null) + + return + } + + const updateAll = !updateProps || !!updateProps?.newCache + if (updateAll) { + // Remove all the notes + collection.remove(null) + + const knownRundownIds = new Set(state.contentCache.RundownIds) + + for (const rundownId of knownRundownIds) { + const newDoc = createIngestRundownStatus(state.contentCache, rundownId) + if (newDoc) collection.replace(newDoc) + } + } else { + const regenerateForRundownIds = new Set(updateProps.invalidateRundownIds) + + // Include anything where the playlist has changed + if (updateProps.invalidatePlaylistIds && updateProps.invalidatePlaylistIds.length > 0) { + const rundownsToUpdate = state.contentCache.Rundowns.find( + { + playlistId: { $in: updateProps.invalidatePlaylistIds }, + }, + { + projection: { + _id: 1, + }, + } + ).fetch() as Pick[] + + for (const rundown of rundownsToUpdate) { + regenerateForRundownIds.add(rundown._id) + } + } + + for (const rundownId of regenerateForRundownIds) { + const newDoc = createIngestRundownStatus(state.contentCache, rundownId) + if (newDoc) { + collection.replace(newDoc) + } else { + collection.remove(rundownId) + } + } + } +} + +async function startOrJoinIngestStatusPublication( + pub: CustomPublish, + deviceId: PeripheralDeviceId +) { + await setUpCollectionOptimizedObserver< + IngestRundownStatus, + IngestRundownStatusArgs, + IngestRundownStatusState, + IngestRundownStatusUpdateProps + >( + 
`pub_${PeripheralDevicePubSub.ingestDeviceRundownStatus}_${deviceId}`, + { deviceId }, + setupIngestRundownStatusPublicationObservers, + manipulateIngestRundownStatusPublicationData, + pub, + 100 + ) +} + +meteorCustomPublish( + PeripheralDevicePubSub.ingestDeviceRundownStatus, + PeripheralDevicePubSubCollectionsNames.ingestRundownStatus, + async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) { + check(deviceId, String) + + await checkAccessAndGetPeripheralDevice(deviceId, token, this) + + await startOrJoinIngestStatusPublication(pub, deviceId) + } +) + +meteorCustomPublish( + MeteorPubSub.ingestDeviceRundownStatusTestTool, + PeripheralDevicePubSubCollectionsNames.ingestRundownStatus, + async function (pub, deviceId: PeripheralDeviceId) { + check(deviceId, String) + + assertConnectionHasOneOfPermissions(this.connection, 'testing') + + await startOrJoinIngestStatusPublication(pub, deviceId) + } +) diff --git a/meteor/server/publications/ingestStatus/reactiveContentCache.ts b/meteor/server/publications/ingestStatus/reactiveContentCache.ts new file mode 100644 index 0000000000..a755ee4f02 --- /dev/null +++ b/meteor/server/publications/ingestStatus/reactiveContentCache.ts @@ -0,0 +1,99 @@ +import type { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { ReactiveCacheCollection } from '../lib/ReactiveCacheCollection' +import { literal } from '@sofie-automation/corelib/dist/lib' +import type { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo' +import type { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances' +import type { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache' +import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' +import type { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' 
+ +export type PlaylistCompact = Pick< + DBRundownPlaylist, + '_id' | 'activationId' | 'rehearsal' | 'currentPartInfo' | 'nextPartInfo' +> +export const playlistFieldSpecifier = literal>({ + _id: 1, + activationId: 1, + rehearsal: 1, + currentPartInfo: 1, + nextPartInfo: 1, +}) + +export type RundownCompact = Pick +export const rundownFieldSpecifier = literal>({ + _id: 1, + externalId: 1, + playlistId: 1, +}) + +export type PartCompact = Pick< + DBPart, + | '_id' + | 'rundownId' + | 'segmentId' + | 'externalId' + | 'shouldNotifyCurrentPlayingPart' + | 'ingestNotifyPartReady' + | 'ingestNotifyItemsReady' + | 'ingestNotifyPartExternalId' +> +export const partFieldSpecifier = literal>({ + _id: 1, + rundownId: 1, + segmentId: 1, + externalId: 1, + shouldNotifyCurrentPlayingPart: 1, + ingestNotifyPartReady: 1, + ingestNotifyItemsReady: 1, + ingestNotifyPartExternalId: 1, +}) + +export type PartInstanceCompact = Pick +export const partInstanceFieldSpecifier = literal>({ + _id: 1, + rundownId: 1, + segmentId: 1, + part: 1, // This could be more granular, but it should be pretty stable + takeCount: 1, +}) + +export type NrcsIngestDataCacheObjCompact = Pick< + NrcsIngestDataCacheObj, + '_id' | 'type' | 'rundownId' | 'segmentId' | 'partId' +> & { data: { externalId: string } } +export const nrcsIngestDataCacheObjSpecifier = literal>({ + _id: 1, + type: 1, + rundownId: 1, + segmentId: 1, + partId: 1, + data: { + // We need to be very selective here, as the payload portion could contain data not safe for minimongo + externalId: 1, + }, +}) + +export interface ContentCache { + RundownIds: RundownId[] + + Playlists: ReactiveCacheCollection + Rundowns: ReactiveCacheCollection + NrcsIngestData: ReactiveCacheCollection + Parts: ReactiveCacheCollection + PartInstances: ReactiveCacheCollection +} + +export function createReactiveContentCache(rundownIds: RundownId[]): ContentCache { + const cache: ContentCache = { + RundownIds: rundownIds, + + Playlists: new 
ReactiveCacheCollection('playlists'), + Rundowns: new ReactiveCacheCollection('rundowns'), + NrcsIngestData: new ReactiveCacheCollection('nrcsIngestData'), + Parts: new ReactiveCacheCollection('parts'), + PartInstances: new ReactiveCacheCollection('partInstances'), + } + + return cache +} diff --git a/meteor/server/publications/ingestStatus/rundownContentObserver.ts b/meteor/server/publications/ingestStatus/rundownContentObserver.ts new file mode 100644 index 0000000000..6894e97136 --- /dev/null +++ b/meteor/server/publications/ingestStatus/rundownContentObserver.ts @@ -0,0 +1,153 @@ +import { Meteor } from 'meteor/meteor' +import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { logger } from '../../logging' +import { + ContentCache, + nrcsIngestDataCacheObjSpecifier, + partFieldSpecifier, + partInstanceFieldSpecifier, + playlistFieldSpecifier, + rundownFieldSpecifier, + // segmentFieldSpecifier, +} from './reactiveContentCache' +import { NrcsIngestDataCache, PartInstances, Parts, RundownPlaylists, Rundowns } from '../../collections' +import { waitForAllObserversReady } from '../lib/lib' +import _ from 'underscore' +import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../lib/observerGroup' +import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib' + +const REACTIVITY_DEBOUNCE = 20 + +export class RundownContentObserver { + #observers: Meteor.LiveQueryHandle[] = [] + readonly #cache: ContentCache + + #playlistIds: RundownPlaylistId[] = [] + #playlistIdObserver!: ReactiveMongoObserverGroupHandle + + #disposed = false + + private constructor(cache: ContentCache) { + this.#cache = cache + } + + static async create(rundownIds: RundownId[], cache: ContentCache): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + + const observer = new RundownContentObserver(cache) + + observer.#playlistIdObserver = await 
ReactiveMongoObserverGroup(async () => { + // Clear already cached data + cache.Playlists.remove({}) + + return [ + RundownPlaylists.observe( + { + // We can use the `this.#playlistIds` here, as this is restarted every time that property changes + _id: { $in: observer.#playlistIds }, + }, + { + added: (doc) => { + cache.Playlists.upsert(doc._id, doc) + }, + changed: (doc) => { + cache.Playlists.upsert(doc._id, doc) + }, + removed: (doc) => { + cache.Playlists.remove(doc._id) + }, + }, + { + projection: playlistFieldSpecifier, + } + ), + ] + }) + + observer.#observers = await waitForAllObserversReady([ + Rundowns.observeChanges( + { + _id: { + $in: rundownIds, + }, + }, + cache.Rundowns.link(), + { + projection: rundownFieldSpecifier, + }, + { + nonMutatingCallbacks: true, + } + ), + Parts.observeChanges( + { + rundownId: { + $in: rundownIds, + }, + }, + cache.Parts.link(), + { + projection: partFieldSpecifier, + }, + { + nonMutatingCallbacks: true, + } + ), + PartInstances.observeChanges( + { + rundownId: { $in: rundownIds }, + reset: { $ne: true }, + orphaned: { $exists: false }, + }, + cache.PartInstances.link(), + { projection: partInstanceFieldSpecifier }, + { + nonMutatingCallbacks: true, + } + ), + NrcsIngestDataCache.observeChanges( + { + rundownId: { + $in: rundownIds, + }, + }, + cache.NrcsIngestData.link(), + { + projection: nrcsIngestDataCacheObjSpecifier, + }, + { + nonMutatingCallbacks: true, + } + ), + + observer.#playlistIdObserver, + ]) + + return observer + } + + public checkPlaylistIds = _.debounce( + Meteor.bindEnvironment(() => { + if (this.#disposed) return + + const playlistIds = Array.from(new Set(this.#cache.Rundowns.find({}).map((rundown) => rundown.playlistId))) + + if (!equivalentArrays(playlistIds, this.#playlistIds)) { + this.#playlistIds = playlistIds + // trigger the playlist group to restart + this.#playlistIdObserver.restart() + } + }), + REACTIVITY_DEBOUNCE + ) + + public get cache(): ContentCache { + return this.#cache + } + + 
public dispose = (): void => { + this.#disposed = true + + this.#observers.forEach((observer) => observer.stop()) + } +} diff --git a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts index ffeb44577b..06760d6c94 100644 --- a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts +++ b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts @@ -1,4 +1,9 @@ -import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + PeripheralDeviceId, + RundownId, + RundownPlaylistId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { Rundowns } from '../../../collections' @@ -25,7 +30,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // should now be an observer expect(RundownsMock.observers).toHaveLength(1) @@ -78,7 +83,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // ensure starts correct await waitUntil(async () => { @@ -132,7 +137,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // ensure 
starts correct // ensure starts correct @@ -186,7 +191,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct @@ -263,4 +268,56 @@ describe('RundownsObserver', () => { observer.stop() } }) + + test('create and destroy observer - for peripheraldevice', async () => { + const deviceId = protectString('device0') + + const onChangedCleanup = jest.fn() + const onChanged = jest.fn(async () => onChangedCleanup) + + // should not be any observers yet + expect(RundownsMock.observers).toHaveLength(0) + + const observer = await RundownsObserver.createForPeripheralDevice(deviceId, onChanged) + try { + // should now be an observer + expect(RundownsMock.observers).toHaveLength(1) + + // Before debounce + expect(onChanged).toHaveBeenCalledTimes(0) + + // After debounce + await waitUntil(async () => { + // Run timers, so that promises in the observer has a chance to resolve: + await runAllTimers() + expect(onChanged).toHaveBeenCalledTimes(1) + expect(onChangedCleanup).toHaveBeenCalledTimes(0) + }, MAX_WAIT_TIME) + + // still got an observer + expect(RundownsMock.observers).toHaveLength(1) + + // get the mock observer, and ensure to looks sane + expect(RundownsMock.observers).toHaveLength(1) + const mockObserver = RundownsMock.observers[0] + expect(mockObserver).toBeTruthy() + expect(mockObserver.callbacksChanges).toBeFalsy() + expect(mockObserver.callbacksObserve).toBeTruthy() + expect(mockObserver.callbacksObserve?.added).toBeTruthy() + expect(mockObserver.callbacksObserve?.changed).toBeTruthy() + expect(mockObserver.callbacksObserve?.removed).toBeTruthy() + expect(mockObserver.query).toEqual({ + 'source.peripheralDeviceId': 'device0', + 'source.type': 'nrcs', + }) + } finally { + // 
Make sure to cleanup + observer.stop() + + // Check it stopped + expect(onChanged).toHaveBeenCalledTimes(1) + expect(onChangedCleanup).toHaveBeenCalledTimes(1) + expect(RundownsMock.observers).toHaveLength(0) + } + }) }) diff --git a/meteor/server/publications/lib/rundownsObserver.ts b/meteor/server/publications/lib/rundownsObserver.ts index 6aec996d81..421a6c3f46 100644 --- a/meteor/server/publications/lib/rundownsObserver.ts +++ b/meteor/server/publications/lib/rundownsObserver.ts @@ -1,7 +1,14 @@ import { Meteor } from 'meteor/meteor' -import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { + PeripheralDeviceId, + RundownId, + RundownPlaylistId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { Rundowns } from '../../collections' import { PromiseDebounce } from './PromiseDebounce' +import type { MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import type { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { logger } from '../../logging' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' @@ -44,24 +51,39 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { this.#changed = onChanged } - static async create( + static async createForPlaylist( studioId: StudioId, playlistId: RundownPlaylistId, onChanged: ChangedHandler ): Promise { const observer = new RundownsObserver(onChanged) - await observer.init(studioId, playlistId) + await observer.init({ + playlistId, + studioId, + }) return observer } - private async init(studioId: StudioId, playlistId: RundownPlaylistId) { + static async createForPeripheralDevice( + // studioId: StudioId, // TODO - this? 
+ deviceId: PeripheralDeviceId, + onChanged: ChangedHandler + ): Promise { + const observer = new RundownsObserver(onChanged) + + await observer.init({ + 'source.type': 'nrcs', + 'source.peripheralDeviceId': deviceId, + }) + + return observer + } + + private async init(query: MongoQuery) { this.#rundownsLiveQuery = await Rundowns.observe( - { - playlistId, - studioId, - }, + query, { added: (doc) => { this.#rundownIds.add(doc._id) @@ -97,5 +119,6 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { this.#rundownsLiveQuery.stop() this.#changed = undefined this.#cleanup?.() + this.#cleanup = undefined } } diff --git a/meteor/server/publications/packageManager/expectedPackages/contentCache.ts b/meteor/server/publications/packageManager/expectedPackages/contentCache.ts index b26af4113d..d54cf5f731 100644 --- a/meteor/server/publications/packageManager/expectedPackages/contentCache.ts +++ b/meteor/server/publications/packageManager/expectedPackages/contentCache.ts @@ -28,15 +28,22 @@ export const pieceInstanceFieldsSpecifier = literal + +export const expectedPackageDBFieldsSpecifier = literal>({ + _id: 1, + package: 1, +}) + export interface ExpectedPackagesContentCache { - ExpectedPackages: ReactiveCacheCollection + ExpectedPackages: ReactiveCacheCollection RundownPlaylists: ReactiveCacheCollection PieceInstances: ReactiveCacheCollection } export function createReactiveContentCache(): ExpectedPackagesContentCache { const cache: ExpectedPackagesContentCache = { - ExpectedPackages: new ReactiveCacheCollection('expectedPackages'), + ExpectedPackages: new ReactiveCacheCollection('expectedPackages'), RundownPlaylists: new ReactiveCacheCollection('rundownPlaylists'), PieceInstances: new ReactiveCacheCollection('pieceInstances'), } diff --git a/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts b/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts index 6ff293fdee..9db462b5d0 100644 --- 
a/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts +++ b/meteor/server/publications/packageManager/expectedPackages/contentObserver.ts @@ -5,6 +5,7 @@ import { ExpectedPackagesContentCache, rundownPlaylistFieldSpecifier, pieceInstanceFieldsSpecifier, + expectedPackageDBFieldsSpecifier, } from './contentCache' import { ExpectedPackages, PieceInstances, RundownPlaylists } from '../../../collections' import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../../lib/observerGroup' @@ -61,7 +62,10 @@ export class ExpectedPackagesContentObserver implements Meteor.LiveQueryHandle { { studioId: studioId, }, - cache.ExpectedPackages.link() + cache.ExpectedPackages.link(), + { + projection: expectedPackageDBFieldsSpecifier, + } ), RundownPlaylists.observeChanges( diff --git a/meteor/server/publications/packageManager/expectedPackages/generate.ts b/meteor/server/publications/packageManager/expectedPackages/generate.ts index cdc21bf353..bb9c773adb 100644 --- a/meteor/server/publications/packageManager/expectedPackages/generate.ts +++ b/meteor/server/publications/packageManager/expectedPackages/generate.ts @@ -1,5 +1,5 @@ import { PackageContainerOnPackage, Accessor, AccessorOnPackage } from '@sofie-automation/blueprints-integration' -import { getContentVersionHash, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { getExpectedPackageIdForPieceInstance } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { PeripheralDeviceId, ExpectedPackageId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { @@ -48,7 +48,7 @@ export async function updateCollectionForExpectedPackageIds( // Map the expectedPackages onto their specified layer: const allDeviceIds = new Set() - for (const layerName of packageDoc.layers) { + for (const layerName of 
packageDoc.package.layers) { const layerDeviceIds = layerNameToDeviceIds.get(layerName) for (const deviceId of layerDeviceIds || []) { allDeviceIds.add(deviceId) @@ -62,7 +62,7 @@ export async function updateCollectionForExpectedPackageIds( const routedPackage = generateExpectedPackageForDevice( studio, { - ...packageDoc, + ...packageDoc.package, _id: unprotectString(packageDoc._id), }, deviceId, @@ -118,7 +118,10 @@ export async function updateCollectionForPieceInstanceIds( if (!pieceInstanceDoc.piece?.expectedPackages) continue pieceInstanceDoc.piece.expectedPackages.forEach((expectedPackage, i) => { - const sanitisedPackageId = getExpectedPackageId(pieceInstanceId, expectedPackage._id || '__unnamed' + i) + const sanitisedPackageId = getExpectedPackageIdForPieceInstance( + pieceInstanceId, + expectedPackage._id || '__unnamed' + i + ) // Map the expectedPackages onto their specified layer: const allDeviceIds = new Set() @@ -138,8 +141,6 @@ export async function updateCollectionForPieceInstanceIds( { ...expectedPackage, _id: unprotectString(sanitisedPackageId), - rundownId: pieceInstanceDoc.rundownId, - contentVersionHash: getContentVersionHash(expectedPackage), }, deviceId, pieceInstanceId, @@ -215,11 +216,14 @@ function generateExpectedPackageForDevice( if (!combinedTargets.length) { logger.warn(`Pub.expectedPackagesForDevice: No targets found for "${expectedPackage._id}"`) } - expectedPackage.sideEffect = getSideEffect(expectedPackage, studio) + const packageSideEffect = getSideEffect(expectedPackage, studio) return { _id: protectString(`${expectedPackage._id}_${deviceId}_${pieceInstanceId}`), - expectedPackage: expectedPackage, + expectedPackage: { + ...expectedPackage, + sideEffect: packageSideEffect, + }, sources: combinedSources, targets: combinedTargets, priority: priority, @@ -247,7 +251,7 @@ function calculateCombinedSource( for (const accessorId of accessorIds) { const sourceAccessor: Accessor.Any | undefined = 
lookedUpSource.container.accessors[accessorId] - const packageAccessor: AccessorOnPackage.Any | undefined = packageSource.accessors?.[accessorId] + const packageAccessor: ReadonlyDeep | undefined = packageSource.accessors?.[accessorId] if (packageAccessor && sourceAccessor && packageAccessor.type === sourceAccessor.type) { combinedSource.accessors[accessorId] = deepExtend({}, sourceAccessor, packageAccessor) diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index 15ef24d707..5c30cea8a2 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -66,60 +66,64 @@ async function setupUIPartInstancesPublicationObservers( )) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist with activationId="${args.playlistActivationId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - const cache = createReactiveContentCache() - - // Push update - triggerUpdate({ newCache: cache }) - - const obs1 = await RundownContentObserver.create( - playlist.studioId, - args.playlistActivationId, - rundownIds, - cache - ) + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + const cache = createReactiveContentCache() + + // Push update + triggerUpdate({ newCache: cache }) + + const obs1 = await RundownContentObserver.create( + playlist.studioId, + args.playlistActivationId, + rundownIds, + cache + ) - const innerQueries = [ - cache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ 
invalidateSegmentIds: [protectString(id)] }), - }), - cache.PartInstances.find({}).observe({ - added: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), - changed: (doc, oldDoc) => { - if (doc.part._rank !== oldDoc.part._rank) { - // with part rank change we need to invalidate the entire segment, - // as the order may affect which unchanged parts are/aren't in quickLoop - triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) - } else { - triggerUpdate({ invalidatePartInstanceIds: [doc._id] }) - } - }, - removed: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), - }), - cache.RundownPlaylists.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - cache.StudioSettings.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const innerQueries = [ + cache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + }), + cache.PartInstances.find({}).observe({ + added: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), + changed: (doc, oldDoc) => { + if (doc.part._rank !== oldDoc.part._rank) { + // with part rank change we need to invalidate the entire segment, + // as the order may affect which unchanged parts are/aren't in quickLoop + triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) + } else { + triggerUpdate({ invalidatePartInstanceIds: [doc._id] }) + } + }, + removed: (doc) => 
triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), + }), + cache.RundownPlaylists.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + cache.StudioSettings.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [rundownsObserver] diff --git a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts index 66e1e0658e..aac741cbb4 100644 --- a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts +++ b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts @@ -5,8 +5,8 @@ import { MongoFieldSpecifierOnesStrict, MongoFieldSpecifierZeroes } from '@sofie import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' -import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' +import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' export type RundownPlaylistCompact = Pick export const rundownPlaylistFieldSpecifier = literal>({ diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index d2f1a100e3..c52fe141db 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ -57,55 
+57,59 @@ async function setupUIPartsPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - const cache = createReactiveContentCache() - - // Push update - triggerUpdate({ newCache: cache }) - - const obs1 = await RundownContentObserver.create(playlist.studioId, playlist._id, rundownIds, cache) - - const innerQueries = [ - cache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - }), - cache.Parts.find({}).observe({ - added: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), - changed: (doc, oldDoc) => { - if (doc._rank !== oldDoc._rank) { - // with part rank change we need to invalidate the entire segment, - // as the order may affect which unchanged parts are/aren't in quickLoop - triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) - } else { - triggerUpdate({ invalidatePartIds: [doc._id] }) - } - }, - removed: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), - }), - cache.RundownPlaylists.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - cache.StudioSettings.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const rundownsObserver = await 
RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + const cache = createReactiveContentCache() + + // Push update + triggerUpdate({ newCache: cache }) + + const obs1 = await RundownContentObserver.create(playlist.studioId, playlist._id, rundownIds, cache) + + const innerQueries = [ + cache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + }), + cache.Parts.find({}).observe({ + added: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), + changed: (doc, oldDoc) => { + if (doc._rank !== oldDoc._rank) { + // with part rank change we need to invalidate the entire segment, + // as the order may affect which unchanged parts are/aren't in quickLoop + triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) + } else { + triggerUpdate({ invalidatePartIds: [doc._id] }) + } + }, + removed: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), + }), + cache.RundownPlaylists.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + cache.StudioSettings.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [rundownsObserver] diff --git a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts 
b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts index 1fd4f25426..f6a8069a8e 100644 --- a/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts +++ b/meteor/server/publications/pieceContentStatusUI/__tests__/checkPieceContentStatus.test.ts @@ -37,6 +37,7 @@ import { MediaObjects } from '../../../collections' import { PieceDependencies } from '../common' import { DEFAULT_MINIMUM_TAKE_SPAN } from '@sofie-automation/shared-lib/dist/core/constants' import { PieceContentStatusMessageFactory } from '../messageFactory' +import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' const mockMediaObjectsCollection = MongoMock.getInnerMockCollection(MediaObjects) @@ -450,9 +451,17 @@ describe('lib/mediaObjects', () => { timelineObjectsString: EmptyPieceTimelineObjectsBlob, }) + const mockOwnerId = protectString('rundown0') + const messageFactory = new PieceContentStatusMessageFactory(undefined) - const status1 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece1, sourcelayer1) + const status1 = await checkPieceContentStatusAndDependencies( + mockStudio, + mockOwnerId, + messageFactory, + piece1, + sourcelayer1 + ) expect(status1[0].status).toEqual(PieceStatusCode.OK) expect(status1[0].messages).toHaveLength(0) expect(status1[1]).toMatchObject( @@ -463,7 +472,13 @@ describe('lib/mediaObjects', () => { }) ) - const status2 = await checkPieceContentStatusAndDependencies(mockStudio, messageFactory, piece2, sourcelayer1) + const status2 = await checkPieceContentStatusAndDependencies( + mockStudio, + mockOwnerId, + messageFactory, + piece2, + sourcelayer1 + ) expect(status2[0].status).toEqual(PieceStatusCode.SOURCE_BROKEN) expect(status2[0].messages).toHaveLength(1) expect(status2[0].messages[0]).toMatchObject({ @@ -477,7 +492,13 @@ describe('lib/mediaObjects', () => { }) ) - const status3 = await checkPieceContentStatusAndDependencies(mockStudio, 
messageFactory, piece3, sourcelayer1) + const status3 = await checkPieceContentStatusAndDependencies( + mockStudio, + mockOwnerId, + messageFactory, + piece3, + sourcelayer1 + ) expect(status3[0].status).toEqual(PieceStatusCode.SOURCE_MISSING) expect(status3[0].messages).toHaveLength(1) expect(status3[0].messages[0]).toMatchObject({ diff --git a/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts b/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts index 79285d7c4f..263c706d27 100644 --- a/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts +++ b/meteor/server/publications/pieceContentStatusUI/bucket/regenerateForItem.ts @@ -47,6 +47,7 @@ export async function regenerateForBucketAdLibIds( if (sourceLayer) { const [status, itemDependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + actionDoc.bucketId, messageFactories.get(actionDoc.showStyleBaseId), actionDoc, sourceLayer @@ -119,6 +120,7 @@ export async function regenerateForBucketActionIds( const [status, itemDependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + actionDoc.bucketId, messageFactories.get(actionDoc.showStyleBaseId), fakedPiece, sourceLayer diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 3247c10a91..fc0c667ed3 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -10,8 +10,19 @@ import { SourceLayerType, VTContent, } from '@sofie-automation/blueprints-integration' -import { getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { ExpectedPackageId, PeripheralDeviceId, PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + ExpectedPackageDBType, + getExpectedPackageIdForPieceInstance, + 
getExpectedPackageIdFromIngestSource, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { + BucketId, + ExpectedPackageId, + PeripheralDeviceId, + PieceInstanceId, + RundownId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { getPackageContainerPackageId, PackageContainerPackageStatusDB, @@ -219,6 +230,7 @@ export interface PieceContentStatusStudio export async function checkPieceContentStatusAndDependencies( studio: PieceContentStatusStudio, + packageOwnerId: RundownId | BucketId | StudioId, messageFactory: PieceContentStatusMessageFactory | undefined, piece: PieceContentStatusPiece, sourceLayer: ISourceLayer @@ -240,7 +252,7 @@ export async function checkPieceContentStatusAndDependencies( blacks: [], scenes: [], - thumbnailUrl: undefined, + thumbnailUrl: '/dev/fakeThumbnail.png', previewUrl: '/dev/fakePreview.mp4', packageName: null, @@ -289,6 +301,7 @@ export async function checkPieceContentStatusAndDependencies( piece, sourceLayer, studio, + packageOwnerId, getPackageInfos, getPackageContainerPackageStatus, messageFactory || DEFAULT_MESSAGE_FACTORY @@ -588,6 +601,7 @@ async function checkPieceContentExpectedPackageStatus( piece: PieceContentStatusPiece, sourceLayer: ISourceLayer, studio: PieceContentStatusStudio, + packageOwnerId: RundownId | BucketId | StudioId, getPackageInfos: (packageId: ExpectedPackageId) => Promise, getPackageContainerPackageStatus: ( packageContainerId: string, @@ -656,15 +670,31 @@ async function checkPieceContentExpectedPackageStatus( checkedPackageContainers.add(matchedPackageContainer[0]) - const expectedPackageIds = [getExpectedPackageId(piece._id, expectedPackage._id)] + const expectedPackageIds = [ + // Synthesize the expected packageId from the piece + getExpectedPackageIdFromIngestSource( + packageOwnerId, + { + fromPieceType: ExpectedPackageDBType.PIECE, + // HACK: This shouldn't be cast as any, because this could be a bucket piece, but that gives the same result + pieceId: 
piece._id as any, + // HACK: We need a value, but the method doesn't use them.. + partId: piece._id as any, + segmentId: piece._id as any, + }, + expectedPackage._id + ), + ] if (piece.pieceInstanceId) { // If this is a PieceInstance, try looking up the PieceInstance first - expectedPackageIds.unshift(getExpectedPackageId(piece.pieceInstanceId, expectedPackage._id)) + expectedPackageIds.unshift( + getExpectedPackageIdForPieceInstance(piece.pieceInstanceId, expectedPackage._id) + ) if (piece.previousPieceInstanceId) { // Also try the previous PieceInstance, when this is an infinite continuation in case package-manager needs to catchup expectedPackageIds.unshift( - getExpectedPackageId(piece.previousPieceInstanceId, expectedPackage._id) + getExpectedPackageIdForPieceInstance(piece.previousPieceInstanceId, expectedPackage._id) ) } } diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts index 0103802d91..6f78d87ed2 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts @@ -125,86 +125,90 @@ async function setupUIPieceContentStatusesPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.rundownPlaylistId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - // TODO - can this be done cheaper? 
- const contentCache = createReactiveContentCache() - triggerUpdate({ newCache: contentCache }) - - const obs1 = await RundownContentObserver.create(rundownIds, contentCache) - - const innerQueries = [ - contentCache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), - }), - contentCache.Parts.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), - }), - contentCache.Pieces.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), - }), - contentCache.PartInstances.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), - }), - contentCache.PieceInstances.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), - }), - contentCache.AdLibPieces.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), - }), - 
contentCache.AdLibActions.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), - }), - contentCache.BaselineAdLibPieces.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), - }), - contentCache.BaselineAdLibActions.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), - }), - contentCache.Rundowns.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateAll: true }), - changed: () => triggerUpdate({ invalidateAll: true }), - removed: () => triggerUpdate({ invalidateAll: true }), - }), - contentCache.Blueprints.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateAll: true }), - changed: () => triggerUpdate({ invalidateAll: true }), - removed: () => triggerUpdate({ invalidateAll: true }), - }), - contentCache.ShowStyleSourceLayers.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateAll: true }), - changed: () => triggerUpdate({ invalidateAll: true }), - removed: () => triggerUpdate({ invalidateAll: true }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + // TODO - can this be done cheaper? 
+ const contentCache = createReactiveContentCache() + triggerUpdate({ newCache: contentCache }) + + const obs1 = await RundownContentObserver.create(rundownIds, contentCache) + + const innerQueries = [ + contentCache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), + }), + contentCache.Parts.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), + }), + contentCache.Pieces.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), + }), + contentCache.PartInstances.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), + }), + contentCache.PieceInstances.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), + }), + contentCache.AdLibPieces.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), + }), + 
contentCache.AdLibActions.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), + }), + contentCache.BaselineAdLibPieces.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), + }), + contentCache.BaselineAdLibActions.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), + }), + contentCache.Rundowns.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateAll: true }), + changed: () => triggerUpdate({ invalidateAll: true }), + removed: () => triggerUpdate({ invalidateAll: true }), + }), + contentCache.Blueprints.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateAll: true }), + changed: () => triggerUpdate({ invalidateAll: true }), + removed: () => triggerUpdate({ invalidateAll: true }), + }), + contentCache.ShowStyleSourceLayers.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateAll: true }), + changed: () => triggerUpdate({ invalidateAll: true }), + removed: () => triggerUpdate({ invalidateAll: true }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [ diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts index 078f91361b..3b2b33cde4 
100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts @@ -45,6 +45,7 @@ async function regenerateGenericPiece( if (part && segment && sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + part.rundownId, messageFactory, pieceDoc, sourceLayer @@ -107,7 +108,7 @@ export async function regenerateForPieceIds( { _id: protectString(`piece_${pieceId}`), - partId: pieceDoc.startPartId, + partId: pieceDoc.startPartId ?? undefined, rundownId: pieceDoc.startRundownId, pieceId: pieceId, @@ -181,6 +182,7 @@ export async function regenerateForPieceInstanceIds( if (partInstance && segment && sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + pieceDoc.rundownId, messageFactories.get(pieceDoc.rundownId), { ...pieceDoc.piece, @@ -193,7 +195,7 @@ export async function regenerateForPieceInstanceIds( const res: UIPieceContentStatus = { _id: protectString(`piece_${pieceId}`), - partId: pieceDoc.piece.startPartId, + partId: pieceDoc.piece.startPartId ?? 
undefined, rundownId: pieceDoc.rundownId, pieceId: pieceId, @@ -380,6 +382,7 @@ export async function regenerateForBaselineAdLibPieceIds( if (sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + pieceDoc.rundownId, messageFactories.get(pieceDoc.rundownId), pieceDoc, sourceLayer @@ -460,6 +463,7 @@ export async function regenerateForBaselineAdLibActionIds( if (sourceLayer) { const [status, dependencies] = await checkPieceContentStatusAndDependencies( uiStudio, + actionDoc.rundownId, messageFactories.get(actionDoc.rundownId), fakedPiece, sourceLayer diff --git a/meteor/server/publications/segmentPartNotesUI/publication.ts b/meteor/server/publications/segmentPartNotesUI/publication.ts index d01a55c66a..05d4d86a3e 100644 --- a/meteor/server/publications/segmentPartNotesUI/publication.ts +++ b/meteor/server/publications/segmentPartNotesUI/publication.ts @@ -64,48 +64,54 @@ async function setupUISegmentPartNotesPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - // TODO - can this be done cheaper? 
- const cache = createReactiveContentCache() - - // Push update - triggerUpdate({ newCache: cache }) - - const obs1 = await RundownContentObserver.create(rundownIds, cache) - - const innerQueries = [ - cache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - }), - cache.Parts.find({}).observe({ - added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - changed: (doc, oldDoc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), - removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - }), - cache.DeletedPartInstances.find({}).observe({ - added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - changed: (doc, oldDoc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), - removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - }), - cache.Rundowns.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + // TODO - can this be done cheaper? 
+ const cache = createReactiveContentCache() + + // Push update + triggerUpdate({ newCache: cache }) + + const obs1 = await RundownContentObserver.create(rundownIds, cache) + + const innerQueries = [ + cache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + }), + cache.Parts.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + changed: (doc, oldDoc) => + triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), + removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + }), + cache.DeletedPartInstances.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + changed: (doc, oldDoc) => + triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), + removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + }), + cache.Rundowns.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [rundownsObserver] diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index 306014f446..77968b2d99 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -27,7 +27,6 @@ meteorPublish(MeteorPubSub.coreSystem, async function (_token: string | undefine }) meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: StudioId, rundownId: RundownId) { - // HACK: This should 
do real auth triggerWriteAccessBecauseNoCheckNecessary() check(studioId, String) @@ -43,7 +42,6 @@ meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: St meteorPublish( MeteorPubSub.notificationsForRundownPlaylist, async function (studioId: StudioId, playlistId: RundownPlaylistId) { - // HACK: This should do real auth triggerWriteAccessBecauseNoCheckNecessary() check(studioId, String) diff --git a/packages/blueprints-integration/src/api/showStyle.ts b/packages/blueprints-integration/src/api/showStyle.ts index 61ad4d8a87..274020685a 100644 --- a/packages/blueprints-integration/src/api/showStyle.ts +++ b/packages/blueprints-integration/src/api/showStyle.ts @@ -35,6 +35,8 @@ import type { IBlueprintSegment, IBlueprintPiece, IBlueprintPart, + IBlueprintRundownPiece, + IBlueprintRundownPieceDB, } from '../documents/index.js' import type { IBlueprintShowStyleVariant, IOutputLayer, ISourceLayer } from '../showStyle.js' import type { TSR, OnGenerateTimelineObj, TimelineObjectCoreExt } from '../timeline.js' @@ -266,6 +268,7 @@ export interface BlueprintResultRundown { rundown: IBlueprintRundown globalAdLibPieces: IBlueprintAdLibPiece[] globalActions: IBlueprintActionManifest[] + globalPieces: IBlueprintRundownPiece[] baseline: BlueprintResultBaseline } export interface BlueprintResultSegment { @@ -292,6 +295,11 @@ export interface BlueprintSyncIngestNewData { actions: IBlueprintActionManifest[] /** A list of adlibs that have pieceInstances in the partInstance in question */ referencedAdlibs: IBlueprintAdLibPieceDB[] + /** + * The list of pieces which belong to the Rundown, and may be active + * Note: Some of these may have played and been stopped before the current PartInstance + */ + rundownPieces: IBlueprintRundownPieceDB[] } // TODO: add something like this later? 
diff --git a/packages/blueprints-integration/src/documents/index.ts b/packages/blueprints-integration/src/documents/index.ts index d635e43be4..895a42ebae 100644 --- a/packages/blueprints-integration/src/documents/index.ts +++ b/packages/blueprints-integration/src/documents/index.ts @@ -7,5 +7,6 @@ export * from './pieceInstance.js' export * from './pieceGeneric.js' export * from './playlistTiming.js' export * from './rundown.js' +export * from './rundownPiece.js' export * from './rundownPlaylist.js' export * from './segment.js' diff --git a/packages/blueprints-integration/src/documents/part.ts b/packages/blueprints-integration/src/documents/part.ts index a45a5f8310..09e88c44e6 100644 --- a/packages/blueprints-integration/src/documents/part.ts +++ b/packages/blueprints-integration/src/documents/part.ts @@ -1,6 +1,7 @@ import { UserEditingDefinition, UserEditingProperties } from '../userEditing.js' import type { NoteSeverity } from '../lib.js' import type { ITranslatableMessage } from '../translations.js' +import type { IngestPartNotifyItemReady } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' /** Timings for the inTransition, when supported and allowed */ export interface IBlueprintPartInTransition { @@ -58,9 +59,18 @@ export interface IBlueprintMutatablePart * it will trigger a user edit operation of type DefaultUserOperationEditProperties */ userEditProperties?: UserEditingProperties + /** * Whether to stop this piece before the 'keepalive' period of the part */ diff --git a/packages/blueprints-integration/src/documents/rundownPiece.ts b/packages/blueprints-integration/src/documents/rundownPiece.ts new file mode 100644 index 0000000000..f84a786f15 --- /dev/null +++ b/packages/blueprints-integration/src/documents/rundownPiece.ts @@ -0,0 +1,29 @@ +import { IBlueprintPieceGeneric } from './pieceGeneric.js' + +/** + * A variant of a Piece, that is owned by the Rundown. 
+ * This + */ +export interface IBlueprintRundownPiece + extends Omit, 'lifespan'> { + /** When the piece should be active on the timeline. */ + enable: { + start: number + duration?: number + + // For now, these pieces are always absolute (using wall time) rather than relative to the rundown + isAbsolute: true + } + + /** Whether the piece is a real piece, or exists as a marker to stop an infinite piece. If virtual, it does not add any contents to the timeline */ + virtual?: boolean + + /** Whether the piece affects the output of the Studio or is describing an invisible state within the Studio */ + notInVision?: boolean +} + +/** The Rundown piece sent from Core */ +export interface IBlueprintRundownPieceDB + extends IBlueprintRundownPiece { + _id: string +} diff --git a/packages/blueprints-integration/src/previews.ts b/packages/blueprints-integration/src/previews.ts index 038461a359..fbfb31ac65 100644 --- a/packages/blueprints-integration/src/previews.ts +++ b/packages/blueprints-integration/src/previews.ts @@ -1,4 +1,4 @@ -import { SplitsContentBoxContent, SplitsContentBoxProperties } from './content.js' +import { SourceLayerType, SplitsContentBoxContent, SplitsContentBoxProperties } from './content.js' import { NoteSeverity } from './lib.js' import { ITranslatableMessage } from './translations.js' @@ -6,6 +6,10 @@ export interface PopupPreview

{ name?: string preview?: P warnings?: InvalidPreview[] + /** + * Add custom content preview content + */ + additionalPreviewContent?: Array } export type Previews = TablePreview | ScriptPreview | HTMLPreview | SplitPreview | VTPreview | BlueprintImagePreview @@ -19,6 +23,55 @@ export enum PreviewType { BlueprintImage = 'blueprintImage', } +// The PreviewContent types are a partly replica of the types in PreviewPopUpContext.tsx +export type PreviewContent = + | { + type: 'iframe' + href: string + postMessage?: any + dimensions?: { width: number; height: number } + } + | { + type: 'image' + src: string + } + | { + type: 'video' + src: string + } + | { + type: 'script' + script?: string + firstWords?: string + lastWords?: string + comment?: string + lastModified?: number + } + | { + type: 'title' + content: string + } + | { + type: 'inOutWords' + in?: string + out: string + } + | { + type: 'layerInfo' + layerType: SourceLayerType + text: Array + inTime?: number | string + outTime?: number | string + duration?: number | string + } + | { + type: 'separationLine' + } + | { + type: 'data' + content: { key: string; value: string }[] + } + interface PreviewBase { type: PreviewType } diff --git a/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts b/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts index 68f682ebdf..8411fb5791 100644 --- a/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts +++ b/packages/corelib/src/dataModel/ExpectedPackageWorkStatuses.ts @@ -1,6 +1,5 @@ import { ExpectedPackageStatusAPI, Time } from '@sofie-automation/blueprints-integration' -import { ExpectedPackageDBBase } from './ExpectedPackages.js' -import { ExpectedPackageWorkStatusId, PeripheralDeviceId } from './Ids.js' +import { ExpectedPackageId, ExpectedPackageWorkStatusId, PeripheralDeviceId, StudioId } from './Ids.js' /** * ExpectedPackageWorkStatus contains statuses about Work that is being performed on expected packages @@ -10,7 +9,7 @@ import { 
ExpectedPackageWorkStatusId, PeripheralDeviceId } from './Ids.js' export interface ExpectedPackageWorkStatus extends Omit { _id: ExpectedPackageWorkStatusId - studioId: ExpectedPackageDBBase['studioId'] + studioId: StudioId fromPackages: ExpectedPackageWorkStatusFromPackage[] /** Which PeripheralDevice this update came from */ @@ -20,5 +19,5 @@ export interface ExpectedPackageWorkStatus extends Omit { - id: ExpectedPackageDBBase['_id'] + id: ExpectedPackageId } diff --git a/packages/corelib/src/dataModel/ExpectedPackages.ts b/packages/corelib/src/dataModel/ExpectedPackages.ts index 1597ed1d45..ca32b6f132 100644 --- a/packages/corelib/src/dataModel/ExpectedPackages.ts +++ b/packages/corelib/src/dataModel/ExpectedPackages.ts @@ -1,6 +1,6 @@ import { ExpectedPackage, Time } from '@sofie-automation/blueprints-integration' -import { protectString } from '../protectedString.js' -import { getHash, hashObj } from '../lib.js' +import { protectString, unprotectString } from '../protectedString.js' +import { getHash, assertNever } from '../lib.js' import { AdLibActionId, BucketAdLibActionId, @@ -18,7 +18,7 @@ import { import { ReadonlyDeep } from 'type-fest' /* - Expected Packages are created from Pieces in the rundown. + Expected Packages are created from Pieces and other content in the rundown. A "Package" is a generic term for a "thing that can be played", such as media files, audio, graphics etc.. The blueprints generate Pieces with expectedPackages on them. These are then picked up by a Package Manager who then tries to fullfill the expectations. @@ -26,49 +26,59 @@ import { ReadonlyDeep } from 'type-fest' The Package Manager will then copy the file to the right place. 
*/ -export type ExpectedPackageFromRundown = ExpectedPackageDBFromPiece | ExpectedPackageDBFromAdLibAction - -export type ExpectedPackageFromRundownBaseline = - | ExpectedPackageDBFromBaselineAdLibAction - | ExpectedPackageDBFromBaselineAdLibPiece - | ExpectedPackageDBFromRundownBaselineObjects - -export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction - -export type ExpectedPackageDB = - | ExpectedPackageFromRundown - | ExpectedPackageDBFromBucket - | ExpectedPackageFromRundownBaseline - | ExpectedPackageDBFromStudioBaselineObjects - export enum ExpectedPackageDBType { PIECE = 'piece', ADLIB_PIECE = 'adlib_piece', ADLIB_ACTION = 'adlib_action', BASELINE_ADLIB_PIECE = 'baseline_adlib_piece', BASELINE_ADLIB_ACTION = 'baseline_adlib_action', + BASELINE_PIECE = 'baseline_piece', BUCKET_ADLIB = 'bucket_adlib', BUCKET_ADLIB_ACTION = 'bucket_adlib_action', RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects', STUDIO_BASELINE_OBJECTS = 'studio_baseline_objects', } -export interface ExpectedPackageDBBase extends Omit { - _id: ExpectedPackageId - /** The local package id - as given by the blueprints */ - blueprintPackageId: string + +export interface ExpectedPackageDB { + _id: ExpectedPackageId // derived from rundownId and hash of `package` /** The studio of the Rundown of the Piece this package belongs to */ studioId: StudioId - /** Hash that changes whenever the content or version changes. See getContentVersionHash() */ - contentVersionHash: string - - // pieceId: ProtectedString | null - fromPieceType: ExpectedPackageDBType + /** The rundown this package belongs to, if any. Must not be set when bucketId is set */ + rundownId: RundownId | null + /** The bucket this package belongs to, if any. 
Must not be set when rundownId is set */ + bucketId: BucketId | null created: Time + + package: ReadonlyDeep + + // HACK: This should be ExpectedPackageIngestSource[], but for the first iteration this is limited to a single source + ingestSources: [ExpectedPackageIngestSource] + + // playoutSources: { + // /** Any playout PieceInstance. This is limited to the current and next partInstances */ + // pieceInstanceIds: PieceInstanceId[] + // } +} + +export interface ExpectedPackageIngestSourceBucketPiece { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB + /** The Bucket adlib this package belongs to */ + pieceId: BucketAdLibId + /** The `externalId` of the Bucket adlib this package belongs to */ + pieceExternalId: string +} +export interface ExpectedPackageIngestSourceBucketAdlibAction { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION + /** The Bucket adlib-action this package belongs to */ + pieceId: BucketAdLibActionId + /** The `externalId` of the Bucket adlib-action this package belongs to */ + pieceExternalId: string } -export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { + +export interface ExpectedPackageIngestSourcePiece { fromPieceType: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE /** The Piece this package belongs to */ pieceId: PieceId @@ -76,86 +86,123 @@ export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { partId: PartId /** The Segment this package belongs to */ segmentId: SegmentId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId } - -export interface ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE - /** The Piece this package belongs to */ - pieceId: PieceId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId -} - -export interface ExpectedPackageDBFromAdLibAction extends ExpectedPackageDBBase { +export interface 
ExpectedPackageIngestSourceAdlibAction { fromPieceType: ExpectedPackageDBType.ADLIB_ACTION - /** The Adlib Action this package belongs to */ + /** The Piece this package belongs to */ pieceId: AdLibActionId /** The Part this package belongs to */ partId: PartId /** The Segment this package belongs to */ segmentId: SegmentId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId } -export interface ExpectedPackageDBFromBaselineAdLibAction extends ExpectedPackageDBBase { +export interface ExpectedPackageIngestSourceBaselinePiece { + fromPieceType: ExpectedPackageDBType.BASELINE_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId +} +export interface ExpectedPackageIngestSourceBaselineAdlibPiece { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId +} +export interface ExpectedPackageIngestSourceBaselineAdlibAction { fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION /** The Piece this package belongs to */ pieceId: RundownBaselineAdLibActionId - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId } - -export interface ExpectedPackageDBFromRundownBaselineObjects extends ExpectedPackageDBBase { +export interface ExpectedPackageIngestSourceBaselineObjects { fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS - /** The rundown of the Piece this package belongs to */ - rundownId: RundownId - pieceId: null -} -export interface ExpectedPackageDBFromStudioBaselineObjects extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS - pieceId: null -} - -export interface ExpectedPackageDBFromBucketAdLib extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB - bucketId: BucketId - /** The Bucket adlib this package belongs to */ - pieceId: BucketAdLibId - /** The `externalId` of the Bucket adlib this package belongs to */ - pieceExternalId: string -} -export 
interface ExpectedPackageDBFromBucketAdLibAction extends ExpectedPackageDBBase { - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION - bucketId: BucketId - /** The Bucket adlib-action this package belongs to */ - pieceId: BucketAdLibActionId - /** The `externalId` of the Bucket adlib-action this package belongs to */ - pieceExternalId: string } -export function getContentVersionHash(expectedPackage: ReadonlyDeep>): string { - return hashObj({ - content: expectedPackage.content, - version: expectedPackage.version, - // todo: should expectedPackage.sources.containerId be here as well? - }) +export interface ExpectedPackageIngestSourceStudioBaseline { + // Future: Technically this is a playout source, but for now it needs to be treated as an ingest source + fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS } -export function getExpectedPackageId( +export type ExpectedPackageIngestSourcePart = ExpectedPackageIngestSourcePiece | ExpectedPackageIngestSourceAdlibAction + +export type ExpectedPackageIngestSourceBucket = + | ExpectedPackageIngestSourceBucketPiece + | ExpectedPackageIngestSourceBucketAdlibAction + +export type ExpectedPackageIngestSourceRundownBaseline = + | ExpectedPackageIngestSourceBaselinePiece + | ExpectedPackageIngestSourceBaselineAdlibPiece + | ExpectedPackageIngestSourceBaselineAdlibAction + | ExpectedPackageIngestSourceBaselineObjects + +export type ExpectedPackageIngestSource = + | ExpectedPackageIngestSourcePart + | ExpectedPackageIngestSourceRundownBaseline + | ExpectedPackageIngestSourceBucket + | ExpectedPackageIngestSourceStudioBaseline + +/** + * Generate the expectedPackageId for the given piece instance. + * Note: This will soon be replaced with a new flow based on the contentVersionHash once shared ownership is implemented. + */ +export function getExpectedPackageIdForPieceInstance( /** _id of the owner (the piece, adlib etc..) 
*/ - ownerId: - | PieceId - | PieceInstanceId - | AdLibActionId - | RundownBaselineAdLibActionId - | BucketAdLibId - | BucketAdLibActionId - | RundownId - | StudioId, + ownerId: PieceInstanceId, /** The locally unique id of the expectedPackage */ localExpectedPackageId: ExpectedPackage.Base['_id'] ): ExpectedPackageId { return protectString(`${ownerId}_${getHash(localExpectedPackageId)}`) } + +/** + * Generate the temporary expectedPackageId for the given package. + * Note: This will soon be replaced with a new flow based on the contentVersionHash once shared ownership is implemented. + */ +export function getExpectedPackageIdFromIngestSource( + /** Preferably a RundownId or BucketId, but StudioId is allowed when not owned by a rundown or bucket */ + parentId: RundownId | StudioId | BucketId, + source: ExpectedPackageIngestSource, + /** The locally unique id of the expectedPackage */ + localExpectedPackageId: ExpectedPackage.Base['_id'] +): ExpectedPackageId { + let ownerId: string + const ownerPieceType = source.fromPieceType + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + ownerId = unprotectString(source.pieceId) + break + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + ownerId = 'rundownBaselineObjects' + break + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + ownerId = 'studioBaseline' + break + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + ownerId = unprotectString(source.pieceId) + break + + default: + assertNever(source) + throw new Error(`Unknown fromPieceType "${ownerPieceType}"`) + } + return protectString(`${parentId}_${ownerId}_${getHash(localExpectedPackageId)}`) +} + +// Future implementation of id generation, once shared ownership is 
implemented +// export function getExpectedPackageIdNew( +// /** _id of the rundown*/ +// rundownId: RundownId, +// /** The locally unique id of the expectedPackage */ +// expectedPackage: ReadonlyDeep +// ): ExpectedPackageId { +// // This may be too aggressive, but we don't know how to merge some of the properties +// const objHash = hashObj({ +// ...expectedPackage, +// listenToPackageInfoUpdates: false, // Not relevant for the hash +// } satisfies ReadonlyDeep) + +// return protectString(`${rundownId}_${getHash(objHash)}`) +// } diff --git a/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts b/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts new file mode 100644 index 0000000000..7e74aadc4b --- /dev/null +++ b/packages/corelib/src/dataModel/Old/ExpectedPackagesR52.ts @@ -0,0 +1,137 @@ +import type { ExpectedPackage, Time } from '@sofie-automation/blueprints-integration' +import type { + AdLibActionId, + BucketAdLibActionId, + BucketAdLibId, + BucketId, + ExpectedPackageId, + PartId, + PieceId, + RundownBaselineAdLibActionId, + RundownId, + SegmentId, + StudioId, +} from '../Ids.js' + +/** + * Warning: This is a snapshot of the ExpectedPackage interface from before the rework in R53. + * This should not be modified and should only be used in code performing fixup operations. + */ + +/* + Expected Packages are created from Pieces in the rundown. + A "Package" is a generic term for a "thing that can be played", such as media files, audio, graphics etc.. + The blueprints generate Pieces with expectedPackages on them. + These are then picked up by a Package Manager who then tries to fulfill the expectations. + Example: An ExpectedPackage could be a "Media file to be present on the location used by a playout device". + The Package Manager will then copy the file to the right place.
+*/ + +export type ExpectedPackageFromRundown = ExpectedPackageDBFromPiece | ExpectedPackageDBFromAdLibAction + +export type ExpectedPackageFromRundownBaseline = + | ExpectedPackageDBFromBaselineAdLibAction + | ExpectedPackageDBFromBaselineAdLibPiece + | ExpectedPackageDBFromRundownBaselineObjects + +export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction + +export type ExpectedPackageDB = + | ExpectedPackageFromRundown + | ExpectedPackageDBFromBucket + | ExpectedPackageFromRundownBaseline + | ExpectedPackageDBFromStudioBaselineObjects + +export enum ExpectedPackageDBType { + PIECE = 'piece', + ADLIB_PIECE = 'adlib_piece', + ADLIB_ACTION = 'adlib_action', + BASELINE_ADLIB_PIECE = 'baseline_adlib_piece', + BASELINE_ADLIB_ACTION = 'baseline_adlib_action', + BUCKET_ADLIB = 'bucket_adlib', + BUCKET_ADLIB_ACTION = 'bucket_adlib_action', + RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects', + STUDIO_BASELINE_OBJECTS = 'studio_baseline_objects', +} +export interface ExpectedPackageDBBase extends Omit { + _id: ExpectedPackageId + /** The local package id - as given by the blueprints */ + blueprintPackageId: string + + /** The studio of the Rundown of the Piece this package belongs to */ + studioId: StudioId + + /** Hash that changes whenever the content or version changes. 
See getContentVersionHash() */ + contentVersionHash: string + + // pieceId: ProtectedString | null + fromPieceType: ExpectedPackageDBType + + created: Time +} +export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.ADLIB_ACTION + /** The Adlib Action this package belongs to */ + pieceId: AdLibActionId + /** The Part this package belongs to */ + partId: PartId + /** The Segment this package belongs to */ + segmentId: SegmentId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} +export interface ExpectedPackageDBFromBaselineAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION + /** The Piece this package belongs to */ + pieceId: RundownBaselineAdLibActionId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} + +export interface ExpectedPackageDBFromRundownBaselineObjects extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId + pieceId: null +} +export interface ExpectedPackageDBFromStudioBaselineObjects extends ExpectedPackageDBBase { + fromPieceType: 
ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS + pieceId: null +} + +export interface ExpectedPackageDBFromBucketAdLib extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB + bucketId: BucketId + /** The Bucket adlib this package belongs to */ + pieceId: BucketAdLibId + /** The `externalId` of the Bucket adlib this package belongs to */ + pieceExternalId: string +} +export interface ExpectedPackageDBFromBucketAdLibAction extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION + bucketId: BucketId + /** The Bucket adlib-action this package belongs to */ + pieceId: BucketAdLibActionId + /** The `externalId` of the Bucket adlib-action this package belongs to */ + pieceExternalId: string +} diff --git a/packages/corelib/src/dataModel/PackageInfos.ts b/packages/corelib/src/dataModel/PackageInfos.ts index 879875be8a..4305aacd26 100644 --- a/packages/corelib/src/dataModel/PackageInfos.ts +++ b/packages/corelib/src/dataModel/PackageInfos.ts @@ -1,6 +1,5 @@ import { PackageInfo, Time } from '@sofie-automation/blueprints-integration' import { protectString } from '../protectedString.js' -import { ExpectedPackageDB } from './ExpectedPackages.js' import { ExpectedPackageId, PackageInfoId, PeripheralDeviceId, StudioId } from './Ids.js' /** @@ -14,7 +13,7 @@ export interface PackageInfoDB extends PackageInfo.Base { /** Reference to the Package this document has info about */ packageId: ExpectedPackageId /** Reference to the contentVersionHash of the ExpectedPackage, used to reference the expected content+version of the Package */ - expectedContentVersionHash: ExpectedPackageDB['contentVersionHash'] + expectedContentVersionHash: string /** Referring to the actual contentVersionHash of the Package, used to reference the exact content+version of the Package */ actualContentVersionHash: string diff --git a/packages/corelib/src/dataModel/Piece.ts b/packages/corelib/src/dataModel/Piece.ts index 6afc05519f..def89318e9 
100644 --- a/packages/corelib/src/dataModel/Piece.ts +++ b/packages/corelib/src/dataModel/Piece.ts @@ -53,6 +53,15 @@ export interface PieceGeneric extends Omit { export interface Piece extends PieceGeneric, Omit { + /** Timeline enabler. When the piece should be active on the timeline. */ + enable: { + start: number | 'now' // TODO - now will be removed from this eventually, but as it is not an acceptable value 99% of the time, that is not really breaking + duration?: number + + // Pieces owned by the Rundown should always be absolute + isAbsolute?: boolean + } + /** * This is the id of the rundown this piece starts playing in. * Currently this is the only rundown the piece could be playing in @@ -62,12 +71,12 @@ export interface Piece * This is the id of the segment this piece starts playing in. * It is the only segment the piece could be playing in, unless the piece has a lifespan which spans beyond the segment */ - startSegmentId: SegmentId + startSegmentId: SegmentId | null /** * This is the id of the part this piece starts playing in. * If the lifespan is WithinPart, it is the only part the piece could be playing in. */ - startPartId: PartId + startPartId: PartId | null /** Whether this piece is a special piece */ pieceType: IBlueprintPieceType diff --git a/packages/corelib/src/dataModel/PieceInstance.ts b/packages/corelib/src/dataModel/PieceInstance.ts index c21d3716ca..b7f2e3f67a 100644 --- a/packages/corelib/src/dataModel/PieceInstance.ts +++ b/packages/corelib/src/dataModel/PieceInstance.ts @@ -34,7 +34,7 @@ export interface PieceInstance { _id: PieceInstanceId /** The rundown this piece belongs to */ rundownId: RundownId - /** The part instace this piece belongs to */ + /** The part instance this piece belongs to. 
*/ partInstanceId: PartInstanceId /** Whether this PieceInstance is a temprorary wrapping of a Piece */ diff --git a/packages/corelib/src/dataModel/Rundown.ts b/packages/corelib/src/dataModel/Rundown.ts index 61b1159eb9..a3f7a3a38a 100644 --- a/packages/corelib/src/dataModel/Rundown.ts +++ b/packages/corelib/src/dataModel/Rundown.ts @@ -57,9 +57,6 @@ export interface Rundown { */ orphaned?: RundownOrphanedReason - /** Last sent storyStatus to ingestDevice (MOS) */ - notifiedCurrentPlayingPartExternalId?: string - /** Holds notes (warnings / errors) thrown by the blueprints during creation */ notes?: Array diff --git a/packages/corelib/src/playout/__tests__/processAndPrune.test.ts b/packages/corelib/src/playout/__tests__/processAndPrune.test.ts index 582223f75c..9ad5cdf4c0 100644 --- a/packages/corelib/src/playout/__tests__/processAndPrune.test.ts +++ b/packages/corelib/src/playout/__tests__/processAndPrune.test.ts @@ -5,6 +5,8 @@ import { PieceInstance, PieceInstancePiece, ResolvedPieceInstance } from '../../ import { literal } from '../../lib.js' import { protectString } from '../../protectedString.js' import { + createPartCurrentTimes, + PartCurrentTimes, PieceInstanceWithTimings, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, @@ -44,7 +46,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }) } - function runAndTidyResult(pieceInstances: PieceInstance[], nowInPart: number, includeVirtual?: boolean) { + function runAndTidyResult(pieceInstances: PieceInstance[], partTimes: PartCurrentTimes, includeVirtual?: boolean) { const resolvedInstances = processAndPrunePieceInstanceTimings( { one: { @@ -61,7 +63,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }, }, pieceInstances, - nowInPart, + partTimes, undefined, includeVirtual ) @@ -79,7 +81,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000 }, 'two', PieceLifespan.OutOnRundownEnd), ] - const resolvedInstances = 
runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -101,7 +103,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000, duration: 5000 }, 'one', PieceLifespan.OutOnRundownEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -127,7 +129,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('five', { start: 4000 }, 'one', PieceLifespan.OutOnShowStyleEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -177,7 +179,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('zero', { start: 6000 }, 'one', PieceLifespan.OutOnShowStyleEnd, true), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -209,7 +211,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('five', { start: 6000 }, 'one', PieceLifespan.OutOnShowStyleEnd, true), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500, true) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0), true) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -259,7 +261,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('five', { start: 6000 }, 'one', PieceLifespan.OutOnShowStyleEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = 
runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -305,7 +307,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000 }, 'one', PieceLifespan.OutOnSegmentEnd, 5500), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -323,7 +325,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('four', { start: 1000 }, 'one', PieceLifespan.OutOnRundownChange, 4000), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'three', @@ -339,7 +341,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000 }, 'one', PieceLifespan.OutOnShowStyleEnd, 5500), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -366,7 +368,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -399,7 +401,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'two', @@ -427,7 +429,7 @@ describe('processAndPrunePieceInstanceTimings', () => { pieceInstances[1].piece.virtual = true - const resolvedInstances = 
runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) // don't expect virtual Pieces in the results, but 'one' should be pruned too expect(resolvedInstances).toEqual([]) @@ -457,7 +459,7 @@ describe('processAndPrunePieceInstanceTimings', () => { pieceInstances[0].piece.prerollDuration = 200 pieceInstances[1].piece.prerollDuration = 200 - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { @@ -468,6 +470,100 @@ describe('processAndPrunePieceInstanceTimings', () => { }, ]) }) + + describe('absolute timed (rundown owned) pieces', () => { + test('simple collision', () => { + const now = 9000 + const partStart = 8000 + + const pieceInstances = [ + createPieceInstance('one', { start: 0 }, 'one', PieceLifespan.OutOnRundownChange), + createPieceInstance( + 'two', + { start: now + 2000, isAbsolute: true }, + 'one', + PieceLifespan.OutOnRundownChange + ), + createPieceInstance('three', { start: 6000 }, 'one', PieceLifespan.OutOnRundownChange), + ] + + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(now, partStart)) + expect(resolvedInstances).toEqual([ + { + _id: 'one', + priority: 5, + start: 0, + end: 3000, + }, + { + _id: 'two', + priority: 5, + start: partStart + 3000, + end: partStart + 6000, + }, + { + _id: 'three', + priority: 5, + start: 6000, + end: undefined, + }, + ]) + }) + + test('collision with same start time', () => { + const now = 9000 + const partStart = 8000 + + const pieceInstances = [ + createPieceInstance('one', { start: 0 }, 'one', PieceLifespan.OutOnRundownChange), + createPieceInstance( + 'two', + { start: partStart + 2000, isAbsolute: true }, + 'one', + PieceLifespan.OutOnRundownChange + ), + createPieceInstance('three', { start: 2000 }, 'one', PieceLifespan.OutOnRundownChange), + ] + + const 
resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(now, partStart)) + expect(resolvedInstances).toEqual([ + { + _id: 'one', + priority: 5, + start: 0, + end: 2000, + }, + { + _id: 'two', + priority: 5, + start: partStart + 2000, + end: undefined, + }, + ]) + + { + // check stability + pieceInstances[1].piece.enable = { start: 2000 } + pieceInstances[2].piece.enable = { start: partStart + 2000, isAbsolute: true } + + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(now, partStart)) + expect(resolvedInstances).toEqual([ + { + _id: 'one', + priority: 5, + start: 0, + end: 2000, + }, + { + _id: 'three', + priority: 5, + start: partStart + 2000, + end: undefined, + }, + ]) + } + }) + }) }) describe('resolvePrunedPieceInstances', () => { @@ -503,10 +599,10 @@ describe('resolvePrunedPieceInstances', () => { } test('numeric start, no duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 2000 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, resolvedStart: 2000, @@ -515,10 +611,10 @@ describe('resolvePrunedPieceInstances', () => { }) test('numeric start, with planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 2000, duration: 3400 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, resolvedStart: 2000, @@ -527,127 +623,127 @@ describe('resolvePrunedPieceInstances', () => { }) test('now start, no duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = 
createPieceInstance({ start: 'now' }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: undefined, } satisfies ResolvedPieceInstance) }) test('now start, with planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3400 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 3400, } satisfies ResolvedPieceInstance) }) test('now start, with end cap', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }, 5000) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 5000 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 5000 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with end cap and longer planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 6000 }, 5000) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 5000 - 
nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 5000 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with end cap and shorter planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3000 }, 5000) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 3000, } satisfies ResolvedPieceInstance) }) test('now start, with userDuration.endRelativeToPart', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }, undefined, { endRelativeToPart: 4000, }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 4000 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 4000 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('numeric start, with userDuration.endRelativeToNow', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 500 }, undefined, { endRelativeToNow: 4000, }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, resolvedStart: 500, - resolvedDuration: 4000 - 500 + nowInPart, + resolvedDuration: 4000 - 500 + partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with 
userDuration.endRelativeToNow', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }, undefined, { endRelativeToNow: 4000, }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 4000, } satisfies ResolvedPieceInstance) }) test('now start, with end cap, planned duration and userDuration.endRelativeToPart', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3000 }, 5000, { endRelativeToPart: 2800 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 2800 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 2800 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with end cap, planned duration and userDuration.endRelativeToNow', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3000 }, 5000, { endRelativeToNow: 2800 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 2800, } satisfies ResolvedPieceInstance) }) diff --git a/packages/corelib/src/playout/infinites.ts b/packages/corelib/src/playout/infinites.ts index ccda649691..9d136974d4 100644 --- 
a/packages/corelib/src/playout/infinites.ts +++ b/packages/corelib/src/playout/infinites.ts @@ -206,6 +206,7 @@ export function getPlayheadTrackingInfinitesForPart( case PieceLifespan.OutOnSegmentEnd: isValid = currentPartInstance.segmentId === intoPart.segmentId && + !!candidatePiece.piece.startPartId && partsToReceiveOnSegmentEndFromSet.has(candidatePiece.piece.startPartId) break case PieceLifespan.OutOnRundownEnd: @@ -243,13 +244,16 @@ export function getPlayheadTrackingInfinitesForPart( markPieceInstanceAsContinuation(p, instance) if (p.infinite) { - // This was copied from before, so we know we can force the time to 0 - instance.piece = { - ...instance.piece, - enable: { - start: 0, - }, + if (!instance.piece.enable.isAbsolute) { + // This was copied from before, so we know we can force the time to 0 + instance.piece = { + ...instance.piece, + enable: { + start: 0, + }, + } } + instance.infinite = { ...p.infinite, infiniteInstanceIndex: p.infinite.infiniteInstanceIndex + 1, @@ -299,11 +303,16 @@ export function isPiecePotentiallyActiveInPart( return false case PieceLifespan.OutOnSegmentEnd: return ( + !!pieceToCheck.startPartId && pieceToCheck.startSegmentId === part.segmentId && partsToReceiveOnSegmentEndFrom.has(pieceToCheck.startPartId) ) case PieceLifespan.OutOnRundownEnd: - if (pieceToCheck.startRundownId === part.rundownId) { + if ( + pieceToCheck.startRundownId === part.rundownId && + pieceToCheck.startPartId && + pieceToCheck.startSegmentId + ) { if (pieceToCheck.startSegmentId === part.segmentId) { return partsToReceiveOnSegmentEndFrom.has(pieceToCheck.startPartId) } else { @@ -320,6 +329,7 @@ export function isPiecePotentiallyActiveInPart( } else { // Predicting what will happen at arbitrary point in the future return ( + !!pieceToCheck.startPartId && pieceToCheck.startSegmentId === part.segmentId && partsToReceiveOnSegmentEndFrom.has(pieceToCheck.startPartId) ) @@ -332,6 +342,7 @@ export function isPiecePotentiallyActiveInPart( } else { // 
Predicting what will happen at arbitrary point in the future return ( + !!pieceToCheck.startSegmentId && pieceToCheck.startRundownId === part.rundownId && segmentsToReceiveOnRundownEndFrom.has(pieceToCheck.startSegmentId) ) @@ -395,8 +406,8 @@ export function getPieceInstancesForPart( if (pieceA.startPartId === pieceB.startPartId) { return pieceA.enable.start < pieceB.enable.start } - const pieceAIndex = orderedPartIds.indexOf(pieceA.startPartId) - const pieceBIndex = orderedPartIds.indexOf(pieceB.startPartId) + const pieceAIndex = pieceA.startPartId === null ? -2 : orderedPartIds.indexOf(pieceA.startPartId) + const pieceBIndex = pieceB.startPartId === null ? -2 : orderedPartIds.indexOf(pieceB.startPartId) if (pieceAIndex === -1) { return false @@ -535,6 +546,16 @@ export function isCandidateMoreImportant( best: ReadonlyDeep, candidate: ReadonlyDeep ): boolean | undefined { + // If one is absolute timed, prefer that + if (best.piece.enable.isAbsolute && !candidate.piece.enable.isAbsolute) { + // Prefer the absolute best + return false + } + if (!best.piece.enable.isAbsolute && candidate.piece.enable.isAbsolute) { + // Prefer the absolute candidate + return true + } + // Prioritise the one from this part over previous part if (best.infinite?.fromPreviousPart && !candidate.infinite?.fromPreviousPart) { // Prefer the candidate as it is not from previous diff --git a/packages/corelib/src/playout/processAndPrune.ts b/packages/corelib/src/playout/processAndPrune.ts index 5fac61e44b..239c8a96ae 100644 --- a/packages/corelib/src/playout/processAndPrune.ts +++ b/packages/corelib/src/playout/processAndPrune.ts @@ -10,12 +10,49 @@ import { ReadonlyDeep } from 'type-fest' /** * Get the `enable: { start: ?? }` for the new piece in terms that can be used as an `end` for another object */ -function getPieceStartTimeAsReference(newPieceStart: number | 'now'): number | RelativeResolvedEndCap { - return typeof newPieceStart === 'number' ? 
newPieceStart : { offsetFromNow: 0 } +function getPieceStartTimeAsReference( + newPieceStart: number | 'now', + partTimes: PartCurrentTimes, + pieceToAffect: ReadonlyDeep +): number | RelativeResolvedEndCap { + if (typeof newPieceStart !== 'number') return { offsetFromNow: 0 } + + if (pieceToAffect.piece.enable.isAbsolute) { + // If the piece is absolute timed, then the end needs to be adjusted to be absolute + if (pieceToAffect.piece.enable.start === 'now') { + return { offsetFromNow: newPieceStart } + } else { + // Translate to an absolute timestamp + return partTimes.currentTime - partTimes.nowInPart + newPieceStart + } + } + + return newPieceStart } -function getPieceStartTimeWithinPart(p: ReadonlyDeep): 'now' | number { - return p.piece.enable.start +function getPieceStartTimeWithinPart(p: ReadonlyDeep, partTimes: PartCurrentTimes): 'now' | number { + const pieceEnable = p.piece.enable + if (pieceEnable.isAbsolute) { + // Note: these can't be adlibbed, so we don't need to consider adding the preroll + + if (pieceEnable.start === 'now') { + // Should never happen, but just in case + return pieceEnable.start + } else { + // Translate this to the part + return pieceEnable.start - partTimes.currentTime + partTimes.nowInPart + } + } + + // If the piece is dynamically inserted, then its preroll should be factored into its start time, but not for any infinite continuations + const isStartOfAdlib = + !!p.dynamicallyInserted && !(p.infinite?.fromPreviousPart || p.infinite?.fromPreviousPlayhead) + + if (isStartOfAdlib && pieceEnable.start !== 'now') { + return pieceEnable.start + (p.piece.prerollDuration ?? 0) + } else { + return pieceEnable.start + } } function isClear(piece?: ReadonlyDeep): boolean { @@ -51,24 +88,43 @@ export interface PieceInstanceWithTimings extends ReadonlyDeep { * This is a maximum end point of the pieceInstance. 
* If the pieceInstance also has a enable.duration or userDuration set then the shortest one will need to be used * This can be: - * - 'now', if it was stopped by something that does not need a preroll (or is virtual) - * - '#something.start + 100', if it was stopped by something that needs a preroll - * - '100', if not relative to now at all + * - '100', if relative to the start of the part + * - { offsetFromNow: 100 }, if stopped by an absolute time */ resolvedEndCap?: number | RelativeResolvedEndCap priority: number } +export interface PartCurrentTimes { + /** The current time when this was sampled */ + readonly currentTime: number + /** The time the part started playback, if it has begun */ + readonly partStartTime: number | null + /** An approximate current time within the part */ + readonly nowInPart: number +} + +export function createPartCurrentTimes( + currentTime: number, + partStartTime: number | undefined | null +): PartCurrentTimes { + return { + currentTime, + partStartTime: partStartTime ?? null, + nowInPart: typeof partStartTime === 'number' ? currentTime - partStartTime : 0, + } +} + /** * Process the infinite pieces to determine the start time and a maximum end time for each. * Any pieces which have no chance of being shown (duplicate start times) are pruned * The stacking order of infinites is considered, to define the stop times - * Note: `nowInPart` is only needed to order the PieceInstances. The result of this can be cached until that order changes + * Note: `nowInPart` is only needed to order the PieceInstances. The result of this can be cached until that order changes. 
*/ export function processAndPrunePieceInstanceTimings( sourceLayers: SourceLayers, pieces: ReadonlyDeep, - nowInPart: number, + partTimes: PartCurrentTimes, keepDisabledPieces?: boolean, includeVirtual?: boolean ): PieceInstanceWithTimings[] { @@ -82,7 +138,7 @@ export function processAndPrunePieceInstanceTimings( } } - const groupedPieces = groupByToMapFunc( + const piecesGroupedByExclusiveGroupOrLayer = groupByToMapFunc( keepDisabledPieces ? pieces : pieces.filter((p) => !p.disabled), // At this stage, if a Piece is disabled, the `keepDisabledPieces` must be turned on. If that's the case // we split out the disabled Pieces onto the sourceLayerId they actually exist on, instead of putting them @@ -91,13 +147,16 @@ export function processAndPrunePieceInstanceTimings( (p) => p.disabled ? p.piece.sourceLayerId : exclusiveGroupMap.get(p.piece.sourceLayerId) || p.piece.sourceLayerId ) - for (const pieces of groupedPieces.values()) { - // Group and sort the pieces so that we can step through each point in time + for (const piecesInExclusiveGroupOrLayer of piecesGroupedByExclusiveGroupOrLayer.values()) { + // Group and sort the pieces so that we can step through each point in time in order + const piecesByStartMap = groupByToMapFunc(piecesInExclusiveGroupOrLayer, (p) => + getPieceStartTimeWithinPart(p, partTimes) + ) const piecesByStart: Array<[number | 'now', ReadonlyDeep]> = _.sortBy( - Array.from(groupByToMapFunc(pieces, (p) => getPieceStartTimeWithinPart(p)).entries()).map(([k, v]) => + Array.from(piecesByStartMap.entries()).map(([k, v]) => literal<[number | 'now', ReadonlyDeep]>([k === 'now' ? 'now' : Number(k), v]) ), - ([k]) => (k === 'now' ? nowInPart : k) + ([k]) => (k === 'now' ? 
partTimes.nowInPart : k) ) // Step through time @@ -107,10 +166,34 @@ export function processAndPrunePieceInstanceTimings( // Apply the updates // Note: order is important, the higher layers must be done first - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'other') - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'onSegmentEnd') - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'onRundownEnd') - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'onShowStyleEnd') + updateWithNewPieces(results, partTimes, activePieces, newPieces, newPiecesStart, includeVirtual, 'other') + updateWithNewPieces( + results, + partTimes, + activePieces, + newPieces, + newPiecesStart, + includeVirtual, + 'onSegmentEnd' + ) + updateWithNewPieces( + results, + partTimes, + activePieces, + newPieces, + newPiecesStart, + includeVirtual, + 'onRundownEnd' + ) + updateWithNewPieces( + results, + partTimes, + activePieces, + newPieces, + newPiecesStart, + includeVirtual, + 'onShowStyleEnd' + ) } } @@ -119,6 +202,7 @@ export function processAndPrunePieceInstanceTimings( } function updateWithNewPieces( results: PieceInstanceWithTimings[], + partTimes: PartCurrentTimes, activePieces: PieceInstanceOnInfiniteLayers, newPieces: PieceInstanceOnInfiniteLayers, newPiecesStart: number | 'now', @@ -129,7 +213,7 @@ function updateWithNewPieces( if (newPiece) { const activePiece = activePieces[key] if (activePiece) { - activePiece.resolvedEndCap = getPieceStartTimeAsReference(newPiecesStart) + activePiece.resolvedEndCap = getPieceStartTimeAsReference(newPiecesStart, partTimes, activePiece) } // track the new piece activePieces[key] = newPiece @@ -154,7 +238,11 @@ function updateWithNewPieces( (newPiecesStart !== 0 || isCandidateBetterToBeContinued(activePieces.other, newPiece)) ) { // These modes should stop the 'other' when they start if not hidden behind a 
higher priority onEnd - activePieces.other.resolvedEndCap = getPieceStartTimeAsReference(newPiecesStart) + activePieces.other.resolvedEndCap = getPieceStartTimeAsReference( + newPiecesStart, + partTimes, + activePieces.other + ) activePieces.other = undefined } } @@ -221,21 +309,25 @@ function findPieceInstancesOnInfiniteLayers(pieces: ReadonlyDeep expectedMediaItems: Array expectedPlayoutItems: Array - expectedPackages: Array + expectedPackages: Array // Note: when reading, this could be in the old format timeline?: TimelineComplete } diff --git a/packages/corelib/src/worker/events.ts b/packages/corelib/src/worker/events.ts index 2c1eb31a8f..9efbb50a96 100644 --- a/packages/corelib/src/worker/events.ts +++ b/packages/corelib/src/worker/events.ts @@ -3,7 +3,6 @@ import { PartInstanceId, RundownId, RundownPlaylistId, StudioId } from '../dataM export enum EventsJobs { PartInstanceTimings = 'partInstanceTimings', RundownDataChanged = 'rundownDataChanged', - NotifyCurrentlyPlayingPart = 'notifyCurrentlyPlayingPart', } export interface PartInstanceTimingsProps { @@ -16,12 +15,6 @@ export interface RundownDataChangedProps { rundownId: RundownId } -export interface NotifyCurrentlyPlayingPartProps { - rundownId: RundownId - isRehearsal: boolean - partExternalId: string | null -} - /** * Set of valid functions, of form: * `id: (data) => return` @@ -29,7 +22,6 @@ export interface NotifyCurrentlyPlayingPartProps { export type EventsJobFunc = { [EventsJobs.PartInstanceTimings]: (data: PartInstanceTimingsProps) => void [EventsJobs.RundownDataChanged]: (data: RundownDataChangedProps) => void - [EventsJobs.NotifyCurrentlyPlayingPart]: (data: NotifyCurrentlyPlayingPartProps) => void } export function getEventsQueueName(id: StudioId): string { diff --git a/packages/corelib/src/worker/ingest.ts b/packages/corelib/src/worker/ingest.ts index 3e27a13bc1..ad2c081939 100644 --- a/packages/corelib/src/worker/ingest.ts +++ b/packages/corelib/src/worker/ingest.ts @@ -104,10 +104,6 @@ export 
enum IngestJobs { */ MosSwapStory = 'mosSwapStory', - /** - * Debug: Regenerate ExpectedPackages for a Rundown - */ - ExpectedPackagesRegenerate = 'expectedPackagesRegenerate', /** * Some PackageInfos have been updated, regenerate any Parts which depend on these PackageInfos */ @@ -229,9 +225,6 @@ export interface MosSwapStoryProps extends IngestPropsBase { story1: MOS.IMOSString128 } -export interface ExpectedPackagesRegenerateProps { - rundownId: RundownId -} export interface PackageInfosUpdatedRundownProps extends IngestPropsBase { packageIds: ExpectedPackageId[] } @@ -312,7 +305,6 @@ export type IngestJobFunc = { [IngestJobs.MosMoveStory]: (data: MosMoveStoryProps) => void [IngestJobs.MosSwapStory]: (data: MosSwapStoryProps) => void - [IngestJobs.ExpectedPackagesRegenerate]: (data: ExpectedPackagesRegenerateProps) => void [IngestJobs.PackageInfosUpdatedRundown]: (data: PackageInfosUpdatedRundownProps) => void [IngestJobs.UserRemoveRundown]: (data: UserRemoveRundownProps) => void diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 0a96d7aa02..d11c3c5431 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -356,6 +356,7 @@ const MockShowStyleBlueprint: () => ShowStyleBlueprintManifest = () => ({ rundown, globalAdLibPieces: [], globalActions: [], + globalPieces: [], baseline: { timelineObjects: [] }, } }, diff --git a/packages/job-worker/src/blueprints/__tests__/context-events.test.ts b/packages/job-worker/src/blueprints/__tests__/context-events.test.ts index ba5e7ac60b..67324f3e72 100644 --- a/packages/job-worker/src/blueprints/__tests__/context-events.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/context-events.test.ts @@ -273,6 +273,7 @@ describe('Test blueprint api context', () => { rundownId, })) as PieceInstance expect(pieceInstance).toBeTruthy() + expect(pieceInstance.partInstanceId).toBe(partInstance._id) // Check what was 
generated const context = await getContext(rundown, undefined, partInstance, undefined) diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index b1bd5dea2b..b413c2c6d0 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -2,7 +2,11 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibActio import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' -import { deserializePieceTimelineObjectsBlob, PieceGeneric } from '@sofie-automation/corelib/dist/dataModel/Piece' +import { + deserializePieceTimelineObjectsBlob, + Piece, + PieceGeneric, +} from '@sofie-automation/corelib/dist/dataModel/Piece' import { PieceInstance, PieceInstancePiece, @@ -39,6 +43,7 @@ import { IBlueprintPieceInstance, IBlueprintResolvedPieceInstance, IBlueprintRundownDB, + IBlueprintRundownPieceDB, IBlueprintRundownPlaylist, IBlueprintSegmentDB, IBlueprintSegmentRundown, @@ -64,6 +69,7 @@ import { } from '@sofie-automation/blueprints-integration/dist/userEditing' import type { PlayoutMutatablePart } from '../../playout/model/PlayoutPartInstanceModel.js' import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' +import { IngestPartNotifyItemReady } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' /** * Convert an object to have all the values of all keys (including optionals) be 'true' @@ -119,6 +125,9 @@ export const PlayoutMutatablePartSampleKeys = allKeysOfObject return obj } +/** + * Convert a Rundown owned Piece into IBlueprintAdLibPieceDB, for passing into the blueprints + * Note: This does not check whether has the correct ownership + * @param piece the Piece to convert + * @returns a cloned 
complete and clean IBlueprintRundownPieceDB + */ +export function convertRundownPieceToBlueprints(piece: ReadonlyDeep): IBlueprintRundownPieceDB { + const obj: Complete = { + ...convertPieceGenericToBlueprintsInner(piece), + _id: unprotectString(piece._id), + enable: { + ...piece.enable, + start: piece.enable.start === 'now' ? 0 : piece.enable.start, + isAbsolute: true, + }, + virtual: piece.virtual, + notInVision: piece.notInVision, + } + return obj +} + /** * Convert a DBPart into IBlueprintPartDB, for passing into the blueprints * @param part the Part to convert @@ -280,6 +310,9 @@ export function convertPartToBlueprints(part: ReadonlyDeep): IBlueprintP expectedDuration: part.expectedDuration, holdMode: part.holdMode, shouldNotifyCurrentPlayingPart: part.shouldNotifyCurrentPlayingPart, + ingestNotifyPartExternalId: part.ingestNotifyPartExternalId, + ingestNotifyPartReady: part.ingestNotifyPartReady, + ingestNotifyItemsReady: clone(part.ingestNotifyItemsReady), classes: clone(part.classes), classesForNext: clone(part.classesForNext), displayDurationGroup: part.displayDurationGroup, @@ -666,6 +699,7 @@ export function convertPartialBlueprintMutablePartToCore( return playoutUpdatePart } + export function createBlueprintQuickLoopInfo(playlist: ReadonlyDeep): BlueprintQuickLookInfo | null { const playlistLoopProps = playlist.quickLoop if (!playlistLoopProps) return null diff --git a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index 2e183391ae..f18e2e3a0c 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -255,6 +255,9 @@ export class PartAndPieceInstanceActionService { }) if (!pieceDB) throw new Error(`Cannot find Piece ${piece._id}`) + if (!pieceDB.startPartId || 
!pieceDB.startSegmentId) + throw new Error(`Piece ${piece._id} does not belong to a part`) + const rundown = this._playoutModel.getRundown(pieceDB.startRundownId) const segment = rundown?.getSegment(pieceDB.startSegmentId) const part = segment?.getPart(pieceDB.startPartId) @@ -534,6 +537,7 @@ export async function applyActionSideEffects( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) diff --git a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts index ebee49813b..823082a7bb 100644 --- a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts +++ b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts @@ -58,6 +58,7 @@ import { postProcessPieces, postProcessTimelineObjects } from '../../../postProc import { ActionPartChange, PartAndPieceInstanceActionService } from '../PartAndPieceInstanceActionService.js' import { mock } from 'jest-mock-extended' import { QuickLoopService } from '../../../../playout/model/services/QuickLoopService.js' +import { SelectedPartInstance } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' const { postProcessPieces: postProcessPiecesOrig, postProcessTimelineObjects: postProcessTimelineObjectsOrig } = jest.requireActual('../../../postProcess') @@ -238,7 +239,9 @@ describe('Test blueprint api context', () => { nextPartInstance: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | undefined | null, previousPartInstance?: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | null ) { - const convertInfo = (info: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | null) => { + const convertInfo = ( + info: PlayoutPartInstanceModel | DBPartInstance | 
PieceInstance | null + ): SelectedPartInstance | null => { if (!info) { return null } else if ('partInstanceId' in info) { diff --git a/packages/job-worker/src/blueprints/context/watchedPackages.ts b/packages/job-worker/src/blueprints/context/watchedPackages.ts index 29d1e8901f..edf97c9377 100644 --- a/packages/job-worker/src/blueprints/context/watchedPackages.ts +++ b/packages/job-worker/src/blueprints/context/watchedPackages.ts @@ -1,25 +1,25 @@ -import { - ExpectedPackageDB, - ExpectedPackageDBBase, - ExpectedPackageFromRundown, -} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { PackageInfoDB } from '@sofie-automation/corelib/dist/dataModel/PackageInfos' import { JobContext } from '../../jobs/index.js' -import { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { BucketId, ExpectedPackageId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Filter as FilterQuery } from 'mongodb' import { PackageInfo } from '@sofie-automation/blueprints-integration' import { unprotectObjectArray } from '@sofie-automation/corelib/dist/protectedString' -import { ExpectedPackageForIngestModel, IngestModelReadonly } from '../../ingest/model/IngestModel.js' +import { IngestModelReadonly } from '../../ingest/model/IngestModel.js' import { ReadonlyDeep } from 'type-fest' +import type { IngestExpectedPackage } from '../../ingest/model/IngestExpectedPackage.js' +import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' /** * This is a helper class to simplify exposing packageInfo to various places in the blueprints */ export class WatchedPackagesHelper { - private readonly packages = new Map>() + private readonly packages = new Map< + ExpectedPackageId, + ReadonlyDeep> + >() private constructor( - packages: ReadonlyDeep, + packages: ReadonlyDeep[]>, private readonly packageInfos: ReadonlyDeep ) { for (const pkg of packages) { @@ -39,21 +39,41 @@ export class 
WatchedPackagesHelper { * @param studioId The studio this is for * @param filter A mongo query to specify the packages that should be included */ - static async create( + static async create( context: JobContext, - filter: FilterQuery> + rundownId: RundownId | null, + bucketId: BucketId | null, + filterIngestSources: FilterQuery ): Promise { // Load all the packages and the infos that are watched const watchedPackages = await context.directCollections.ExpectedPackages.findFetch({ - ...filter, studioId: context.studioId, - } as any) // TODO: don't use any here + rundownId: rundownId, + bucketId: bucketId, + ingestSources: { + $elemMatch: filterIngestSources, + }, + }) const watchedPackageInfos = await context.directCollections.PackageInfos.findFetch({ studioId: context.studioId, packageId: { $in: watchedPackages.map((p) => p._id) }, }) - return new WatchedPackagesHelper(watchedPackages, watchedPackageInfos) + const watchedIngestPackages: IngestExpectedPackage[] = watchedPackages.flatMap( + (expectedPackage) => { + // Split into a package per source + return expectedPackage.ingestSources.map( + (source) => + ({ + _id: expectedPackage._id, + package: expectedPackage.package, + source: source, + }) satisfies IngestExpectedPackage + ) + } + ) + + return new WatchedPackagesHelper(watchedIngestPackages, watchedPackageInfos) } /** @@ -65,7 +85,7 @@ export class WatchedPackagesHelper { context: JobContext, ingestModel: IngestModelReadonly ): Promise { - const packages: ReadonlyDeep[] = [] + const packages: ReadonlyDeep[] = [] packages.push(...ingestModel.expectedPackagesForRundownBaseline) @@ -77,7 +97,7 @@ export class WatchedPackagesHelper { return this.#createFromPackages( context, - packages.filter((pkg) => !!pkg.listenToPackageInfoUpdates) + packages.filter((pkg) => !!pkg.package.listenToPackageInfoUpdates) ) } @@ -92,7 +112,7 @@ export class WatchedPackagesHelper { ingestModel: IngestModelReadonly, segmentExternalIds: string[] ): Promise { - const packages: 
ReadonlyDeep[] = [] + const packages: ReadonlyDeep[] = [] for (const externalId of segmentExternalIds) { const segment = ingestModel.getSegmentByExternalId(externalId) @@ -105,11 +125,11 @@ export class WatchedPackagesHelper { return this.#createFromPackages( context, - packages.filter((pkg) => !!pkg.listenToPackageInfoUpdates) + packages.filter((pkg) => !!pkg.package.listenToPackageInfoUpdates) ) } - static async #createFromPackages(context: JobContext, packages: ReadonlyDeep[]) { + static async #createFromPackages(context: JobContext, packages: ReadonlyDeep[]) { // Load all the packages and the infos that are watched const watchedPackageInfos = packages.length > 0 @@ -127,8 +147,11 @@ export class WatchedPackagesHelper { * This is useful so that all the data for a rundown can be loaded at the start of an ingest operation, and then subsets can be taken for particular blueprint methods without needing to do more db operations. * @param func A filter to check if each package should be included */ - filter(_context: JobContext, func: (pkg: ReadonlyDeep) => boolean): WatchedPackagesHelper { - const watchedPackages: ReadonlyDeep[] = [] + filter( + _context: JobContext, + func: (pkg: ReadonlyDeep>) => boolean + ): WatchedPackagesHelper { + const watchedPackages: ReadonlyDeep>[] = [] for (const pkg of this.packages.values()) { if (func(pkg)) watchedPackages.push(pkg) } @@ -139,13 +162,13 @@ export class WatchedPackagesHelper { return new WatchedPackagesHelper(watchedPackages, watchedPackageInfos) } - getPackage(packageId: ExpectedPackageId): ReadonlyDeep | undefined { - return this.packages.get(packageId) + hasPackage(packageId: ExpectedPackageId): boolean { + return this.packages.has(packageId) } getPackageInfo(packageId: string): Readonly> { for (const pkg of this.packages.values()) { - if (pkg.blueprintPackageId === packageId) { + if (pkg.package._id === packageId) { const info = this.packageInfos.filter((p) => p.packageId === pkg._id) return 
unprotectObjectArray(info) } diff --git a/packages/job-worker/src/blueprints/postProcess.ts b/packages/job-worker/src/blueprints/postProcess.ts index 5ec47e2f4f..5c17bb1a3c 100644 --- a/packages/job-worker/src/blueprints/postProcess.ts +++ b/packages/job-worker/src/blueprints/postProcess.ts @@ -13,6 +13,7 @@ import { PieceLifespan, IBlueprintPieceType, ITranslatableMessage, + IBlueprintRundownPiece, } from '@sofie-automation/blueprints-integration' import { BlueprintId, @@ -358,6 +359,85 @@ export function postProcessAdLibActions( }) } +/** + * Process and validate some IBlueprintRundownPiece into Piece + * @param context Context from the job queue + * @param pieces IBlueprintPiece to process + * @param blueprintId Id of the Blueprint the Pieces are from + * @param rundownId Id of the Rundown the Pieces belong to + * @param setInvalid If true all Pieces will be marked as `invalid`, this should be set to match the owning Part + */ +export function postProcessGlobalPieces( + context: JobContext, + pieces: Array, + blueprintId: BlueprintId, + rundownId: RundownId, + setInvalid?: boolean +): Piece[] { + const span = context.startSpan('blueprints.postProcess.postProcessPieces') + + const uniqueIds = new Map() + const timelineUniqueIds = new Set() + + const processedPieces = pieces.map((orgPiece: IBlueprintRundownPiece) => { + if (!orgPiece.externalId) + throw new Error( + `Error in blueprint "${blueprintId}" externalId not set for rundown piece ("${orgPiece.name}")` + ) + + const docId = getIdHash( + 'Piece', + uniqueIds, + `${rundownId}_${blueprintId}_rundown_piece_${orgPiece.sourceLayerId}_${orgPiece.externalId}` + ) + + const piece: Piece = { + ...orgPiece, + content: omit(orgPiece.content, 'timelineObjects'), + + pieceType: IBlueprintPieceType.Normal, + lifespan: PieceLifespan.OutOnRundownChange, + + _id: protectString(docId), + startRundownId: rundownId, + startSegmentId: null, + startPartId: null, + invalid: setInvalid ?? 
false, + timelineObjectsString: EmptyPieceTimelineObjectsBlob, + } + + if (piece.pieceType !== IBlueprintPieceType.Normal) { + // transition pieces must not be infinite, lets enforce that + piece.lifespan = PieceLifespan.WithinPart + } + if (piece.extendOnHold) { + // HOLD pieces must not be infinite, as they become that when being held + piece.lifespan = PieceLifespan.WithinPart + } + + if (piece.enable.start === 'now') + throw new Error( + `Error in blueprint "${blueprintId}" rundown piece cannot have a start of 'now'! ("${piece.name}")` + ) + + const timelineObjects = postProcessTimelineObjects( + piece._id, + blueprintId, + orgPiece.content.timelineObjects, + timelineUniqueIds + ) + piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) + + // Fill in ids of unnamed expectedPackages + setDefaultIdOnExpectedPackages(piece.expectedPackages) + + return piece + }) + + span?.end() + return processedPieces +} + /** * Process and validate TSRTimelineObj for the StudioBaseline into TimelineObjRundown * @param blueprintId Id of the Blueprint the TSRTimelineObj are from diff --git a/packages/job-worker/src/events/handle.ts b/packages/job-worker/src/events/handle.ts index 7103a6e800..10476bfe61 100644 --- a/packages/job-worker/src/events/handle.ts +++ b/packages/job-worker/src/events/handle.ts @@ -1,8 +1,4 @@ -import { - NotifyCurrentlyPlayingPartProps, - PartInstanceTimingsProps, - RundownDataChangedProps, -} from '@sofie-automation/corelib/dist/worker/events' +import { PartInstanceTimingsProps, RundownDataChangedProps } from '@sofie-automation/corelib/dist/worker/events' import { getCurrentTime } from '../lib/index.js' import { JobContext } from '../jobs/index.js' import { logger } from '../logging.js' @@ -17,16 +13,7 @@ import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyE import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' import { ICollection, MongoModifier } from 
'../db/index.js' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { ExternalMessageQueueObjId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { runWithRundownLock } from '../ingest/lock.js' -import { - PeripheralDevice, - PeripheralDeviceCategory, - PeripheralDeviceType, -} from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { MOS } from '@sofie-automation/corelib' -import { executePeripheralDeviceFunction } from '../peripheralDevice.js' -import { DEFAULT_MOS_TIMEOUT_TIME } from '@sofie-automation/shared-lib/dist/core/constants' +import { ExternalMessageQueueObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' async function getBlueprintAndDependencies(context: JobContext, rundown: ReadonlyDeep) { const pShowStyle = context.getShowStyleCompound(rundown.showStyleVariantId, rundown.showStyleBaseId) @@ -226,120 +213,3 @@ export async function handleRundownDataHasChanged(context: JobContext, data: Run logger.error(`Error in showStyleBlueprint.onRundownDataChangedEvent: ${stringifyError(err)}`) } } - -export async function handleNotifyCurrentlyPlayingPart( - context: JobContext, - data: NotifyCurrentlyPlayingPartProps -): Promise { - const rundown = await context.directCollections.Rundowns.findOne(data.rundownId) - if (!rundown) { - logger.warn(`Rundown "${data.rundownId} is missing. Skipping notifyCurrentPlayingPart`) - return - } - - if (rundown.source.type !== 'nrcs') { - logger.warn(`Rundown "${rundown._id} has no peripheralDevice. 
Skipping notifyCurrentPlayingPart`) - return - } - - const device = await context.directCollections.PeripheralDevices.findOne({ - _id: rundown.source.peripheralDeviceId, - // Future: we really should be constraining this to the studio, but that is often only defined on the parent of this device - // studioId: context.studioId, - parentDeviceId: { $exists: true }, - }) - if (!device || !device.parentDeviceId) { - logger.warn( - `PeripheralDevice "${rundown.source.peripheralDeviceId}" for Rundown "${rundown._id} not found. Skipping notifyCurrentPlayingPart` - ) - return - } - const parentDevice = await context.directCollections.PeripheralDevices.findOne({ - _id: device.parentDeviceId, - 'studioAndConfigId.studioId': context.studioId, - parentDeviceId: { $exists: false }, - }) - if (!parentDevice) { - logger.warn( - `PeripheralDevice "${rundown.source.peripheralDeviceId}" for Rundown "${rundown._id} not found. Skipping notifyCurrentPlayingPart` - ) - return - } - - const previousPlayingPartExternalId: string | null = rundown.notifiedCurrentPlayingPartExternalId || null - const currentPlayingPartExternalId: string | null = data.isRehearsal ? 
null : data.partExternalId - - // Lock the rundown so that we are allowed to write to it - // This is technically a bit of a race condition, but is really low risk and low impact if it does - await runWithRundownLock(context, rundown._id, async (rundown0) => { - if (rundown0) { - if (currentPlayingPartExternalId) { - await context.directCollections.Rundowns.update(rundown._id, { - $set: { - notifiedCurrentPlayingPartExternalId: currentPlayingPartExternalId, - }, - }) - } else { - await context.directCollections.Rundowns.update(rundown._id, { - $unset: { - notifiedCurrentPlayingPartExternalId: 1, - }, - }) - } - } - }) - - // TODO: refactor this to be non-mos centric - if (device.category === PeripheralDeviceCategory.INGEST && device.type === PeripheralDeviceType.MOS) { - // Note: rundown may not be up to date anymore - await notifyCurrentPlayingPartMOS( - context, - device, - rundown.externalId, - previousPlayingPartExternalId, - currentPlayingPartExternalId - ) - } -} - -async function notifyCurrentPlayingPartMOS( - context: JobContext, - peripheralDevice: PeripheralDevice, - rundownExternalId: string, - oldPlayingPartExternalId: string | null, - newPlayingPartExternalId: string | null -): Promise { - if (oldPlayingPartExternalId !== newPlayingPartExternalId) { - // New implementation 2022 only sends PLAY, never stop, after getting advice from AP - // Reason 1: NRK ENPS "sendt tid" (elapsed time) stopped working in ENPS 8/9 when doing STOP prior to PLAY - // Reason 2: there's a delay between the STOP (yellow line disappears) and PLAY (yellow line re-appears), which annoys the users - if (newPlayingPartExternalId) { - try { - await setStoryStatusMOS( - context, - peripheralDevice._id, - rundownExternalId, - newPlayingPartExternalId, - MOS.IMOSObjectStatus.PLAY - ) - } catch (error) { - logger.error(`Error in setStoryStatus PLAY: ${stringifyError(error)}`) - } - } - } -} - -async function setStoryStatusMOS( - context: JobContext, - deviceId: PeripheralDeviceId, - 
rundownExternalId: string, - storyId: string, - status: MOS.IMOSObjectStatus -): Promise { - logger.debug('setStoryStatus', { deviceId, externalId: rundownExternalId, storyId, status }) - return executePeripheralDeviceFunction(context, deviceId, DEFAULT_MOS_TIMEOUT_TIME + 1000, 'setStoryStatus', [ - rundownExternalId, - storyId, - status, - ]) -} diff --git a/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts b/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts index 075e8b5287..f6428f6090 100644 --- a/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts +++ b/packages/job-worker/src/ingest/__tests__/expectedPackages.test.ts @@ -6,7 +6,7 @@ import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { defaultPart, defaultPiece, defaultAdLibPiece } from '../../__mocks__/defaultCollectionObjects.js' import { LAYER_IDS } from '../../__mocks__/presetCollections.js' import { ExpectedPackage, PieceLifespan, VTContent } from '@sofie-automation/blueprints-integration' -import { updateExpectedPackagesForPartModel } from '../expectedPackages.js' +import { updateExpectedMediaAndPlayoutItemsForPartModel } from '../expectedPackages.js' import { MockJobContext, setupDefaultJobEnvironment } from '../../__mocks__/context.js' import { ReadonlyDeep } from 'type-fest' import { IngestPartModel } from '../model/IngestPartModel.js' @@ -120,7 +120,6 @@ describe('Expected Media Items', () => { test('Generates ExpectedPackages(/ExpectedMediaItems) for a Part', async () => { const setExpectedMediaItems = jest.fn() const setExpectedPlayoutItems = jest.fn() - const setExpectedPackages = jest.fn() const { part, pieces, adLibPieces } = getMockPartContent() @@ -135,16 +134,12 @@ describe('Expected Media Items', () => { setExpectedMediaItems, setExpectedPlayoutItems, - setExpectedPackages, setInvalid: function (_invalid: boolean): void { throw new Error('Function not implemented.') }, } - 
updateExpectedPackagesForPartModel(context, partModel) - - expect(setExpectedPackages).toHaveBeenCalledTimes(1) - expect(setExpectedPackages.mock.calls[0][0]).toHaveLength(4) + updateExpectedMediaAndPlayoutItemsForPartModel(context, partModel) expect(setExpectedPlayoutItems).toHaveBeenCalledTimes(1) expect(setExpectedPlayoutItems).toHaveBeenCalledWith([]) diff --git a/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts b/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts index 498802d0d0..f372c3830b 100644 --- a/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts +++ b/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts @@ -104,6 +104,7 @@ describe('SyncChangesToPartInstancesWorker', () => { return mock( { findPart: jest.fn(() => undefined), + getGlobalPieces: jest.fn(() => []), }, mockOptions ) @@ -338,6 +339,7 @@ describe('SyncChangesToPartInstancesWorker', () => { return mock( { findPart: jest.fn(() => undefined), + getGlobalPieces: jest.fn(() => []), }, mockOptions ) diff --git a/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts b/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts index 2fccb96cfc..dd19f14023 100644 --- a/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts +++ b/packages/job-worker/src/ingest/bucket/bucketAdlibs.ts @@ -23,7 +23,6 @@ import { import { omit } from '@sofie-automation/corelib/dist/lib' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' -import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { MongoQuery } from '../../db/index.js' export async function handleBucketRemoveAdlibPiece( @@ -41,7 +40,7 @@ export async function handleBucketRemoveAdlibPiece( await Promise.all([ context.directCollections.BucketAdLibPieces.remove({ _id: { $in: idsToUpdate } }), 
cleanUpExpectedMediaItemForBucketAdLibPiece(context, idsToUpdate), - cleanUpExpectedPackagesForBucketAdLibs(context, idsToUpdate), + cleanUpExpectedPackagesForBucketAdLibs(context, piece.bucketId, idsToUpdate), ]) } @@ -60,7 +59,7 @@ export async function handleBucketRemoveAdlibAction( await Promise.all([ context.directCollections.BucketAdLibActions.remove({ _id: { $in: idsToUpdate } }), cleanUpExpectedMediaItemForBucketAdLibActions(context, idsToUpdate), - cleanUpExpectedPackagesForBucketAdLibsActions(context, idsToUpdate), + cleanUpExpectedPackagesForBucketAdLibsActions(context, action.bucketId, idsToUpdate), ]) } @@ -73,12 +72,6 @@ export async function handleBucketEmpty(context: JobContext, data: BucketEmptyPr context.directCollections.ExpectedMediaItems.remove({ bucketId: id, studioId: context.studioId }), context.directCollections.ExpectedPackages.remove({ studioId: context.studioId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, - bucketId: id, - }), - context.directCollections.ExpectedPackages.remove({ - studioId: context.studioId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, bucketId: id, }), ]) diff --git a/packages/job-worker/src/ingest/bucket/import.ts b/packages/job-worker/src/ingest/bucket/import.ts index 1ae1d9c2ce..5698d5975f 100644 --- a/packages/job-worker/src/ingest/bucket/import.ts +++ b/packages/job-worker/src/ingest/bucket/import.ts @@ -161,7 +161,13 @@ async function regenerateBucketItemFromIngestInfo( if (!showStyleCompound) throw new Error(`Unable to create a ShowStyleCompound for ${showStyleBase._id}, ${showStyleVariant._id} `) - const rawAdlib = await generateBucketAdlibForVariant(context, blueprint, showStyleCompound, ingestInfo.payload) + const rawAdlib = await generateBucketAdlibForVariant( + context, + blueprint, + showStyleCompound, + bucketId, + ingestInfo.payload + ) if (rawAdlib) { const importVersions: RundownImportVersions = { @@ -238,7 +244,7 @@ async function regenerateBucketItemFromIngestInfo( ps.push( 
cleanUpExpectedMediaItemForBucketAdLibPiece(context, adlibIdsToRemoveArray), - cleanUpExpectedPackagesForBucketAdLibs(context, adlibIdsToRemoveArray), + cleanUpExpectedPackagesForBucketAdLibs(context, bucketId, adlibIdsToRemoveArray), context.directCollections.BucketAdLibPieces.remove({ _id: { $in: adlibIdsToRemoveArray } }) ) } @@ -247,7 +253,7 @@ async function regenerateBucketItemFromIngestInfo( ps.push( cleanUpExpectedMediaItemForBucketAdLibActions(context, actionIdsToRemoveArray), - cleanUpExpectedPackagesForBucketAdLibsActions(context, actionIdsToRemoveArray), + cleanUpExpectedPackagesForBucketAdLibsActions(context, bucketId, actionIdsToRemoveArray), context.directCollections.BucketAdLibActions.remove({ _id: { $in: actionIdsToRemoveArray } }) ) } @@ -258,17 +264,18 @@ async function generateBucketAdlibForVariant( context: JobContext, blueprint: ReadonlyDeep, showStyleCompound: ReadonlyDeep, + bucketId: BucketId, // pieceId: BucketAdLibId | BucketAdLibActionId, payload: IngestAdlib ): Promise { if (!blueprint.blueprint.getAdlibItem) return null - const watchedPackages = await WatchedPackagesHelper.create(context, { - // We don't know what the `pieceId` will be, but we do know the `externalId` - pieceExternalId: payload.externalId, + const watchedPackages = await WatchedPackagesHelper.create(context, null, bucketId, { fromPieceType: { $in: [ExpectedPackageDBType.BUCKET_ADLIB, ExpectedPackageDBType.BUCKET_ADLIB_ACTION], }, + // We don't know what the `pieceId` will be, but we do know the `externalId` + pieceExternalId: payload.externalId, }) const contextForVariant = new ShowStyleUserContext( diff --git a/packages/job-worker/src/ingest/commit.ts b/packages/job-worker/src/ingest/commit.ts index 8ea4d4680f..b7abb04a01 100644 --- a/packages/job-worker/src/ingest/commit.ts +++ b/packages/job-worker/src/ingest/commit.ts @@ -19,7 +19,7 @@ import { removeRundownFromDb, } from '../rundownPlaylists.js' import { ReadonlyDeep } from 'type-fest' -import { IngestModel, 
IngestModelReadonly } from './model/IngestModel.js' +import { IngestDatabasePersistedModel, IngestModel, IngestModelReadonly } from './model/IngestModel.js' import { JobContext } from '../jobs/index.js' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' @@ -40,7 +40,6 @@ import { PlayoutRundownModelImpl } from '../playout/model/implementation/Playout import { PlayoutSegmentModelImpl } from '../playout/model/implementation/PlayoutSegmentModelImpl.js' import { createPlayoutModelFromIngestModel } from '../playout/model/implementation/LoadPlayoutModel.js' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' -import { DatabasePersistedModel } from '../modelBase.js' import { updateSegmentIdsForAdlibbedPartInstances } from './commit/updateSegmentIdsForAdlibbedPartInstances.js' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { AnyBulkWriteOperation } from 'mongodb' @@ -64,7 +63,7 @@ interface PlaylistIdPair { */ export async function CommitIngestOperation( context: JobContext, - ingestModel: IngestModel & DatabasePersistedModel, + ingestModel: IngestModel & IngestDatabasePersistedModel, beforeRundown: ReadonlyDeep | undefined, beforePartMap: BeforeIngestOperationPartMap, data: ReadonlyDeep @@ -223,7 +222,7 @@ export async function CommitIngestOperation( ) // Start the save - const pSaveIngest = ingestModel.saveAllToDatabase() + const pSaveIngest = ingestModel.saveAllToDatabase(playlistLock) pSaveIngest.catch(() => null) // Ensure promise isn't reported as unhandled await validateAdlibTestingSegment(context, playoutModel) diff --git a/packages/job-worker/src/ingest/expectedMediaItems.ts b/packages/job-worker/src/ingest/expectedMediaItems.ts index 8deb2d9213..afc3b468d8 100644 --- a/packages/job-worker/src/ingest/expectedMediaItems.ts +++ 
b/packages/job-worker/src/ingest/expectedMediaItems.ts @@ -89,7 +89,7 @@ function generateExpectedMediaItemsFull( ...generateExpectedMediaItems( doc._id, { - partId: doc.startPartId, + partId: doc.startPartId ?? undefined, rundownId: doc.startRundownId, }, studioId, @@ -254,7 +254,7 @@ export async function updateExpectedMediaItemsForRundownBaseline( const expectedMediaItems = generateExpectedMediaItemsFull( context.studio._id, ingestModel.rundownId, - [], + ingestModel.getGlobalPieces(), baselineAdlibPieces, baselineAdlibActions ) diff --git a/packages/job-worker/src/ingest/expectedPackages.ts b/packages/job-worker/src/ingest/expectedPackages.ts index 0ded178dd1..e5f346cd7d 100644 --- a/packages/job-worker/src/ingest/expectedPackages.ts +++ b/packages/job-worker/src/ingest/expectedPackages.ts @@ -1,35 +1,12 @@ -import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' -import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { BucketAdLibAction } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibAction' import { BucketAdLib } from '@sofie-automation/corelib/dist/dataModel/BucketAdLibPiece' import { ExpectedPackageDBType, - ExpectedPackageDBFromPiece, - ExpectedPackageDBFromBaselineAdLibPiece, - ExpectedPackageDBFromAdLibAction, - ExpectedPackageDBFromBaselineAdLibAction, - ExpectedPackageDBFromBucketAdLib, - ExpectedPackageDBFromBucketAdLibAction, - ExpectedPackageDBBase, - ExpectedPackageDBFromRundownBaselineObjects, - ExpectedPackageDBFromStudioBaselineObjects, - getContentVersionHash, - getExpectedPackageId, - ExpectedPackageFromRundown, + ExpectedPackageDB, + getExpectedPackageIdFromIngestSource, + ExpectedPackageIngestSource, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { - SegmentId, - RundownId, - AdLibActionId, - PieceId, - RundownBaselineAdLibActionId, - BucketAdLibActionId, - BucketAdLibId, - StudioId, -} from '@sofie-automation/corelib/dist/dataModel/Ids' 
-import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' -import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' -import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' +import { BucketId, BucketAdLibId, BucketAdLibActionId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { saveIntoDb } from '../db/changes.js' import { PlayoutModel } from '../playout/model/PlayoutModel.js' import { StudioPlayoutModel } from '../studio/model/StudioPlayoutModel.js' @@ -45,280 +22,90 @@ import { updateExpectedPlayoutItemsForRundownBaseline, } from './expectedPlayoutItems.js' import { JobContext, JobStudio } from '../jobs/index.js' -import { ExpectedPackageForIngestModelBaseline, IngestModel } from './model/IngestModel.js' +import { IngestModel } from './model/IngestModel.js' import { IngestPartModel } from './model/IngestPartModel.js' -import { clone } from '@sofie-automation/corelib/dist/lib' +import { clone, hashObj } from '@sofie-automation/corelib/dist/lib' -export function updateExpectedPackagesForPartModel(context: JobContext, part: IngestPartModel): void { +export function updateExpectedMediaAndPlayoutItemsForPartModel(context: JobContext, part: IngestPartModel): void { updateExpectedMediaItemsForPartModel(context, part) updateExpectedPlayoutItemsForPartModel(context, part) - - const expectedPackages: ExpectedPackageFromRundown[] = [ - ...generateExpectedPackagesForPiece( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.pieces, - ExpectedPackageDBType.PIECE - ), - ...generateExpectedPackagesForPiece( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.adLibPieces, - ExpectedPackageDBType.ADLIB_PIECE - ), - ...generateExpectedPackagesForAdlibAction( - context.studio, - part.part.rundownId, - part.part.segmentId, - part.adLibActions - ), - ] - - part.setExpectedPackages(expectedPackages) } -export 
async function updateExpectedPackagesForRundownBaseline( +export async function updateExpectedMediaAndPlayoutItemsForRundownBaseline( context: JobContext, ingestModel: IngestModel, - baseline: BlueprintResultBaseline | undefined, - forceBaseline = false + baseline: BlueprintResultBaseline | undefined ): Promise { await updateExpectedMediaItemsForRundownBaseline(context, ingestModel) await updateExpectedPlayoutItemsForRundownBaseline(context, ingestModel, baseline) +} - const expectedPackages: ExpectedPackageForIngestModelBaseline[] = [] - - const preserveTypesDuringSave = new Set() +function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlib: BucketAdLib) { + const packages: ExpectedPackageDB[] = [] - // Only regenerate the baseline types if they are already loaded into memory - // If the data isn't already loaded, then we haven't made any changes to the baseline adlibs - // This means we can skip regenerating them as it is guaranteed there will be no changes - const baselineAdlibPieceCache = forceBaseline - ? await ingestModel.rundownBaselineAdLibPieces.get() - : ingestModel.rundownBaselineAdLibPieces.getIfLoaded() - if (baselineAdlibPieceCache) { - expectedPackages.push( - ...generateExpectedPackagesForBaselineAdlibPiece( - context.studio, - ingestModel.rundownId, - baselineAdlibPieceCache - ) - ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.BASELINE_ADLIB_PIECE) - } - const baselineAdlibActionCache = forceBaseline - ? 
await ingestModel.rundownBaselineAdLibActions.get() - : ingestModel.rundownBaselineAdLibActions.getIfLoaded() - if (baselineAdlibActionCache) { - expectedPackages.push( - ...generateExpectedPackagesForBaselineAdlibAction( - context.studio, - ingestModel.rundownId, - baselineAdlibActionCache + if (adlib.expectedPackages) { + packages.push( + ...generateBucketExpectedPackages( + studio, + adlib.bucketId, + { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, + pieceId: adlib._id, + pieceExternalId: adlib.externalId, + }, + adlib.expectedPackages ) ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.BASELINE_ADLIB_ACTION) - } - - if (baseline) { - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(baseline.expectedPackages) - - const bases = generateExpectedPackageBases( - context.studio, - ingestModel.rundownId, - baseline.expectedPackages ?? [] - ) - - expectedPackages.push( - ...bases.map((item): ExpectedPackageDBFromRundownBaselineObjects => { - return { - ...item, - fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS, - rundownId: ingestModel.rundownId, - pieceId: null, - } - }) - ) - } else { - // We haven't regenerated anything, so preserve the values in the save - preserveTypesDuringSave.add(ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS) - } - - // Preserve anything existing - for (const expectedPackage of ingestModel.expectedPackagesForRundownBaseline) { - if (preserveTypesDuringSave.has(expectedPackage.fromPieceType)) { - expectedPackages.push(clone(expectedPackage)) - } } - ingestModel.setExpectedPackagesForRundownBaseline(expectedPackages) -} - -function generateExpectedPackagesForPiece( - studio: ReadonlyDeep, - rundownId: RundownId, - segmentId: SegmentId, - pieces: ReadonlyDeep[], - type: ExpectedPackageDBType.PIECE | ExpectedPackageDBType.ADLIB_PIECE -) { - const packages: ExpectedPackageDBFromPiece[] = [] - for (const piece of 
pieces) { - const partId = 'startPartId' in piece ? piece.startPartId : piece.partId - if (piece.expectedPackages && partId) { - const bases = generateExpectedPackageBases(studio, piece._id, piece.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - segmentId, - partId, - pieceId: piece._id, - fromPieceType: type, - }) - } - } - } - return packages -} -function generateExpectedPackagesForBaselineAdlibPiece( - studio: ReadonlyDeep, - rundownId: RundownId, - pieces: ReadonlyDeep -) { - const packages: ExpectedPackageDBFromBaselineAdLibPiece[] = [] - for (const piece of pieces) { - if (piece.expectedPackages) { - const bases = generateExpectedPackageBases(studio, piece._id, piece.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - pieceId: piece._id, - fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE, - }) - } - } - } - return packages -} -function generateExpectedPackagesForAdlibAction( - studio: ReadonlyDeep, - rundownId: RundownId, - segmentId: SegmentId, - actions: ReadonlyDeep -) { - const packages: ExpectedPackageDBFromAdLibAction[] = [] - for (const action of actions) { - if (action.expectedPackages) { - const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - segmentId, - partId: action.partId, - pieceId: action._id, - fromPieceType: ExpectedPackageDBType.ADLIB_ACTION, - }) - } - } - } - return packages -} -function generateExpectedPackagesForBaselineAdlibAction( - studio: ReadonlyDeep, - rundownId: RundownId, - actions: ReadonlyDeep -) { - const packages: ExpectedPackageDBFromBaselineAdLibAction[] = [] - for (const action of actions) { - if (action.expectedPackages) { - const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - rundownId, - pieceId: action._id, - fromPieceType: 
ExpectedPackageDBType.BASELINE_ADLIB_ACTION, - }) - } - } - } - return packages -} -function generateExpectedPackagesForBucketAdlib(studio: ReadonlyDeep, adlibs: BucketAdLib[]) { - const packages: ExpectedPackageDBFromBucketAdLib[] = [] - for (const adlib of adlibs) { - if (adlib.expectedPackages) { - const bases = generateExpectedPackageBases(studio, adlib._id, adlib.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - bucketId: adlib.bucketId, - pieceId: adlib._id, - pieceExternalId: adlib.externalId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, - }) - } - } - } return packages } -function generateExpectedPackagesForBucketAdlibAction( - studio: ReadonlyDeep, - adlibActions: BucketAdLibAction[] -) { - const packages: ExpectedPackageDBFromBucketAdLibAction[] = [] - for (const action of adlibActions) { - if (action.expectedPackages) { - const bases = generateExpectedPackageBases(studio, action._id, action.expectedPackages) - for (const base of bases) { - packages.push({ - ...base, - bucketId: action.bucketId, +function generateExpectedPackagesForBucketAdlibAction(studio: ReadonlyDeep, action: BucketAdLibAction) { + const packages: ExpectedPackageDB[] = [] + + if (action.expectedPackages) { + packages.push( + ...generateBucketExpectedPackages( + studio, + action.bucketId, + { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, pieceId: action._id, pieceExternalId: action.externalId, - fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, - }) - } - } + }, + action.expectedPackages + ) + ) } + return packages } -function generateExpectedPackageBases( +function generateBucketExpectedPackages( studio: ReadonlyDeep, - ownerId: - | PieceId - | AdLibActionId - | RundownBaselineAdLibActionId - | BucketAdLibId - | BucketAdLibActionId - | RundownId - | StudioId, + bucketId: BucketId, + source: ExpectedPackageIngestSource, expectedPackages: ReadonlyDeep -) { - const bases: Omit[] = [] +): ExpectedPackageDB[] { + const bases: 
ExpectedPackageDB[] = [] for (let i = 0; i < expectedPackages.length; i++) { const expectedPackage = expectedPackages[i] const id = expectedPackage._id || '__unnamed' + i bases.push({ - ...clone(expectedPackage), - _id: getExpectedPackageId(ownerId, id), - blueprintPackageId: id, - contentVersionHash: getContentVersionHash(expectedPackage), + _id: getExpectedPackageIdFromIngestSource(bucketId, source, id), + package: { + ...clone(expectedPackage), + _id: id, + }, studioId: studio._id, - created: Date.now(), + rundownId: null, + bucketId: bucketId, + created: Date.now(), // This will be preserved during the `saveIntoDb` + ingestSources: [source], }) } + return bases } @@ -326,39 +113,102 @@ export async function updateExpectedPackagesForBucketAdLibPiece( context: JobContext, adlib: BucketAdLib ): Promise { - const packages = generateExpectedPackagesForBucketAdlib(context.studio, [adlib]) - - await saveIntoDb(context, context.directCollections.ExpectedPackages, { pieceId: adlib._id }, packages) + const packages = generateExpectedPackagesForBucketAdlib(context.studio, adlib) + + await saveIntoDb( + context, + context.directCollections.ExpectedPackages, + { + studioId: context.studioId, + bucketId: adlib.bucketId, + // Note: This assumes that there is only one ingest source for each piece + ingestSources: { + $elemMatch: { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, + pieceId: adlib._id, + }, + }, + }, + packages, + { + beforeDiff: (obj, oldObj) => { + return { + ...obj, + // Preserve old created timestamp + created: oldObj.created, + } + }, + } + ) } export async function updateExpectedPackagesForBucketAdLibAction( context: JobContext, action: BucketAdLibAction ): Promise { - const packages = generateExpectedPackagesForBucketAdlibAction(context.studio, [action]) - - await saveIntoDb(context, context.directCollections.ExpectedPackages, { pieceId: action._id }, packages) + const packages = generateExpectedPackagesForBucketAdlibAction(context.studio, action) + + 
await saveIntoDb( + context, + context.directCollections.ExpectedPackages, + { + studioId: context.studioId, + bucketId: action.bucketId, + // Note: This assumes that there is only one ingest source for each piece + ingestSources: { + $elemMatch: { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, + pieceId: action._id, + }, + }, + }, + packages, + { + beforeDiff: (obj, oldObj) => { + return { + ...obj, + // Preserve old created timestamp + created: oldObj.created, + } + }, + } + ) } + export async function cleanUpExpectedPackagesForBucketAdLibs( context: JobContext, + bucketId: BucketId, adLibIds: BucketAdLibId[] ): Promise { if (adLibIds.length > 0) { await context.directCollections.ExpectedPackages.remove({ - pieceId: { - $in: adLibIds, + studioId: context.studioId, + bucketId: bucketId, + // Note: This assumes that there is only one ingest source for each piece + ingestSources: { + $elemMatch: { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB, + pieceId: { $in: adLibIds }, + }, }, }) } } export async function cleanUpExpectedPackagesForBucketAdLibsActions( context: JobContext, + bucketId: BucketId, adLibIds: BucketAdLibActionId[] ): Promise { if (adLibIds.length > 0) { await context.directCollections.ExpectedPackages.remove({ - pieceId: { - $in: adLibIds, + studioId: context.studioId, + bucketId: bucketId, + // Note: This assumes that there is only one ingest source for each piece + ingestSources: { + $elemMatch: { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, + pieceId: { $in: adLibIds }, + }, }, }) } @@ -371,19 +221,7 @@ export function updateBaselineExpectedPackagesOnStudio( ): void { updateBaselineExpectedPlayoutItemsOnStudio(context, playoutModel, baseline.expectedPlayoutItems ?? []) - // Fill in ids of unnamed expectedPackages - setDefaultIdOnExpectedPackages(baseline.expectedPackages) - - const bases = generateExpectedPackageBases(context.studio, context.studio._id, baseline.expectedPackages ?? 
[]) - playoutModel.setExpectedPackagesForStudioBaseline( - bases.map((item): ExpectedPackageDBFromStudioBaselineObjects => { - return { - ...item, - fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, - pieceId: null, - } - }) - ) + playoutModel.setExpectedPackagesForStudioBaseline(baseline.expectedPackages ?? []) } export function setDefaultIdOnExpectedPackages(expectedPackages: ExpectedPackage.Any[] | undefined): void { @@ -394,6 +232,16 @@ export function setDefaultIdOnExpectedPackages(expectedPackages: ExpectedPackage if (!expectedPackage._id) { expectedPackage._id = `__index${i}` } + + expectedPackage.contentVersionHash = getContentVersionHash(expectedPackage) } } } + +function getContentVersionHash(expectedPackage: ReadonlyDeep>): string { + return hashObj({ + content: expectedPackage.content, + version: expectedPackage.version, + // todo: should expectedPackage.sources.containerId be here as well? + }) +} diff --git a/packages/job-worker/src/ingest/expectedPlayoutItems.ts b/packages/job-worker/src/ingest/expectedPlayoutItems.ts index 6451f00b65..2e436e5b81 100644 --- a/packages/job-worker/src/ingest/expectedPlayoutItems.ts +++ b/packages/job-worker/src/ingest/expectedPlayoutItems.ts @@ -62,6 +62,9 @@ export async function updateExpectedPlayoutItemsForRundownBaseline( for (const action of baselineAdlibActions) { baselineExpectedPlayoutItems.push(...extractExpectedPlayoutItems(studioId, rundownId, undefined, action)) } + for (const piece of ingestModel.getGlobalPieces()) { + baselineExpectedPlayoutItems.push(...extractExpectedPlayoutItems(studioId, rundownId, undefined, piece)) + } if (baseline) { for (const item of baseline.expectedPlayoutItems ?? 
[]) { @@ -93,7 +96,7 @@ export function updateExpectedPlayoutItemsForPartModel(context: JobContext, part const expectedPlayoutItems: ExpectedPlayoutItemRundown[] = [] for (const piece of part.pieces) { expectedPlayoutItems.push( - ...extractExpectedPlayoutItems(studioId, part.part.rundownId, piece.startPartId, piece) + ...extractExpectedPlayoutItems(studioId, part.part.rundownId, piece.startPartId ?? undefined, piece) ) } for (const piece of part.adLibPieces) { diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts index fc5641faf2..81a9c04bff 100644 --- a/packages/job-worker/src/ingest/generationRundown.ts +++ b/packages/job-worker/src/ingest/generationRundown.ts @@ -1,7 +1,11 @@ -import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + ExpectedPackageDBType, + ExpectedPackageIngestSourceRundownBaseline, + getExpectedPackageIdFromIngestSource, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { BlueprintId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes' -import { serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' +import { Piece, serializePieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBRundown, RundownSource } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { literal } from '@sofie-automation/corelib/dist/lib' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' @@ -11,6 +15,7 @@ import { WatchedPackagesHelper } from '../blueprints/context/watchedPackages.js' import { postProcessAdLibPieces, postProcessGlobalAdLibActions, + postProcessGlobalPieces, postProcessRundownBaselineItems, } from '../blueprints/postProcess.js' import { 
logger } from '../logging.js' @@ -20,13 +25,20 @@ import { extendIngestRundownCore, canRundownBeUpdated } from './lib.js' import { JobContext } from '../jobs/index.js' import { CommitIngestData } from './lock.js' import { SelectedShowStyleVariant, selectShowStyleVariant } from './selectShowStyleVariant.js' -import { updateExpectedPackagesForRundownBaseline } from './expectedPackages.js' +import { updateExpectedMediaAndPlayoutItemsForRundownBaseline } from './expectedPackages.js' import { ReadonlyDeep } from 'type-fest' -import { BlueprintResultRundown, ExtendedIngestRundown } from '@sofie-automation/blueprints-integration' +import { + BlueprintResultRundown, + ExpectedPackage, + ExtendedIngestRundown, +} from '@sofie-automation/blueprints-integration' import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' import { convertRundownToBlueprintSegmentRundown, translateUserEditsFromBlueprint } from '../blueprints/context/lib.js' import { calculateSegmentsAndRemovalsFromIngestData } from './generationSegment.js' import { SofieIngestRundownWithSource } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' +import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' +import { IngestExpectedPackage } from './model/IngestExpectedPackage.js' export enum GenerateRundownMode { Create = 'create', @@ -160,8 +172,6 @@ export async function updateRundownFromIngestDataInner( return null } - // TODO - store notes from rundownNotesContext - let regenerateAllContents = true if (generateMode == GenerateRundownMode.MetadataChange) { regenerateAllContents = @@ -206,8 +216,8 @@ export async function regenerateRundownAndBaselineFromIngestData( const rundownBaselinePackages = allRundownWatchedPackages.filter( context, (pkg) => - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION 
|| - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + pkg.source.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || + pkg.source.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS ) const blueprintContext = new GetRundownContext( @@ -295,6 +305,7 @@ export async function regenerateRundownAndBaselineFromIngestData( logger.info(`... got ${rundownRes.baseline.timelineObjects.length} objects from baseline.`) logger.info(`... got ${rundownRes.globalAdLibPieces.length} adLib objects from baseline.`) logger.info(`... got ${(rundownRes.globalActions || []).length} adLib actions from baseline.`) + logger.info(`... got ${(rundownRes.globalPieces || []).length} global pieces from baseline.`) const timelineObjectsBlob = serializePieceTimelineObjectsBlob( postProcessRundownBaselineItems(showStyle.base.blueprintId, rundownRes.baseline.timelineObjects) @@ -312,10 +323,92 @@ export async function regenerateRundownAndBaselineFromIngestData( dbRundown._id, rundownRes.globalActions || [] ) + const globalPieces = postProcessGlobalPieces( + context, + rundownRes.globalPieces || [], + showStyle.base.blueprintId, + dbRundown._id + ) + + const expectedPackages = generateExpectedPackagesForBaseline( + dbRundown._id, + adlibPieces, + adlibActions, + globalPieces, + rundownRes.baseline.expectedPackages ?? 
[] + ) - await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions) + await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions, globalPieces, expectedPackages) - await updateExpectedPackagesForRundownBaseline(context, ingestModel, rundownRes.baseline) + await updateExpectedMediaAndPlayoutItemsForRundownBaseline(context, ingestModel, rundownRes.baseline) return dbRundown } + +function generateExpectedPackagesForBaseline( + rundownId: RundownId, + adLibPieces: AdLibPiece[], + adLibActions: RundownBaselineAdLibAction[], + globalPieces: Piece[], + expectedPackages: ExpectedPackage.Any[] +): IngestExpectedPackage[] { + const packages: IngestExpectedPackage[] = [] + + const wrapPackage = ( + expectedPackage: ReadonlyDeep, + source: ExpectedPackageIngestSourceRundownBaseline + ): IngestExpectedPackage => { + return { + _id: getExpectedPackageIdFromIngestSource(rundownId, source, expectedPackage._id), + + package: expectedPackage, + + source: source, + } + } + + // Future: this will need to deduplicate packages with the same content + // For now, we just generate a package for each expectedPackage + + for (const expectedPackage of expectedPackages) { + packages.push( + wrapPackage(expectedPackage, { + fromPieceType: ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS, + }) + ) + } + + // Populate the ingestSources + for (const piece of adLibPieces) { + for (const expectedPackage of piece.expectedPackages || []) { + packages.push( + wrapPackage(expectedPackage, { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE, + pieceId: piece._id, + }) + ) + } + } + for (const piece of adLibActions) { + for (const expectedPackage of piece.expectedPackages || []) { + packages.push( + wrapPackage(expectedPackage, { + fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_ACTION, + pieceId: piece._id, + }) + ) + } + } + for (const piece of globalPieces) { + for (const expectedPackage of piece.expectedPackages || []) { + packages.push( 
+ wrapPackage(expectedPackage, { + fromPieceType: ExpectedPackageDBType.BASELINE_PIECE, + pieceId: piece._id, + }) + ) + } + } + + return packages +} diff --git a/packages/job-worker/src/ingest/generationSegment.ts b/packages/job-worker/src/ingest/generationSegment.ts index 5b679eebe8..dc1c7743b8 100644 --- a/packages/job-worker/src/ingest/generationSegment.ts +++ b/packages/job-worker/src/ingest/generationSegment.ts @@ -19,7 +19,7 @@ import { SofieIngestSegment, } from '@sofie-automation/blueprints-integration' import { wrapTranslatableMessageFromBlueprints } from '@sofie-automation/corelib/dist/TranslatableMessage' -import { updateExpectedPackagesForPartModel } from './expectedPackages.js' +import { updateExpectedMediaAndPlayoutItemsForPartModel } from './expectedPackages.js' import { IngestReplacePartType, IngestSegmentModel } from './model/IngestSegmentModel.js' import { ReadonlyDeep } from 'type-fest' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' @@ -107,7 +107,7 @@ async function regenerateSegmentAndUpdateModelFull( const segmentId = ingestModel.getSegmentIdFromExternalId(ingestSegment.externalId) const segmentWatchedPackages = allRundownWatchedPackages.filter( context, - (p) => 'segmentId' in p && p.segmentId === segmentId + (p) => 'segmentId' in p.source && p.source.segmentId === segmentId ) let updatedSegmentModel = await regenerateSegmentAndUpdateModel( @@ -191,9 +191,8 @@ async function checkIfSegmentReferencesUnloadedPackageInfos( // check if there are any updates right away? 
for (const part of segmentModel.parts) { for (const expectedPackage of part.expectedPackages) { - if (expectedPackage.listenToPackageInfoUpdates) { - const loadedPackage = segmentWatchedPackages.getPackage(expectedPackage._id) - if (!loadedPackage) { + if (expectedPackage.package.listenToPackageInfoUpdates) { + if (!segmentWatchedPackages.hasPackage(expectedPackage._id)) { // The package didn't exist prior to the blueprint running expectedPackageIdsToCheck.add(expectedPackage._id) } @@ -411,7 +410,7 @@ function updateModelWithGeneratedPart( ) const partModel = segmentModel.replacePart(part, processedPieces, adlibPieces, adlibActions) - updateExpectedPackagesForPartModel(context, partModel) + updateExpectedMediaAndPlayoutItemsForPartModel(context, partModel) } /** diff --git a/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts b/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts new file mode 100644 index 0000000000..9450a7bd5d --- /dev/null +++ b/packages/job-worker/src/ingest/model/IngestExpectedPackage.ts @@ -0,0 +1,24 @@ +import type { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import type { + ExpectedPackageDBType, + ExpectedPackageIngestSourcePart, + ExpectedPackageIngestSourceRundownBaseline, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import type { ExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { ReadonlyDeep } from 'type-fest' + +/** + * A simpler form of ExpectedPackageDB that is scoped to the properties relevant to ingest. 
+ * This is limited to be owned by one source, during the save process the documents will be merged + */ +export interface IngestExpectedPackage< + TPackageSource extends { fromPieceType: ExpectedPackageDBType } = + | ExpectedPackageIngestSourcePart + | ExpectedPackageIngestSourceRundownBaseline, +> { + _id: ExpectedPackageId + + package: ReadonlyDeep + + source: TPackageSource +} diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts index 9098cd7225..f6e413465e 100644 --- a/packages/job-worker/src/ingest/model/IngestModel.ts +++ b/packages/job-worker/src/ingest/model/IngestModel.ts @@ -1,10 +1,5 @@ import { ExpectedMediaItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' -import { - ExpectedPackageDBFromBaselineAdLibAction, - ExpectedPackageDBFromBaselineAdLibPiece, - ExpectedPackageDBFromRundownBaselineObjects, - ExpectedPackageFromRundown, -} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import type { ExpectedPackageIngestSource } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { ExpectedPackageId, @@ -19,7 +14,7 @@ import { CoreUserEditingDefinition } from '@sofie-automation/corelib/dist/dataMo import { RundownBaselineAdLibAction } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibAction' import { RundownBaselineAdLibItem } from '@sofie-automation/corelib/dist/dataModel/RundownBaselineAdLibPiece' import { LazyInitialiseReadonly } from '../../lib/lazy.js' -import { RundownLock } from '../../jobs/lock.js' +import type { PlaylistLock, RundownLock } from '../../jobs/lock.js' import { IngestSegmentModel, IngestSegmentModelReadonly } from './IngestSegmentModel.js' import { IngestPartModel, IngestPartModelReadonly } from './IngestPartModel.js' import { ReadonlyDeep } from 'type-fest' @@ -32,12 +27,7 @@ import { 
ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../jobs/sh import { WrappedShowStyleBlueprint } from '../../blueprints/cache.js' import { IBlueprintRundown } from '@sofie-automation/blueprints-integration' import type { INotificationsModel } from '../../notifications/NotificationsModel.js' - -export type ExpectedPackageForIngestModelBaseline = - | ExpectedPackageDBFromBaselineAdLibAction - | ExpectedPackageDBFromBaselineAdLibPiece - | ExpectedPackageDBFromRundownBaselineObjects -export type ExpectedPackageForIngestModel = ExpectedPackageFromRundown | ExpectedPackageForIngestModelBaseline +import type { IngestExpectedPackage } from './IngestExpectedPackage.js' export interface IngestModelReadonly { /** @@ -66,7 +56,7 @@ export interface IngestModelReadonly { /** * The ExpectedPackages for the baseline of this Rundown */ - readonly expectedPackagesForRundownBaseline: ReadonlyDeep[] + readonly expectedPackagesForRundownBaseline: ReadonlyDeep[] /** * The baseline Timeline objects of this Rundown @@ -130,6 +120,11 @@ export interface IngestModelReadonly { */ getAllPieces(): ReadonlyDeep[] + /** + * Get the Pieces which belong to the Rundown, not a Part + */ + getGlobalPieces(): ReadonlyDeep[] + /** * Search for a Part through the whole Rundown * @param id Id of the Part @@ -146,7 +141,7 @@ export interface IngestModelReadonly { * Search for an ExpectedPackage through the whole Rundown * @param id Id of the ExpectedPackage */ - findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep | undefined + findExpectedPackageIngestSources(packageId: ExpectedPackageId): ReadonlyDeep[] } export interface IngestModel extends IngestModelReadonly, BaseModel, INotificationsModel { @@ -214,12 +209,6 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati */ setExpectedMediaItemsForRundownBaseline(expectedMediaItems: ExpectedMediaItemRundown[]): void - /** - * Set the ExpectedPackages for the baseline of this Rundown - * @param 
expectedPackages The new ExpectedPackages - */ - setExpectedPackagesForRundownBaseline(expectedPackages: ExpectedPackageForIngestModelBaseline[]): void - /** * Set the data for this Rundown. * This will either update or create the Rundown @@ -245,11 +234,14 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati * @param timelineObjectsBlob Rundown baseline timeline objects * @param adlibPieces Rundown adlib pieces * @param adlibActions Rundown adlib actions + * @param pieces Rundown owned pieces */ setRundownBaseline( timelineObjectsBlob: PieceTimelineObjectsBlob, adlibPieces: RundownBaselineAdLibItem[], - adlibActions: RundownBaselineAdLibAction[] + adlibActions: RundownBaselineAdLibAction[], + pieces: Piece[], + expectedPackages: IngestExpectedPackage[] ): Promise /** @@ -274,3 +266,10 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati } export type IngestReplaceSegmentType = Omit + +export interface IngestDatabasePersistedModel { + /** + * Issue a save of the contents of this model to the database + */ + saveAllToDatabase(lock: PlaylistLock): Promise +} diff --git a/packages/job-worker/src/ingest/model/IngestPartModel.ts b/packages/job-worker/src/ingest/model/IngestPartModel.ts index 610be862c6..352eb17b72 100644 --- a/packages/job-worker/src/ingest/model/IngestPartModel.ts +++ b/packages/job-worker/src/ingest/model/IngestPartModel.ts @@ -3,9 +3,9 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { ExpectedMediaItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' -import { ExpectedPackageFromRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { Piece 
} from '@sofie-automation/corelib/dist/dataModel/Piece' +import { IngestExpectedPackage } from './IngestExpectedPackage.js' export interface IngestPartModelReadonly { /** @@ -37,7 +37,7 @@ export interface IngestPartModelReadonly { /** * The ExpectedPackages belonging to this Part */ - readonly expectedPackages: ReadonlyDeep[] + readonly expectedPackages: ReadonlyDeep[] } /** * Wrap a Part and its contents in a view for Ingest operations @@ -60,10 +60,4 @@ export interface IngestPartModel extends IngestPartModelReadonly { * @param expectedMediaItems The new ExpectedMediaItems */ setExpectedMediaItems(expectedMediaItems: ExpectedMediaItemRundown[]): void - - /** - * Set the ExpectedPackages for the contents of this Part - * @param expectedPackages The new ExpectedPackages - */ - setExpectedPackages(expectedPackages: ExpectedPackageFromRundown[]): void } diff --git a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts index c3396f04be..a45e3e19c4 100644 --- a/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts +++ b/packages/job-worker/src/ingest/model/implementation/DocumentChangeTracker.ts @@ -98,6 +98,10 @@ export class DocumentChangeTracker }> { return Array.from(this.#deletedIds.values()) } + getDocumentsToSave(): ReadonlyMap { + return this.#documentsToSave + } + /** * Generate the mongodb BulkWrite operations for the documents known to this tracker * @returns mongodb BulkWrite operations diff --git a/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts b/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts index 59eda4a782..f1b64dd5f3 100644 --- a/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts +++ b/packages/job-worker/src/ingest/model/implementation/ExpectedPackagesStore.ts @@ -1,5 +1,4 @@ import { ExpectedMediaItemRundown } from 
'@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' -import { ExpectedPackageDBBase } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { ExpectedMediaItemId, @@ -7,26 +6,22 @@ import { ExpectedPlayoutItemId, PartId, RundownId, - SegmentId, } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' -import { diffAndReturnLatestObjects, DocumentChanges, getDocumentChanges, setValuesAndTrackChanges } from './utils.js' - -function mutateExpectedPackage( - oldObj: ExpectedPackageType, - newObj: ExpectedPackageType -): ExpectedPackageType { - return { - ...newObj, - // Retain the created property - created: oldObj.created, - } -} - -export class ExpectedPackagesStore { +import { + diffAndReturnLatestObjects, + DocumentChanges, + getDocumentChanges, + setValuesAndTrackChanges, + setValuesAndTrackChangesFunc, +} from './utils.js' +import type { IngestExpectedPackage } from '../IngestExpectedPackage.js' +import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' + +export class ExpectedPackagesStore { #expectedMediaItems: ExpectedMediaItemRundown[] #expectedPlayoutItems: ExpectedPlayoutItemRundown[] - #expectedPackages: ExpectedPackageType[] + #expectedPackages: IngestExpectedPackage[] #expectedMediaItemsWithChanges = new Set() #expectedPlayoutItemsWithChanges = new Set() @@ -38,8 +33,8 @@ export class ExpectedPackagesStore { return this.#expectedPlayoutItems } - get expectedPackages(): ReadonlyDeep { - // Typescript is not happy with turning ExpectedPackageType into ReadonlyDeep because it can be a union + get expectedPackages(): ReadonlyDeep[]> { + // Typescript is not happy because of the generic return this.#expectedPackages as any } @@ -57,7 +52,7 @@ export class ExpectedPackagesStore { return getDocumentChanges(this.#expectedPlayoutItemsWithChanges, 
this.#expectedPlayoutItems) } - get expectedPackagesChanges(): DocumentChanges { + get expectedPackagesChanges(): DocumentChanges> { return getDocumentChanges(this.#expectedPackagesWithChanges, this.#expectedPackages) } @@ -68,20 +63,17 @@ export class ExpectedPackagesStore[] ) { this.#rundownId = rundownId - this.#segmentId = segmentId this.#partId = partId this.#expectedMediaItems = expectedMediaItems @@ -102,9 +94,12 @@ export class ExpectedPackagesStore boolean + ): void { this.#rundownId = rundownId - this.#segmentId = segmentId this.#partId = partId setValuesAndTrackChanges(this.#expectedPlayoutItemsWithChanges, this.#expectedPlayoutItems, { @@ -115,15 +110,12 @@ export class ExpectedPackagesStore + updatePackageSource(pkg.source) + ) } - compareToPreviousData(oldStore: ExpectedPackagesStore): void { + compareToPreviousData(oldStore: ExpectedPackagesStore): void { // Diff the objects, but don't update the stored copies diffAndReturnLatestObjects( this.#expectedPlayoutItemsWithChanges, @@ -138,8 +130,7 @@ export class ExpectedPackagesStore ({ - ...pkg, - partId: this.#partId, - segmentId: this.#segmentId, - rundownId: this.#rundownId, - })) - + setExpectedPackages(expectedPackages: IngestExpectedPackage[]): void { this.#expectedPackages = diffAndReturnLatestObjects( this.#expectedPackagesWithChanges, this.#expectedPackages, - newExpectedPackages, - mutateExpectedPackage + expectedPackages ) } } diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts index 5748a9e210..60076c6951 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts @@ -4,7 +4,9 @@ import { ExpectedMediaItemRundown } from '@sofie-automation/corelib/dist/dataMod import { ExpectedPackageDB, ExpectedPackageDBType, - ExpectedPackageFromRundown, + ExpectedPackageIngestSource, + 
ExpectedPackageIngestSourcePart, + ExpectedPackageIngestSourceRundownBaseline, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { @@ -28,11 +30,12 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { JobContext, ProcessedShowStyleBase, ProcessedShowStyleVariant } from '../../../jobs/index.js' import { LazyInitialise, LazyInitialiseReadonly } from '../../../lib/lazy.js' import { getRundownId, getSegmentId } from '../../lib.js' -import { RundownLock } from '../../../jobs/lock.js' +import { PlaylistLock, RundownLock } from '../../../jobs/lock.js' import { IngestSegmentModel } from '../IngestSegmentModel.js' import { IngestSegmentModelImpl } from './IngestSegmentModelImpl.js' import { IngestPartModel } from '../IngestPartModel.js' import { + assertNever, clone, Complete, deleteAllUndefinedProperties, @@ -41,15 +44,9 @@ import { literal, } from '@sofie-automation/corelib/dist/lib' import { IngestPartModelImpl } from './IngestPartModelImpl.js' -import { DatabasePersistedModel } from '../../../modelBase.js' import { ExpectedPackagesStore } from './ExpectedPackagesStore.js' import { ReadonlyDeep } from 'type-fest' -import { - ExpectedPackageForIngestModel, - ExpectedPackageForIngestModelBaseline, - IngestModel, - IngestReplaceSegmentType, -} from '../IngestModel.js' +import { IngestDatabasePersistedModel, IngestModel, IngestReplaceSegmentType } from '../IngestModel.js' import { RundownNote } from '@sofie-automation/corelib/dist/dataModel/Notes' import { diffAndReturnLatestObjects } from './utils.js' import _ from 'underscore' @@ -62,6 +59,7 @@ import { generateWriteOpsForLazyDocuments } from './DocumentChangeTracker.js' import { IS_PRODUCTION } from '../../../environment.js' import { logger } from '../../../logging.js' import { NotificationsModelHelper } from 
'../../../notifications/NotificationsModelHelper.js' +import { IngestExpectedPackage } from '../IngestExpectedPackage.js' export interface IngestModelImplExistingData { rundown: DBRundown @@ -86,7 +84,7 @@ interface SegmentWrapper { /** * Cache of relevant documents for an Ingest Operation */ -export class IngestModelImpl implements IngestModel, DatabasePersistedModel { +export class IngestModelImpl implements IngestModel, IngestDatabasePersistedModel { public readonly isIngest = true public readonly rundownLock: RundownLock @@ -116,8 +114,10 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { } protected readonly segmentsImpl: Map + readonly #piecesWithChanges = new Set() + #piecesImpl: ReadonlyArray - readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore + readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore get rundownBaselineTimelineObjects(): LazyInitialiseReadonly { // Return a simplified view of what we store, of just `timelineObjectsString` @@ -149,7 +149,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { get expectedPlayoutItemsForRundownBaseline(): ReadonlyDeep[] { return [...this.#rundownBaselineExpectedPackagesStore.expectedPlayoutItems] } - get expectedPackagesForRundownBaseline(): ReadonlyDeep[] { + get expectedPackagesForRundownBaseline(): ReadonlyDeep[] { return [...this.#rundownBaselineExpectedPackagesStore.expectedPackages] } @@ -176,25 +176,14 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { const groupedExpectedMediaItems = groupByToMap(existingData.expectedMediaItems, 'partId') const groupedExpectedPlayoutItems = groupByToMap(existingData.expectedPlayoutItems, 'partId') - const rundownExpectedPackages = existingData.expectedPackages.filter( - (pkg): pkg is ExpectedPackageFromRundown => - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType 
=== ExpectedPackageDBType.ADLIB_ACTION - ) - const groupedExpectedPackages = groupByToMap(rundownExpectedPackages, 'partId') - const baselineExpectedPackages = existingData.expectedPackages.filter( - (pkg): pkg is ExpectedPackageForIngestModelBaseline => - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS + const { baselineExpectedPackages, groupedExpectedPackagesByPart } = groupExpectedPackages( + existingData.expectedPackages ) this.#rundownBaselineExpectedPackagesStore = new ExpectedPackagesStore( false, this.rundownId, undefined, - undefined, groupedExpectedMediaItems.get(undefined) ?? [], groupedExpectedPlayoutItems.get(undefined) ?? [], baselineExpectedPackages @@ -215,7 +204,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { groupedAdLibActions.get(part._id) ?? [], groupedExpectedMediaItems.get(part._id) ?? [], groupedExpectedPlayoutItems.get(part._id) ?? [], - groupedExpectedPackages.get(part._id) ?? [] + groupedExpectedPackagesByPart.get(part._id) ?? [] ) ) this.segmentsImpl.set(segment._id, { @@ -224,6 +213,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { }) } + this.#piecesImpl = groupedPieces.get(null) ?? 
[] + this.#rundownBaselineObjs = new LazyInitialise(async () => context.directCollections.RundownBaselineObjects.findFetch({ rundownId: this.rundownId, @@ -246,13 +237,13 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { true, this.rundownId, undefined, - undefined, [], [], [] ) this.segmentsImpl = new Map() + this.#piecesImpl = [] this.#rundownBaselineObjs = new LazyInitialise(async () => []) this.#rundownBaselineAdLibPieces = new LazyInitialise(async () => []) @@ -334,6 +325,10 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { return this.getAllOrderedParts().flatMap((part) => part.pieces) } + getGlobalPieces(): ReadonlyDeep[] { + return [...this.#piecesImpl] + } + findPart(partId: PartId): IngestPartModel | undefined { for (const segment of this.segmentsImpl.values()) { if (!segment || segment.deleted) continue @@ -352,18 +347,20 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { return undefined } - findExpectedPackage(packageId: ExpectedPackageId): ReadonlyDeep | undefined { + findExpectedPackageIngestSources(packageId: ExpectedPackageId): ReadonlyDeep[] { + const sources: ReadonlyDeep[] = [] + const baselinePackage = this.#rundownBaselineExpectedPackagesStore.expectedPackages.find( (pkg) => pkg._id === packageId ) - if (baselinePackage) return baselinePackage + if (baselinePackage) sources.push(baselinePackage.source) for (const part of this.getAllOrderedParts()) { const partPackage = part.expectedPackages.find((pkg) => pkg._id === packageId) - if (partPackage) return partPackage + if (partPackage) sources.push(partPackage.source) } - return undefined + return sources } removeSegment(id: SegmentId): void { @@ -416,10 +413,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { setExpectedMediaItemsForRundownBaseline(expectedMediaItems: ExpectedMediaItemRundown[]): void { 
this.#rundownBaselineExpectedPackagesStore.setExpectedMediaItems(expectedMediaItems) } - setExpectedPackagesForRundownBaseline(expectedPackages: ExpectedPackageForIngestModelBaseline[]): void { - // Future: should these be here, or held as part of each adlib? - this.#rundownBaselineExpectedPackagesStore.setExpectedPackages(expectedPackages) - } setRundownData( rundownData: IBlueprintRundown, @@ -462,7 +455,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { // owned by elsewhere airStatus: this.#rundownImpl?.airStatus, status: this.#rundownImpl?.status, - notifiedCurrentPlayingPartExternalId: this.#rundownImpl?.notifiedCurrentPlayingPartExternalId, }) deleteAllUndefinedProperties(newRundown) @@ -477,7 +469,9 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { async setRundownBaseline( timelineObjectsBlob: PieceTimelineObjectsBlob, adlibPieces: RundownBaselineAdLibItem[], - adlibActions: RundownBaselineAdLibAction[] + adlibActions: RundownBaselineAdLibAction[], + pieces: Piece[], + expectedPackages: IngestExpectedPackage[] ): Promise { const [loadedRundownBaselineObjs, loadedRundownBaselineAdLibPieces, loadedRundownBaselineAdLibActions] = await Promise.all([ @@ -499,11 +493,13 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { ) // Compare and update the adlibPieces - const newAdlibPieces = adlibPieces.map((piece) => ({ - ...clone(piece), - partId: undefined, - rundownId: this.rundownId, - })) + const newAdlibPieces = adlibPieces.map( + (piece): AdLibPiece => ({ + ...clone(piece), + partId: undefined, + rundownId: this.rundownId, + }) + ) this.#rundownBaselineAdLibPieces.setValue( diffAndReturnLatestObjects( this.#rundownBaselineAdLibPiecesWithChanges, @@ -513,11 +509,13 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { ) // Compare and update the adlibActions - const newAdlibActions = adlibActions.map((action) => ({ - ...clone(action), - partId: 
undefined, - rundownId: this.rundownId, - })) + const newAdlibActions = adlibActions.map( + (action): RundownBaselineAdLibAction => ({ + ...clone(action), + partId: undefined, + rundownId: this.rundownId, + }) + ) this.#rundownBaselineAdLibActions.setValue( diffAndReturnLatestObjects( this.#rundownBaselineAdLibActionsWithChanges, @@ -525,6 +523,20 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { newAdlibActions ) ) + + // Compare and update the rundown pieces + const newPieces = pieces.map( + (piece): Piece => ({ + ...clone(piece), + startRundownId: this.rundownId, + startPartId: null, + startSegmentId: null, + }) + ) + this.#piecesImpl = diffAndReturnLatestObjects(this.#piecesWithChanges, this.#piecesImpl, newPieces) + + // Future: should these be here, or held as part of each adlib? + this.#rundownBaselineExpectedPackagesStore.setExpectedPackages(expectedPackages) } setRundownOrphaned(orphaned: RundownOrphanedReason | undefined): void { @@ -628,9 +640,26 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { for (const segment of this.segmentsImpl.values()) { if (segment.deleted) { logOrThrowError(new Error(`Failed no changes in model assertion, Segment has been changed`)) + break } else { const err = segment.segmentModel.checkNoChanges() - if (err) logOrThrowError(err) + if (err) { + logOrThrowError(err) + break + } + } + } + + if (this.#piecesWithChanges.size) { + logOrThrowError(new Error(`Failed no changes in model assertion, Rundown Pieces have been changed`)) + } else { + for (const piece of this.#piecesImpl.values()) { + if (!piece) { + logOrThrowError( + new Error(`Failed no changes in model assertion, Rundown Pieces have been changed`) + ) + break + } } } } finally { @@ -645,7 +674,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { this.#disposed = true } - async saveAllToDatabase(): Promise { + async saveAllToDatabase(playlistLock: PlaylistLock): Promise { if 
(this.#disposed) { throw new Error('Cannot save disposed IngestModel') } @@ -654,6 +683,10 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { throw new Error('Cannot save changes with released RundownLock') } + if (this.#rundownImpl && playlistLock.playlistId !== this.#rundownImpl.playlistId) { + throw new Error('Cannot save changes with incorrect PlaylistLock') + } + const span = this.context.startSpan('IngestModelImpl.saveAllToDatabase') // Ensure there are no duplicate part ids @@ -663,7 +696,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { partIds.add(part.part._id) } - const saveHelper = new SaveIngestModelHelper() + const saveHelper = new SaveIngestModelHelper(this.rundownId) for (const [segmentId, segment] of this.segmentsImpl.entries()) { saveHelper.addSegment(segment.segmentModel, segment.deleted) if (segment.deleted) { @@ -688,6 +721,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { saveHelper.addExpectedPackagesStore(this.#rundownBaselineExpectedPackagesStore) this.#rundownBaselineExpectedPackagesStore.clearChangedFlags() + saveHelper.addChangedPieces(this.#piecesImpl, this.#piecesWithChanges) + await Promise.all([ this.#rundownHasChanged && this.#rundownImpl ? 
this.context.directCollections.Rundowns.replace(this.#rundownImpl) @@ -704,3 +739,88 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { span?.end() } } + +function groupExpectedPackages(expectedPackages: ExpectedPackageDB[]) { + const baselineExpectedPackages: IngestExpectedPackage[] = [] + const groupedExpectedPackagesByPart = new Map[]>() + + for (const expectedPackage of expectedPackages) { + // Future: this is a temporary flow for a single owner + const src = expectedPackage.ingestSources[0] + switch (src.fromPieceType) { + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + baselineExpectedPackages.push({ + _id: expectedPackage._id, + package: expectedPackage.package, + source: src, + }) + break + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: { + const partPackages = groupedExpectedPackagesByPart.get(src.partId) ?? 
[] + partPackages.push({ + _id: expectedPackage._id, + package: expectedPackage.package, + source: src, + }) + groupedExpectedPackagesByPart.set(src.partId, partPackages) + break + } + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + // Ignore + break + default: + assertNever(src) + break + } + + // Future: once this supports multiple owners + // const baselineIngestSources: ExpectedPackageIngestSourceRundownBaseline[] = [] + // const rundownIngestSources: ExpectedPackageIngestSourcePart[] = [] + // for (const src of expectedPackage.ingestSources) { + // switch (src.fromPieceType) { + // case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + // case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + // case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + // baselineIngestSources.push(src) + // break + // case ExpectedPackageDBType.PIECE: + // case ExpectedPackageDBType.ADLIB_PIECE: + // case ExpectedPackageDBType.ADLIB_ACTION: + // rundownIngestSources.push(src) + // break + // default: + // assertNever(src) + // break + // } + // } + + // if (baselineIngestSources.length > 0) { + // baselineExpectedPackages.push({ + // ...expectedPackage, + // ingestSources: baselineIngestSources, + // }) + // } + + // const sourcesByPartId = groupByToMapFunc(rundownIngestSources, (src) => src.partId) + // for (const [partId, sources] of sourcesByPartId.entries()) { + // const partPackages = groupedExpectedPackagesByPart.get(partId) ?? 
[] + // partPackages.push({ + // ...expectedPackage, + // ingestSources: sources, + // }) + // groupedExpectedPackagesByPart.set(partId, partPackages) + // } + } + + return { + baselineExpectedPackages, + groupedExpectedPackagesByPart, + } +} diff --git a/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts index 2693b20b30..3c6b5492b6 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestPartModelImpl.ts @@ -7,7 +7,6 @@ import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { ExpectedMediaItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { ExpectedPackageFromRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { ExpectedPackagesStore } from './ExpectedPackagesStore.js' import { @@ -17,13 +16,15 @@ import { getDocumentChanges, setValuesAndTrackChanges, } from './utils.js' +import type { IngestExpectedPackage } from '../IngestExpectedPackage.js' +import { ExpectedPackageIngestSourcePart } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' export class IngestPartModelImpl implements IngestPartModel { readonly partImpl: DBPart readonly #pieces: Piece[] readonly #adLibPieces: AdLibPiece[] readonly #adLibActions: AdLibAction[] - readonly expectedPackagesStore: ExpectedPackagesStore + readonly expectedPackagesStore: ExpectedPackagesStore #setPartValue(key: T, newValue: DBPart[T]): void { if (newValue === undefined) { @@ -90,7 +91,7 @@ export class IngestPartModelImpl implements IngestPartModel { get 
expectedPlayoutItems(): ReadonlyDeep[] { return [...this.expectedPackagesStore.expectedPlayoutItems] } - get expectedPackages(): ReadonlyDeep[] { + get expectedPackages(): ReadonlyDeep[] { return [...this.expectedPackagesStore.expectedPackages] } @@ -140,7 +141,7 @@ export class IngestPartModelImpl implements IngestPartModel { adLibActions: AdLibAction[], expectedMediaItems: ExpectedMediaItemRundown[], expectedPlayoutItems: ExpectedPlayoutItemRundown[], - expectedPackages: ExpectedPackageFromRundown[] + expectedPackages: IngestExpectedPackage[] ) { this.partImpl = part this.#pieces = pieces @@ -164,7 +165,6 @@ export class IngestPartModelImpl implements IngestPartModel { this.expectedPackagesStore = new ExpectedPackagesStore( isBeingCreated, part.rundownId, - part.segmentId, part._id, expectedMediaItems, expectedPlayoutItems, @@ -211,7 +211,14 @@ export class IngestPartModelImpl implements IngestPartModel { this.#compareAndSetPartValue('segmentId', segmentId) this.#compareAndSetPartValue('rundownId', rundownId) - this.expectedPackagesStore.setOwnerIds(rundownId, segmentId, this.part._id) + this.expectedPackagesStore.setOwnerIds(rundownId, this.part._id, (pkgSource) => { + if (pkgSource.partId !== this.part._id || pkgSource.segmentId !== segmentId) { + pkgSource.partId = this.part._id + pkgSource.segmentId = segmentId + return true + } + return false + }) setValuesAndTrackChanges(this.#piecesWithChanges, this.#pieces, { startRundownId: rundownId, @@ -234,8 +241,4 @@ export class IngestPartModelImpl implements IngestPartModel { setExpectedMediaItems(expectedMediaItems: ExpectedMediaItemRundown[]): void { this.expectedPackagesStore.setExpectedMediaItems(expectedMediaItems) } - setExpectedPackages(expectedPackages: ExpectedPackageFromRundown[]): void { - // Future: should these be here, or held as part of each adlib/piece? 
- this.expectedPackagesStore.setExpectedPackages(expectedPackages) - } } diff --git a/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts index 5f004454c8..3cbb11e3b7 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestSegmentModelImpl.ts @@ -1,4 +1,4 @@ -import { PartId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PartId, RundownId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { ReadonlyDeep } from 'type-fest' import { DBSegment, SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' import { IngestReplacePartType, IngestSegmentModel } from '../IngestSegmentModel.js' @@ -12,6 +12,13 @@ import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { calculatePartExpectedDurationWithTransition } from '@sofie-automation/corelib/dist/playout/timings' import { clone } from '@sofie-automation/corelib/dist/lib' import { getPartId } from '../../lib.js' +import { + ExpectedPackageDBType, + getExpectedPackageIdFromIngestSource, + ExpectedPackageIngestSourcePart, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import type { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import type { IngestExpectedPackage } from '../IngestExpectedPackage.js' /** * A light wrapper around the IngestPartModel, so that we can track the deletions while still accessing the contents @@ -207,7 +214,7 @@ export class IngestSegmentModelImpl implements IngestSegmentModel { replacePart( rawPart: IngestReplacePartType, pieces: Piece[], - adLibPiece: AdLibPiece[], + adLibPieces: AdLibPiece[], adLibActions: AdLibAction[] ): IngestPartModel { const part: DBPart = { @@ -224,15 +231,24 @@ export class IngestSegmentModelImpl implements IngestSegmentModel { const oldPart 
= this.partsImpl.get(part._id) + const expectedPackages = generateExpectedPackagesForPart( + part.rundownId, + part.segmentId, + part._id, + pieces, + adLibPieces, + adLibActions + ) + const partModel = new IngestPartModelImpl( !oldPart, clone(part), clone(pieces), - clone(adLibPiece), + clone(adLibPieces), clone(adLibActions), [], [], - [] + expectedPackages ) partModel.setOwnerIds(this.segment.rundownId, this.segment._id) @@ -243,3 +259,70 @@ export class IngestSegmentModelImpl implements IngestSegmentModel { return partModel } } + +function generateExpectedPackagesForPart( + rundownId: RundownId, + segmentId: SegmentId, + partId: PartId, + pieces: Piece[], + adLibPieces: AdLibPiece[], + adLibActions: AdLibAction[] +): IngestExpectedPackage[] { + const packages: IngestExpectedPackage[] = [] + + const wrapPackage = ( + expectedPackage: ReadonlyDeep, + source: ExpectedPackageIngestSourcePart + ): IngestExpectedPackage => { + return { + _id: getExpectedPackageIdFromIngestSource(rundownId, source, expectedPackage._id), + + package: expectedPackage, + + source: source, + } + } + + // Future: this will need to deduplicate packages with the same content + // For now, we just generate a package for each expectedPackage + + // Populate the ingestSources + for (const piece of pieces) { + for (const expectedPackage of piece.expectedPackages || []) { + packages.push( + wrapPackage(expectedPackage, { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: piece._id, + partId: partId, + segmentId: segmentId, + }) + ) + } + } + for (const piece of adLibPieces) { + for (const expectedPackage of piece.expectedPackages || []) { + packages.push( + wrapPackage(expectedPackage, { + fromPieceType: ExpectedPackageDBType.ADLIB_PIECE, + pieceId: piece._id, + partId: partId, + segmentId: segmentId, + }) + ) + } + } + for (const piece of adLibActions) { + for (const expectedPackage of piece.expectedPackages || []) { + packages.push( + wrapPackage(expectedPackage, { + fromPieceType: 
ExpectedPackageDBType.ADLIB_ACTION, + pieceId: piece._id, + partId: partId, + segmentId: segmentId, + }) + ) + } + } + + return packages +} diff --git a/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts index dd444141af..4d3e0226ea 100644 --- a/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts +++ b/packages/job-worker/src/ingest/model/implementation/LoadIngestModel.ts @@ -3,8 +3,7 @@ import { unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { JobContext } from '../../../jobs/index.js' import { ReadonlyDeep } from 'type-fest' import { RundownLock } from '../../../jobs/lock.js' -import { IngestModel } from '../IngestModel.js' -import { DatabasePersistedModel } from '../../../modelBase.js' +import { IngestDatabasePersistedModel, IngestModel } from '../IngestModel.js' import { getRundownId } from '../../lib.js' import { ExpectedMediaItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' import { ExpectedPlayoutItemRundown } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' @@ -24,7 +23,7 @@ export async function loadIngestModelFromRundown( context: JobContext, rundownLock: RundownLock, rundown: ReadonlyDeep -): Promise { +): Promise { const span = context.startSpan('IngestModel.loadFromRundown') if (span) span.setLabel('rundownId', unprotectString(rundown._id)) @@ -58,7 +57,7 @@ export async function loadIngestModelFromRundownExternalId( context: JobContext, rundownLock: RundownLock, rundownExternalId: string -): Promise { +): Promise { const span = context.startSpan('IngestModel.loadFromExternalId') if (span) span.setLabel('externalId', rundownExternalId) diff --git a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts index bb6fe2b817..4c1b29ea02 100644 --- 
a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts +++ b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts @@ -1,9 +1,9 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibAction' import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { ExpectedMediaItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedMediaItem' -import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { ExpectedPackageDB, ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PieceId, ExpectedPackageId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' @@ -13,9 +13,14 @@ import { IngestSegmentModelImpl } from './IngestSegmentModelImpl.js' import { DocumentChangeTracker } from './DocumentChangeTracker.js' import { logger } from '../../../logging.js' import { ProtectedString } from '@sofie-automation/corelib/dist/protectedString' +import { IngestExpectedPackage } from '../IngestExpectedPackage.js' +import { AnyBulkWriteOperation } from 'mongodb' +import { Complete, normalizeArrayToMap } from '@sofie-automation/corelib/dist/lib' export class SaveIngestModelHelper { - #expectedPackages = new DocumentChangeTracker() + readonly #rundownId: RundownId + + #expectedPackages = new DocumentChangeTracker>() #expectedPlayoutItems = new DocumentChangeTracker() #expectedMediaItems = new DocumentChangeTracker() @@ -25,8 +30,12 @@ export class SaveIngestModelHelper { #adLibPieces = new DocumentChangeTracker() 
#adLibActions = new DocumentChangeTracker() - addExpectedPackagesStore( - store: ExpectedPackagesStore, + constructor(rundownId: RundownId) { + this.#rundownId = rundownId + } + + addExpectedPackagesStore( + store: ExpectedPackagesStore, deleteAll?: boolean ): void { this.#expectedPackages.addChanges(store.expectedPackagesChanges, deleteAll ?? false) @@ -56,6 +65,19 @@ export class SaveIngestModelHelper { } } + addChangedPieces(pieces: ReadonlyArray, changedPieceIds: Set): void { + for (const piece of pieces) { + this.#pieces.addDocument(piece, changedPieceIds.has(piece._id)) + } + + const currentPieceIds = new Set(pieces.map((p) => p._id)) + for (const changedPieceId of changedPieceIds) { + if (!currentPieceIds.has(changedPieceId)) { + this.#pieces.deleteDocument(changedPieceId) + } + } + } + commit(context: JobContext): Array> { // Log deleted ids: const deletedIds: { [key: string]: ProtectedString[] } = { @@ -75,7 +97,11 @@ export class SaveIngestModelHelper { } return [ - context.directCollections.ExpectedPackages.bulkWrite(this.#expectedPackages.generateWriteOps()), + writeExpectedPackagesChangesForRundown( + context, + this.#rundownId, + Array.from(this.#expectedPackages.getDocumentsToSave().values()) + ), context.directCollections.ExpectedPlayoutItems.bulkWrite(this.#expectedPlayoutItems.generateWriteOps()), context.directCollections.ExpectedMediaItems.bulkWrite(this.#expectedMediaItems.generateWriteOps()), @@ -87,3 +113,87 @@ export class SaveIngestModelHelper { ] } } + +export async function writeExpectedPackagesChangesForRundown( + context: JobContext, + rundownId: RundownId | null, + documentsToSave: IngestExpectedPackage[] +): Promise { + const existingDocs = (await context.directCollections.ExpectedPackages.findFetch( + { + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + }, + { + projection: { + _id: 1, + // Future: playoutSources + }, + } + )) as Pick[] + const existingDocsMap = normalizeArrayToMap(existingDocs, '_id') + + // 
Generate any insert and update operations + const ops: AnyBulkWriteOperation[] = [] + for (const doc of documentsToSave) { + const newDbDoc: Complete> = { + // Future: omit 'playoutSources from this doc + studioId: context.studioId, + rundownId: rundownId, + bucketId: null, + created: Date.now(), + package: doc.package, + ingestSources: [doc.source], + } + + const existingDoc = existingDocsMap.get(doc._id) + if (existingDoc) { + // Document already exists, perform an update to preserve other fields + ops.push({ + updateOne: { + filter: { _id: doc._id }, + update: { + $set: { + // Update every field that we want to define + ...newDbDoc, + }, + }, + }, + }) + } else { + // Insert this new document + ops.push({ + insertOne: { + document: { + ...newDbDoc, + _id: doc._id, + }, + }, + }) + } + } + + // Look over the existing documents, and see is no longer referenced + const documentsToSaveMap = normalizeArrayToMap(documentsToSave, '_id') + const idsToDelete: ExpectedPackageId[] = [] + + for (const doc of existingDocs) { + // Skip if this document is in the list of documents to save + if (documentsToSaveMap.has(doc._id)) continue + + // Future: check for playoutSources + idsToDelete.push(doc._id) + } + + // const idsToDelete = changeTracker.getDeletedIds() + if (idsToDelete.length > 0) { + ops.push({ + deleteMany: { + filter: { _id: { $in: idsToDelete as any } }, + }, + }) + } + + if (ops.length > 0) await context.directCollections.ExpectedPackages.bulkWrite(ops) +} diff --git a/packages/job-worker/src/ingest/model/implementation/utils.ts b/packages/job-worker/src/ingest/model/implementation/utils.ts index cec36172f1..a3cfc4a3f9 100644 --- a/packages/job-worker/src/ingest/model/implementation/utils.ts +++ b/packages/job-worker/src/ingest/model/implementation/utils.ts @@ -67,6 +67,19 @@ export function setValuesAndTrackChanges } } } +export function setValuesAndTrackChangesFunc }>( + changedIds: Set, + objects: readonly T[], + mutator: (obj: T) => boolean +): void { + for 
(const obj of objects) { + const mutatorChanged = mutator(obj) + + // The doc changed, track it as such + if (mutatorChanged) changedIds.add(obj._id) + } +} + export function addManyToSet(set: Set, iter: Iterable): void { for (const val of iter) { set.add(val) diff --git a/packages/job-worker/src/ingest/packageInfo.ts b/packages/job-worker/src/ingest/packageInfo.ts index 6816a9d4d4..824f752ba9 100644 --- a/packages/job-worker/src/ingest/packageInfo.ts +++ b/packages/job-worker/src/ingest/packageInfo.ts @@ -1,38 +1,11 @@ import { ExpectedPackageDBType } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { - ExpectedPackagesRegenerateProps, - PackageInfosUpdatedRundownProps, -} from '@sofie-automation/corelib/dist/worker/ingest' +import { PackageInfosUpdatedRundownProps } from '@sofie-automation/corelib/dist/worker/ingest' import { logger } from '../logging.js' import { JobContext } from '../jobs/index.js' import { regenerateSegmentsFromIngestData } from './generationSegment.js' -import { runWithRundownLock } from './lock.js' -import { updateExpectedPackagesForPartModel, updateExpectedPackagesForRundownBaseline } from './expectedPackages.js' -import { loadIngestModelFromRundown } from './model/implementation/LoadIngestModel.js' import { runCustomIngestUpdateOperation } from './runOperation.js' - -/** - * Debug: Regenerate ExpectedPackages for a Rundown - */ -export async function handleExpectedPackagesRegenerate( - context: JobContext, - data: ExpectedPackagesRegenerateProps -): Promise { - return runWithRundownLock(context, data.rundownId, async (rundown, rundownLock) => { - if (!rundown) throw new Error(`Rundown "${data.rundownId}" not found`) - - const ingestModel = await loadIngestModelFromRundown(context, rundownLock, rundown) - - for (const part of ingestModel.getAllOrderedParts()) { - updateExpectedPackagesForPartModel(context, part) - } - - await 
updateExpectedPackagesForRundownBaseline(context, ingestModel, undefined, true) - - await ingestModel.saveAllToDatabase() - }) -} +import { assertNever } from '@sofie-automation/corelib/dist/lib' /** * Some PackageInfos have been updated, regenerate any Parts which depend on these PackageInfos @@ -58,23 +31,32 @@ export async function handleUpdatedPackageInfoForRundown( let regenerateRundownBaseline = false for (const packageId of data.packageIds) { - const pkg = ingestModel.findExpectedPackage(packageId) - if (pkg) { - if ( - pkg.fromPieceType === ExpectedPackageDBType.PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.ADLIB_ACTION - ) { - segmentsToUpdate.add(pkg.segmentId) - } else if ( - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_ACTION || - pkg.fromPieceType === ExpectedPackageDBType.BASELINE_ADLIB_PIECE || - pkg.fromPieceType === ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS - ) { - regenerateRundownBaseline = true + const pkgIngestSources = ingestModel.findExpectedPackageIngestSources(packageId) + for (const source of pkgIngestSources) { + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + segmentsToUpdate.add(source.segmentId) + break + + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: + regenerateRundownBaseline = true + break + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + // Ignore + break + default: + assertNever(source) } - } else { - logger.warn(`onUpdatedPackageInfoForRundown: Missing package: "${packageId}"`) + } + if (pkgIngestSources.length === 0) { + logger.warn(`onUpdatedPackageInfoForRundown: Missing 
ingestSources for package: "${packageId}"`) } } diff --git a/packages/job-worker/src/ingest/runOperation.ts b/packages/job-worker/src/ingest/runOperation.ts index 86716a7bab..9aaafd124d 100644 --- a/packages/job-worker/src/ingest/runOperation.ts +++ b/packages/job-worker/src/ingest/runOperation.ts @@ -1,4 +1,4 @@ -import { IngestModel, IngestModelReadonly } from './model/IngestModel.js' +import { IngestDatabasePersistedModel, IngestModel, IngestModelReadonly } from './model/IngestModel.js' import { BeforeIngestOperationPartMap, CommitIngestOperation } from './commit.js' import { SofieIngestRundownDataCache, SofieIngestRundownDataCacheGenerator } from './sofieIngestCache.js' import { canRundownBeUpdated, getRundownId, getSegmentId } from './lib.js' @@ -8,7 +8,6 @@ import { UserError, UserErrorMessage } from '@sofie-automation/corelib/dist/erro import { loadIngestModelFromRundownExternalId } from './model/implementation/LoadIngestModel.js' import { Complete, clone } from '@sofie-automation/corelib/dist/lib' import { CommitIngestData, runWithRundownLockWithoutFetchingRundown } from './lock.js' -import { DatabasePersistedModel } from '../modelBase.js' import { NrcsIngestChangeDetails, IngestRundown, @@ -352,7 +351,7 @@ function sortIngestRundown(rundown: IngestRundown): void { async function updateSofieRundownModel( context: JobContext, - pIngestModel: Promise, + pIngestModel: Promise, computedIngestChanges: ComputedIngestChanges | null ) { const ingestModel = await pIngestModel diff --git a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts index 9e87733c1b..32324cd12d 100644 --- a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts +++ b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts @@ -24,6 +24,7 @@ import { convertPartInstanceToBlueprints, convertPartToBlueprints, convertPieceInstanceToBlueprints, + convertRundownPieceToBlueprints, } from '../blueprints/context/lib.js' 
import { validateAdlibTestingPartInstanceProperties } from '../playout/adlibTesting.js' import { ReadonlyDeep } from 'type-fest' @@ -50,7 +51,7 @@ export interface PartInstanceToSync { * This defers out to the Blueprints to do the syncing * @param context Context of the job being run * @param playoutModel Playout model containing containing the Rundown being ingested - * @param ingestModel Ingest model for the Rundown + * @param ingestModel Ingest model for the Rundown. This is being written to mongodb while this method runs */ export async function syncChangesToPartInstances( context: JobContext, @@ -181,6 +182,7 @@ export class SyncChangesToPartInstancesWorker { await syncPlayheadInfinitesForNextPartInstance( this.#context, this.#playoutModel, + this.#ingestModel, this.#playoutModel.currentPartInstance, this.#playoutModel.nextPartInstance ) @@ -210,6 +212,7 @@ export class SyncChangesToPartInstancesWorker { actions: instanceToSync.newPart && ingestPart ? ingestPart.adLibActions.map(convertAdLibActionToBlueprints) : [], referencedAdlibs: referencedAdlibs, + rundownPieces: this.#ingestModel.getGlobalPieces().map(convertRundownPieceToBlueprints), } } diff --git a/packages/job-worker/src/jobs/studio.ts b/packages/job-worker/src/jobs/studio.ts index 16368fec1a..5a257fc7d1 100644 --- a/packages/job-worker/src/jobs/studio.ts +++ b/packages/job-worker/src/jobs/studio.ts @@ -67,6 +67,6 @@ export function convertStudioToJobStudio(studio: DBStudio): JobStudio { routeSetExclusivityGroups: studio.routeSetExclusivityGroupsWithOverrides ? applyAndValidateOverrides(studio.routeSetExclusivityGroupsWithOverrides).obj : (studio as any).routeSetExclusivityGroups || {}, - // packageContainers: studio.packageContainersWithOverrides ? 
applyAndValidateOverrides(studio.packageContainersWithOverrides).obj : (studio as any).packageContainers || {}, + // packageContainers: applyAndValidateOverrides(studio.packageContainersWithOverrides).obj, } } diff --git a/packages/job-worker/src/playout/__tests__/playout.test.ts b/packages/job-worker/src/playout/__tests__/playout.test.ts index 369f445397..912b118cc2 100644 --- a/packages/job-worker/src/playout/__tests__/playout.test.ts +++ b/packages/job-worker/src/playout/__tests__/playout.test.ts @@ -608,7 +608,7 @@ describe('Playout API', () => { : now) + Math.random() * TIME_RANDOM, }, - } + } satisfies PlayoutChangedResult }), ], }) @@ -701,7 +701,7 @@ describe('Playout API', () => { : now) + Math.random() * TIME_RANDOM, }, - } + } satisfies PlayoutChangedResult }), ], }) diff --git a/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts b/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts index 42da0af9e9..76898419ed 100644 --- a/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts +++ b/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts @@ -13,6 +13,8 @@ import { } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { + createPartCurrentTimes, + PartCurrentTimes, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, } from '@sofie-automation/corelib/dist/playout/processAndPrune' @@ -93,8 +95,9 @@ describe('Resolved Pieces', () => { nowInPart: number | null, pieceInstances: PieceInstance[] ): ResolvedPieceInstance[] { - const preprocessedPieces = processAndPrunePieceInstanceTimings(sourceLayers, pieceInstances, nowInPart ?? 0) - return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(nowInPart ?? 
0, instance)) + const partTimes = createPartCurrentTimes(5000, nowInPart) + const preprocessedPieces = processAndPrunePieceInstanceTimings(sourceLayers, pieceInstances, partTimes) + return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(partTimes, instance)) } test('simple single piece', async () => { @@ -398,18 +401,18 @@ describe('Resolved Pieces', () => { } function createPartInstanceInfo( - partStarted: number, - nowInPart: number, + partTimes: PartCurrentTimes, + // partStarted: number, + // nowInPart: number, partInstance: DBPartInstance, currentPieces: PieceInstance[] ): SelectedPartInstanceTimelineInfo { - const pieceInstances = processAndPrunePieceInstanceTimings(sourceLayers, currentPieces, nowInPart) + const pieceInstances = processAndPrunePieceInstanceTimings(sourceLayers, currentPieces, partTimes) return { partInstance, pieceInstances, - nowInPart, - partStarted, + partTimes, // Approximate `calculatedTimings`, for the partInstances which already have it cached calculatedTimings: getPartTimingsOrDefaults(partInstance, pieceInstances), regenerateTimelineAt: undefined, @@ -421,9 +424,10 @@ describe('Resolved Pieces', () => { expect(sourceLayerId).toBeTruthy() const now = 990000 + const partTimes = createPartCurrentTimes(now, now) const piece001 = createPieceInstance(sourceLayerId, { start: 0 }) - const currentPartInfo = createPartInstanceInfo(now, 0, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const resolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -456,13 +460,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [ - piece001, - virtualPiece, - ]) + const currentPartInfo = createPartInstanceInfo(partTimes, 
createPartInstance(), [piece001, virtualPiece]) // Check the result const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( @@ -473,8 +473,8 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted, - resolvedDuration: nowInPart, + resolvedStart: partTimes.partStartTime!, + resolvedDuration: partTimes.nowInPart, }, { // TODO - this object should not be present? @@ -501,13 +501,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [ - piece001, - virtualPiece, - ]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001, virtualPiece]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -517,13 +513,13 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted, + resolvedStart: partTimes.partStartTime!, resolvedDuration: 7000, }, { // TODO - this object should not be present? _id: virtualPiece._id, - resolvedStart: partStarted + 7000, + resolvedStart: partTimes.partStartTime! 
+ 7000, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -536,10 +532,9 @@ describe('Resolved Pieces', () => { const piece001 = createPieceInstance(sourceLayerId, { start: 0, duration: 0 }) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -549,7 +544,7 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted, + resolvedStart: partTimes.partStartTime!, resolvedDuration: 0, }, ] satisfies StrippedResult) @@ -577,10 +572,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [ + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [ piece001, infinite1, infinite2, @@ -594,17 +588,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: infinite1._id, - resolvedStart: partStarted + 1000, + resolvedStart: partTimes.partStartTime! + 1000, resolvedDuration: 4000, }, { _id: piece001._id, - resolvedStart: partStarted + 3000, + resolvedStart: partTimes.partStartTime! + 3000, resolvedDuration: 2000, }, { _id: infinite2._id, - resolvedStart: partStarted + 5000, + resolvedStart: partTimes.partStartTime! 
+ 5000, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -626,10 +620,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -639,7 +632,7 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted + 3000, + resolvedStart: partTimes.partStartTime! + 3000, resolvedDuration: 1200, }, ] satisfies StrippedResult) @@ -661,10 +654,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 7000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 7000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -674,7 +666,7 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted + 4000, + resolvedStart: partTimes.partStartTime! 
+ 4000, resolvedDuration: -4000 + 7000 + 1300, }, ] satisfies StrippedResult) @@ -689,20 +681,12 @@ describe('Resolved Pieces', () => { const piece010 = createPieceInstance(sourceLayerId, { start: 0 }) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart - const previousPartStarted = currentPartStarted - 5000 - - const previousPartInfo = createPartInstanceInfo( - previousPartStarted, - nowInPart + 5000, - createPartInstance(), - [piece001] - ) + const currentPartTimes = createPartCurrentTimes(now, now - 2000) + const previousPartTimes = createPartCurrentTimes(now, now - 7000) - const currentPartInfo = createPartInstanceInfo(currentPartStarted, nowInPart, createPartInstance(), [ - piece010, - ]) + const previousPartInfo = createPartInstanceInfo(previousPartTimes, createPartInstance(), [piece001]) + + const currentPartInfo = createPartInstanceInfo(currentPartTimes, createPartInstance(), [piece010]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -715,12 +699,12 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: previousPartStarted, + resolvedStart: previousPartTimes.partStartTime!, resolvedDuration: 5000, }, { _id: piece010._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -743,21 +727,16 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart - const previousPartStarted = currentPartStarted - 5000 - - const previousPartInfo = createPartInstanceInfo( - previousPartStarted, - nowInPart + 5000, - createPartInstance(), - [piece001, cappedInfinitePiece] - ) + const currentPartTimes = createPartCurrentTimes(now, now - 2000) + const previousPartTimes = createPartCurrentTimes(now, now - 7000) - const currentPartInfo = 
createPartInstanceInfo(currentPartStarted, nowInPart, createPartInstance(), [ - piece010, + const previousPartInfo = createPartInstanceInfo(previousPartTimes, createPartInstance(), [ + piece001, + cappedInfinitePiece, ]) + const currentPartInfo = createPartInstanceInfo(currentPartTimes, createPartInstance(), [piece010]) + const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, { @@ -769,17 +748,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: previousPartStarted, + resolvedStart: previousPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: cappedInfinitePiece._id, - resolvedStart: previousPartStarted + 1000, + resolvedStart: previousPartTimes.partStartTime! + 1000, resolvedDuration: 4000, }, { _id: piece010._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -820,18 +799,15 @@ describe('Resolved Pieces', () => { } const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart - const previousPartStarted = currentPartStarted - 5000 - - const previousPartInfo = createPartInstanceInfo( - previousPartStarted, - nowInPart + 5000, - createPartInstance(), - [piece001, startingInfinitePiece] - ) + const currentPartTimes = createPartCurrentTimes(now, now - 2000) + const previousPartTimes = createPartCurrentTimes(now, now - 7000) + + const previousPartInfo = createPartInstanceInfo(previousPartTimes, createPartInstance(), [ + piece001, + startingInfinitePiece, + ]) - const currentPartInfo = createPartInstanceInfo(currentPartStarted, nowInPart, createPartInstance(), [ + const currentPartInfo = createPartInstanceInfo(currentPartTimes, createPartInstance(), [ piece010, continuingInfinitePiece, ]) @@ -847,17 +823,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - 
resolvedStart: previousPartStarted, + resolvedStart: previousPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: continuingInfinitePiece._id, - resolvedStart: previousPartStarted + 1000, + resolvedStart: previousPartTimes.partStartTime! + 1000, resolvedDuration: 9400, }, { _id: piece010._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -872,14 +848,12 @@ describe('Resolved Pieces', () => { const piece010 = createPieceInstance(sourceLayerId, { start: 0 }) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart + const currentPartTimes = createPartCurrentTimes(now, now - 2000) const currentPartLength = 13000 - const nextPartStart = currentPartStarted + currentPartLength + const nextPartTimes = createPartCurrentTimes(now, currentPartTimes.partStartTime! + currentPartLength) const currentPartInfo = createPartInstanceInfo( - currentPartStarted, - nowInPart, + currentPartTimes, createPartInstance({ autoNext: true, expectedDuration: currentPartLength, @@ -887,7 +861,7 @@ describe('Resolved Pieces', () => { [piece001] ) - const nextPartInfo = createPartInstanceInfo(nextPartStart, 0, createPartInstance(), [piece010]) + const nextPartInfo = createPartInstanceInfo(nextPartTimes, createPartInstance(), [piece010]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -900,12 +874,12 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: currentPartLength, }, { _id: piece010._id, - resolvedStart: nextPartStart, + resolvedStart: nextPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -928,14 +902,12 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const currentPartStarted 
= now - nowInPart + const currentPartTimes = createPartCurrentTimes(now, now - 2000) const currentPartLength = 13000 - const nextPartStart = currentPartStarted + currentPartLength + const nextPartTimes = createPartCurrentTimes(now, currentPartTimes.partStartTime! + currentPartLength) const currentPartInfo = createPartInstanceInfo( - currentPartStarted, - nowInPart, + currentPartTimes, createPartInstance({ autoNext: true, expectedDuration: currentPartLength, @@ -943,7 +915,7 @@ describe('Resolved Pieces', () => { [piece001, cappedInfinitePiece] ) - const nextPartInfo = createPartInstanceInfo(nextPartStart, 0, createPartInstance(), [piece010]) + const nextPartInfo = createPartInstanceInfo(nextPartTimes, createPartInstance(), [piece010]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -957,17 +929,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: cappedInfinitePiece._id, - resolvedStart: currentPartStarted + 1000, + resolvedStart: currentPartTimes.partStartTime! + 1000, resolvedDuration: currentPartLength - 1000, }, { _id: piece010._id, - resolvedStart: nextPartStart, + resolvedStart: nextPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -1008,14 +980,12 @@ describe('Resolved Pieces', () => { } const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart + const currentPartTimes = createPartCurrentTimes(now, now - 2000) const currentPartLength = 13000 - const nextPartStart = currentPartStarted + currentPartLength + const nextPartTimes = createPartCurrentTimes(now, currentPartTimes.partStartTime! 
+ currentPartLength) const currentPartInfo = createPartInstanceInfo( - currentPartStarted, - nowInPart, + currentPartTimes, createPartInstance({ autoNext: true, expectedDuration: currentPartLength, @@ -1023,7 +993,7 @@ describe('Resolved Pieces', () => { [piece001, startingInfinitePiece] ) - const nextPartInfo = createPartInstanceInfo(nextPartStart, 0, createPartInstance(), [ + const nextPartInfo = createPartInstanceInfo(nextPartTimes, createPartInstance(), [ piece010, continuingInfinitePiece, ]) @@ -1040,17 +1010,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: startingInfinitePiece._id, - resolvedStart: currentPartStarted + 1000, + resolvedStart: currentPartTimes.partStartTime! + 1000, resolvedDuration: currentPartLength - 1000 + 3400, }, { _id: piece010._id, - resolvedStart: nextPartStart, + resolvedStart: nextPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) diff --git a/packages/job-worker/src/playout/activePlaylistActions.ts b/packages/job-worker/src/playout/activePlaylistActions.ts index c4c2dff622..81a2961f8f 100644 --- a/packages/job-worker/src/playout/activePlaylistActions.ts +++ b/packages/job-worker/src/playout/activePlaylistActions.ts @@ -145,8 +145,6 @@ export async function deactivateRundownPlaylistInner( let rundown: ReadonlyDeep | undefined if (currentPartInstance) { rundown = playoutModel.getRundown(currentPartInstance.partInstance.rundownId)?.rundown - - playoutModel.queueNotifyCurrentlyPlayingPartEvent(currentPartInstance.partInstance.rundownId, null) } else if (nextPartInstance) { rundown = playoutModel.getRundown(nextPartInstance.partInstance.rundownId)?.rundown } diff --git a/packages/job-worker/src/playout/adlibAction.ts b/packages/job-worker/src/playout/adlibAction.ts index 36eaa59a10..cc9c338241 100644 --- 
a/packages/job-worker/src/playout/adlibAction.ts +++ b/packages/job-worker/src/playout/adlibAction.ts @@ -76,17 +76,6 @@ export async function executeAdlibActionAndSaveModel( throw UserError.create(UserErrorMessage.ActionsNotSupported) } - const watchedPackages = await WatchedPackagesHelper.create(context, { - pieceId: data.actionDocId, - fromPieceType: { - $in: [ - ExpectedPackageDBType.ADLIB_ACTION, - ExpectedPackageDBType.BASELINE_ADLIB_ACTION, - ExpectedPackageDBType.BUCKET_ADLIB_ACTION, - ], - }, - }) - const [adLibAction, baselineAdLibAction, bucketAdLibAction] = await Promise.all([ context.directCollections.AdLibActions.findOne(data.actionDocId as AdLibActionId, { projection: { _id: 1, privateData: 1 }, @@ -103,6 +92,21 @@ export async function executeAdlibActionAndSaveModel( ]) const adLibActionDoc = adLibAction ?? baselineAdLibAction ?? bucketAdLibAction + let watchedPackages = WatchedPackagesHelper.empty(context) + if (adLibActionDoc && 'rundownId' in adLibActionDoc) { + watchedPackages = await WatchedPackagesHelper.create(context, adLibActionDoc.rundownId, null, { + fromPieceType: { + $in: [ExpectedPackageDBType.ADLIB_ACTION, ExpectedPackageDBType.BASELINE_ADLIB_ACTION], + }, + pieceId: data.actionDocId, + }) + } else if (adLibActionDoc && 'bucketId' in adLibActionDoc) { + watchedPackages = await WatchedPackagesHelper.create(context, null, adLibActionDoc.bucketId, { + fromPieceType: ExpectedPackageDBType.BUCKET_ADLIB_ACTION, + pieceId: data.actionDocId, + }) + } + const actionParameters: ExecuteActionParameters = { actionId: data.actionId, userData: data.userData, diff --git a/packages/job-worker/src/playout/adlibJobs.ts b/packages/job-worker/src/playout/adlibJobs.ts index 836ea26c31..0d0bf2e307 100644 --- a/packages/job-worker/src/playout/adlibJobs.ts +++ b/packages/job-worker/src/playout/adlibJobs.ts @@ -194,6 +194,7 @@ async function pieceTakeNowAsAdlib( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, 
playoutModel.currentPartInstance, playoutModel.nextPartInstance ) @@ -373,6 +374,7 @@ export async function handleStopPiecesOnSourceLayers( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) diff --git a/packages/job-worker/src/playout/adlibUtils.ts b/packages/job-worker/src/playout/adlibUtils.ts index 428d60fa75..adde0ab27e 100644 --- a/packages/job-worker/src/playout/adlibUtils.ts +++ b/packages/job-worker/src/playout/adlibUtils.ts @@ -68,6 +68,7 @@ export async function innerStartOrQueueAdLibPiece( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, currentPartInstance, playoutModel.nextPartInstance ) @@ -310,13 +311,15 @@ export function innerStopPieces( const pieceInstanceModel = playoutModel.findPieceInstance(pieceInstance._id) if (pieceInstanceModel) { - const newDuration: Required['userDuration'] = playoutModel.isMultiGatewayMode - ? { - endRelativeToNow: offsetRelativeToNow, - } - : { - endRelativeToPart: relativeStopAt, - } + const newDuration: Required['userDuration'] = + playoutModel.isMultiGatewayMode || + pieceInstanceModel.pieceInstance.pieceInstance.piece.enable.isAbsolute + ? 
{ + endRelativeToNow: offsetRelativeToNow, + } + : { + endRelativeToPart: relativeStopAt, + } pieceInstanceModel.pieceInstance.setDuration(newDuration) diff --git a/packages/job-worker/src/playout/debug.ts b/packages/job-worker/src/playout/debug.ts index 2028b0b23c..cd6988b72d 100644 --- a/packages/job-worker/src/playout/debug.ts +++ b/packages/job-worker/src/playout/debug.ts @@ -25,6 +25,7 @@ export async function handleDebugSyncPlayheadInfinitesForNextPartInstance( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) diff --git a/packages/job-worker/src/playout/infinites.ts b/packages/job-worker/src/playout/infinites.ts index 4523e8cc57..0f149931ba 100644 --- a/packages/job-worker/src/playout/infinites.ts +++ b/packages/job-worker/src/playout/infinites.ts @@ -2,21 +2,24 @@ import { PartInstanceId, RundownId, ShowStyleBaseId } from '@sofie-automation/co import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' -import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { PieceInstance, wrapPieceToInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { getPieceInstancesForPart as libgetPieceInstancesForPart, getPlayheadTrackingInfinitesForPart as libgetPlayheadTrackingInfinitesForPart, buildPiecesStartingInThisPartQuery, buildPastInfinitePiecesForThisPartQuery, } from '@sofie-automation/corelib/dist/playout/infinites' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { JobContext } from '../jobs/index.js' import { ReadonlyDeep } from 
'type-fest' import { PlayoutModel } from './model/PlayoutModel.js' import { PlayoutPartInstanceModel } from './model/PlayoutPartInstanceModel.js' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel.js' import { getCurrentTime } from '../lib/index.js' -import { flatten } from '@sofie-automation/corelib/dist/lib' +import { clone, flatten, getRandomId } from '@sofie-automation/corelib/dist/lib' import _ from 'underscore' import { IngestModelReadonly } from '../ingest/model/IngestModel.js' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' @@ -217,6 +220,7 @@ export async function fetchPiecesThatMayBeActiveForPart( export async function syncPlayheadInfinitesForNextPartInstance( context: JobContext, playoutModel: PlayoutModel, + unsavedIngestModel: Pick | undefined, fromPartInstance: PlayoutPartInstanceModel | null, toPartInstance: PlayoutPartInstanceModel | null ): Promise { @@ -253,11 +257,14 @@ export async function syncPlayheadInfinitesForNextPartInstance( toPartInstance.partInstance.part ) - const nowInPart = getCurrentTime() - (fromPartInstance.partInstance.timings?.plannedStartedPlayback ?? 
0) + const partTimes = createPartCurrentTimes( + getCurrentTime(), + fromPartInstance.partInstance.timings?.plannedStartedPlayback + ) const prunedPieceInstances = processAndPrunePieceInstanceTimings( showStyleBase.sourceLayers, fromPartInstance.pieceInstances.map((p) => p.pieceInstance), - nowInPart, + partTimes, undefined, true ) @@ -283,6 +290,17 @@ export async function syncPlayheadInfinitesForNextPartInstance( ) toPartInstance.replaceInfinitesFromPreviousPlayhead(infinites) + } else if (toPartInstance && !fromPartInstance) { + // This is the first take of the rundown, ensure the baseline infinites are loaded + const baselineInfinites = await getBaselineInfinitesForPart( + context, + playoutModel, + unsavedIngestModel, + toPartInstance.partInstance.part, + toPartInstance.partInstance._id + ) + + toPartInstance.replaceInfinitesFromPreviousPlayhead(baselineInfinites) } if (span) span.end() } @@ -385,3 +403,38 @@ export function getPieceInstancesForPart( if (span) span.end() return res } + +export async function getBaselineInfinitesForPart( + context: JobContext, + playoutModel: PlayoutModel, + unsavedIngestModel: Pick | undefined, + part: ReadonlyDeep, + partInstanceId: PartInstanceId +): Promise { + // Find the pieces. If an ingest model is provided, use that instead of the database + const pieces = + unsavedIngestModel && unsavedIngestModel.rundownId === part.rundownId + ? 
unsavedIngestModel.getAllPieces().filter((p) => p.startPartId === null) + : await context.directCollections.Pieces.findFetch({ + startRundownId: part.rundownId, + startPartId: null, + }) + + const playlistActivationId = playoutModel.playlist.activationId + if (!playlistActivationId) throw new Error(`RundownPlaylist "${playoutModel.playlistId}" is not active`) + + return pieces.map((piece) => { + const instance = wrapPieceToInstance(clone(piece), playlistActivationId, partInstanceId, false) + + // All these pieces are expected to be outOnRundownChange infinites, as that is how they are ingested + + instance.infinite = { + infiniteInstanceId: getRandomId(), + infiniteInstanceIndex: 0, + infinitePieceId: instance.piece._id, + fromPreviousPart: true, + } + + return instance + }) +} diff --git a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts index 5aeb024a98..c421f1b5c7 100644 --- a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts +++ b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts @@ -23,6 +23,7 @@ type TgetOrderedPartsAfterPlayhead = jest.MockedFunction { @@ -272,8 +273,7 @@ describe('Lookahead', () => { const partInstancesInfo: SelectedPartInstancesTimelineInfo = {} partInstancesInfo.previous = { partInstance: { _id: 'abc2', part: { _id: 'abc' } } as any, - nowInPart: 987, - partStarted: getCurrentTime() + 546, + partTimes: createPartCurrentTimes(getCurrentTime(), getCurrentTime() + 546), pieceInstances: ['1', '2'] as any, calculatedTimings: { inTransitionStart: null } as any, regenerateTimelineAt: undefined, @@ -282,7 +282,7 @@ describe('Lookahead', () => { const expectedPrevious = { part: partInstancesInfo.previous.partInstance, onTimeline: true, - nowInPart: partInstancesInfo.previous.nowInPart, + nowInPart: partInstancesInfo.previous.partTimes.nowInPart, allPieces: partInstancesInfo.previous.pieceInstances, calculatedTimings: 
partInstancesInfo.previous.calculatedTimings, } @@ -296,8 +296,7 @@ describe('Lookahead', () => { // Add a current partInstancesInfo.current = { partInstance: { _id: 'curr', part: {} } as any, - nowInPart: 56, - partStarted: getCurrentTime() + 865, + partTimes: createPartCurrentTimes(getCurrentTime(), getCurrentTime() + 865), pieceInstances: ['3', '4'] as any, calculatedTimings: { inTransitionStart: null } as any, regenerateTimelineAt: undefined, @@ -305,7 +304,7 @@ describe('Lookahead', () => { const expectedCurrent = { part: partInstancesInfo.current.partInstance, onTimeline: true, - nowInPart: partInstancesInfo.current.nowInPart, + nowInPart: partInstancesInfo.current.partTimes.nowInPart, allPieces: partInstancesInfo.current.pieceInstances, calculatedTimings: partInstancesInfo.current.calculatedTimings, } @@ -317,8 +316,7 @@ describe('Lookahead', () => { // Add a next partInstancesInfo.next = { partInstance: { _id: 'nxt2', part: { _id: 'nxt' } } as any, - nowInPart: -85, - partStarted: getCurrentTime() + 142, + partTimes: createPartCurrentTimes(getCurrentTime(), getCurrentTime() + 142), pieceInstances: ['5'] as any, calculatedTimings: { inTransitionStart: null } as any, regenerateTimelineAt: undefined, @@ -326,7 +324,7 @@ describe('Lookahead', () => { const expectedNext = { part: partInstancesInfo.next.partInstance, onTimeline: false, - nowInPart: partInstancesInfo.next.nowInPart, + nowInPart: partInstancesInfo.next.partTimes.nowInPart, allPieces: partInstancesInfo.next.pieceInstances, calculatedTimings: partInstancesInfo.next.calculatedTimings, } diff --git a/packages/job-worker/src/playout/lookahead/findObjects.ts b/packages/job-worker/src/playout/lookahead/findObjects.ts index 8e228a5a01..d96035a74f 100644 --- a/packages/job-worker/src/playout/lookahead/findObjects.ts +++ b/packages/job-worker/src/playout/lookahead/findObjects.ts @@ -19,7 +19,7 @@ function getBestPieceInstanceId(piece: ReadonlyDeep): string { return unprotectString(piece._id) } // Something 
is needed, and it must be distant future here, so accuracy is not important - return unprotectString(piece.piece.startPartId) + return unprotectString(piece.piece.startPartId ?? piece.rundownId) } function tryActivateKeyframesForObject( diff --git a/packages/job-worker/src/playout/lookahead/index.ts b/packages/job-worker/src/playout/lookahead/index.ts index 83fe9ea839..64ac5a2337 100644 --- a/packages/job-worker/src/playout/lookahead/index.ts +++ b/packages/job-worker/src/playout/lookahead/index.ts @@ -46,15 +46,29 @@ function getPrunedEndedPieceInstances(info: SelectedPartInstanceTimelineInfo) { if (!info.partInstance.timings?.plannedStartedPlayback) { return info.pieceInstances } else { - return info.pieceInstances.filter((p) => !hasPieceInstanceDefinitelyEnded(p, info.nowInPart)) + return info.pieceInstances.filter((p) => !hasPieceInstanceDefinitelyEnded(p, info.partTimes.nowInPart)) } } -function removeInfiniteContinuations(info: PartInstanceAndPieceInstances): PartInstanceAndPieceInstances { +function removeInfiniteContinuations( + info: PartInstanceAndPieceInstances, + isCurrentPart: boolean +): PartInstanceAndPieceInstances { const partId = info.part.part._id return { ...info, // Ignore PieceInstances that continue from the previous part, as they will not need lookahead - allPieces: info.allPieces.filter((inst) => !inst.infinite || inst.piece.startPartId === partId), + allPieces: info.allPieces.filter((inst) => { + // Always include non infinite pieces + if (!inst.infinite) return true + + // Only include rundown owned pieces in the current part + if (!inst.piece.startPartId) { + return isCurrentPart + } + + // Include infinite pieces in the part where they start + return inst.piece.startPartId === partId + }), } } @@ -92,35 +106,44 @@ export async function getLookeaheadObjects( const partInstancesInfo: PartInstanceAndPieceInstances[] = _.compact([ partInstancesInfo0.current - ? 
removeInfiniteContinuations({ - part: partInstancesInfo0.current.partInstance, - onTimeline: true, - nowInPart: partInstancesInfo0.current.nowInPart, - allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.current), - calculatedTimings: partInstancesInfo0.current.calculatedTimings, - }) + ? removeInfiniteContinuations( + { + part: partInstancesInfo0.current.partInstance, + onTimeline: true, + nowInPart: partInstancesInfo0.current.partTimes.nowInPart, + allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.current), + calculatedTimings: partInstancesInfo0.current.calculatedTimings, + }, + true + ) : undefined, partInstancesInfo0.next - ? removeInfiniteContinuations({ - part: partInstancesInfo0.next.partInstance, - onTimeline: !!partInstancesInfo0.current?.partInstance?.part?.autoNext, //TODO -QL - nowInPart: partInstancesInfo0.next.nowInPart, - allPieces: partInstancesInfo0.next.pieceInstances, - calculatedTimings: partInstancesInfo0.next.calculatedTimings, - }) + ? removeInfiniteContinuations( + { + part: partInstancesInfo0.next.partInstance, + onTimeline: !!partInstancesInfo0.current?.partInstance?.part?.autoNext, //TODO -QL + nowInPart: partInstancesInfo0.next.partTimes.nowInPart, + allPieces: partInstancesInfo0.next.pieceInstances, + calculatedTimings: partInstancesInfo0.next.calculatedTimings, + }, + false + ) : undefined, ]) // Track the previous info for checking how the timeline will be built let previousPartInfo: PartInstanceAndPieceInstances | undefined if (partInstancesInfo0.previous) { - previousPartInfo = removeInfiniteContinuations({ - part: partInstancesInfo0.previous.partInstance, - onTimeline: true, - nowInPart: partInstancesInfo0.previous.nowInPart, - allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.previous), - calculatedTimings: partInstancesInfo0.previous.calculatedTimings, - }) + previousPartInfo = removeInfiniteContinuations( + { + part: partInstancesInfo0.previous.partInstance, + onTimeline: true, + nowInPart: 
partInstancesInfo0.previous.partTimes.nowInPart, + allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.previous), + calculatedTimings: partInstancesInfo0.previous.calculatedTimings, + }, + false + ) } // TODO: Do we need to use processAndPrunePieceInstanceTimings on these pieces? In theory yes, but that gets messy and expensive. @@ -129,6 +152,9 @@ export async function getLookeaheadObjects( const piecesByPart = new Map>() for (const piece of piecesToSearch) { + // Don't lookahead any rundown owned pieces, that should only happen once they become PieceInstances + if (!piece.startPartId) continue + const pieceInstance = wrapPieceToInstance(piece, protectString(''), protectString(''), true) const existing = piecesByPart.get(piece.startPartId) if (existing) { diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index e3c710ba17..0c47b429d9 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -288,13 +288,6 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBa */ queuePartInstanceTimingEvent(partInstanceId: PartInstanceId): void - /** - * Queue a `NotifyCurrentlyPlayingPart` operation to be performed upon completion of this Playout operation - * @param rundownId The Rundown to report the notification to - * @param partInstance The PartInstance the event is in relation to - */ - queueNotifyCurrentlyPlayingPartEvent(rundownId: RundownId, partInstance: PlayoutPartInstanceModel | null): void - /** * Remove all loaded PartInstances marked as `rehearsal` from this RundownPlaylist */ diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 771bcfd752..1392edc3d4 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ 
b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -53,15 +53,13 @@ import { DeferredAfterSaveFunction, DeferredFunction, PlayoutModel, PlayoutModel import { writePartInstancesAndPieceInstances, writeAdlibTestingSegments } from './SavePlayoutModel.js' import { PlayoutPieceInstanceModel } from '../PlayoutPieceInstanceModel.js' import { DatabasePersistedModel } from '../../../modelBase.js' -import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { StudioBaselineHelper } from '../../../studio/model/StudioBaselineHelper.js' -import { EventsJobs } from '@sofie-automation/corelib/dist/worker/events' import { QuickLoopService } from '../services/QuickLoopService.js' import { calculatePartTimings, PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper.js' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' export class PlayoutModelReadonlyImpl implements PlayoutModelReadonly { public readonly playlistId: RundownPlaylistId @@ -283,7 +281,6 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou #timelineHasChanged = false #pendingPartInstanceTimingEvents = new Set() - #pendingNotifyCurrentlyPlayingPartEvent = new Map() get hackDeletedPartInstanceIds(): PartInstanceId[] { const result: PartInstanceId[] = [] @@ -528,14 +525,6 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou this.#pendingPartInstanceTimingEvents.add(partInstanceId) } - queueNotifyCurrentlyPlayingPartEvent(rundownId: RundownId, 
partInstance: PlayoutPartInstanceModel | null): void { - if (partInstance && partInstance.partInstance.part.shouldNotifyCurrentPlayingPart) { - this.#pendingNotifyCurrentlyPlayingPartEvent.set(rundownId, partInstance.partInstance.part.externalId) - } else if (!partInstance) { - this.#pendingNotifyCurrentlyPlayingPartEvent.set(rundownId, null) - } - } - removeAllRehearsalPartInstances(): void { const partInstancesToRemove: PartInstanceId[] = [] @@ -703,21 +692,6 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou } this.#pendingPartInstanceTimingEvents.clear() - for (const [rundownId, partExternalId] of this.#pendingNotifyCurrentlyPlayingPartEvent) { - // This is low-prio, defer so that it's executed well after publications has been updated, - // so that the playout gateway has had the chance to learn about the timeline changes - this.context - .queueEventJob(EventsJobs.NotifyCurrentlyPlayingPart, { - rundownId: rundownId, - isRehearsal: !!this.playlist.rehearsal, - partExternalId: partExternalId, - }) - .catch((e) => { - logger.warn(`Failed to queue NotifyCurrentlyPlayingPart job: ${stringifyError(e)}`) - }) - } - this.#pendingNotifyCurrentlyPlayingPartEvent.clear() - if (span) span.end() } @@ -832,7 +806,7 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou return this.timelineImpl } - setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { + setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void { this.#baselineHelper.setExpectedPackages(packages) } setExpectedPlayoutItemsForStudioBaseline(playoutItems: ExpectedPlayoutItemStudio[]): void { diff --git a/packages/job-worker/src/playout/resolvedPieces.ts b/packages/job-worker/src/playout/resolvedPieces.ts index f28b928506..4bbc96a8c6 100644 --- a/packages/job-worker/src/playout/resolvedPieces.ts +++ b/packages/job-worker/src/playout/resolvedPieces.ts @@ -4,6 +4,7 @@ import { SourceLayers 
} from '@sofie-automation/corelib/dist/dataModel/ShowStyle import { JobContext } from '../jobs/index.js' import { getCurrentTime } from '../lib/index.js' import { + createPartCurrentTimes, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, } from '@sofie-automation/corelib/dist/playout/processAndPrune' @@ -26,15 +27,14 @@ export function getResolvedPiecesForCurrentPartInstance( ): ResolvedPieceInstance[] { if (now === undefined) now = getCurrentTime() - const partStarted = partInstance.partInstance.timings?.plannedStartedPlayback - const nowInPart = partStarted ? now - partStarted : 0 + const partTimes = createPartCurrentTimes(now, partInstance.partInstance.timings?.plannedStartedPlayback) const preprocessedPieces = processAndPrunePieceInstanceTimings( sourceLayers, partInstance.pieceInstances.map((p) => p.pieceInstance), - nowInPart + partTimes ) - return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(nowInPart, instance)) + return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(partTimes, instance)) } export function getResolvedPiecesForPartInstancesOnTimeline( @@ -45,7 +45,7 @@ export function getResolvedPiecesForPartInstancesOnTimeline( // With no current part, there are no timings to consider if (!partInstancesInfo.current) return [] - const currentPartStarted = partInstancesInfo.current.partStarted ?? now + const currentPartStarted = partInstancesInfo.current.partTimes.partStartTime ?? 
now const nextPartStarted = partInstancesInfo.current.partInstance.part.autoNext && @@ -57,9 +57,9 @@ export function getResolvedPiecesForPartInstancesOnTimeline( // Calculate the next part if needed let nextResolvedPieces: ResolvedPieceInstance[] = [] if (partInstancesInfo.next && nextPartStarted != null) { - const nowInPart = partInstancesInfo.next.nowInPart + const partTimes = partInstancesInfo.next.partTimes nextResolvedPieces = partInstancesInfo.next.pieceInstances.map((instance) => - resolvePrunedPieceInstance(nowInPart, instance) + resolvePrunedPieceInstance(partTimes, instance) ) // Translate start to absolute times @@ -67,9 +67,9 @@ export function getResolvedPiecesForPartInstancesOnTimeline( } // Calculate the current part - const nowInCurrentPart = partInstancesInfo.current.nowInPart + const currentPartTimes = partInstancesInfo.current.partTimes const currentResolvedPieces = partInstancesInfo.current.pieceInstances.map((instance) => - resolvePrunedPieceInstance(nowInCurrentPart, instance) + resolvePrunedPieceInstance(currentPartTimes, instance) ) // Translate start to absolute times @@ -77,16 +77,16 @@ export function getResolvedPiecesForPartInstancesOnTimeline( // Calculate the previous part let previousResolvedPieces: ResolvedPieceInstance[] = [] - if (partInstancesInfo.previous?.partStarted) { - const nowInPart = partInstancesInfo.previous.nowInPart + if (partInstancesInfo.previous?.partTimes.partStartTime) { + const partTimes = partInstancesInfo.previous.partTimes previousResolvedPieces = partInstancesInfo.previous.pieceInstances.map((instance) => - resolvePrunedPieceInstance(nowInPart, instance) + resolvePrunedPieceInstance(partTimes, instance) ) // Translate start to absolute times offsetResolvedStartAndCapDuration( previousResolvedPieces, - partInstancesInfo.previous.partStarted, + partInstancesInfo.previous.partTimes.partStartTime, currentPartStarted ) } diff --git a/packages/job-worker/src/playout/setNext.ts 
b/packages/job-worker/src/playout/setNext.ts index 7134917332..8739e289a3 100644 --- a/packages/job-worker/src/playout/setNext.ts +++ b/packages/job-worker/src/playout/setNext.ts @@ -8,6 +8,7 @@ import { PlayoutPartInstanceModel } from './model/PlayoutPartInstanceModel.js' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel.js' import { fetchPiecesThatMayBeActiveForPart, + getBaselineInfinitesForPart, getPieceInstancesForPart, syncPlayheadInfinitesForNextPartInstance, } from './infinites.js' @@ -283,7 +284,13 @@ async function prepareExistingPartInstanceForBeingNexted( playoutModel: PlayoutModel, instance: PlayoutPartInstanceModel ): Promise { - await syncPlayheadInfinitesForNextPartInstance(context, playoutModel, playoutModel.currentPartInstance, instance) + await syncPlayheadInfinitesForNextPartInstance( + context, + playoutModel, + undefined, // Any ingest model must have been fully written before we get here + playoutModel.currentPartInstance, + instance + ) return instance } @@ -297,6 +304,8 @@ async function preparePartInstanceForPartBeingNexted( const rundown = playoutModel.getRundown(nextPart.rundownId) if (!rundown) throw new Error(`Could not find rundown ${nextPart.rundownId}`) + const partInstanceId = protectString('') // Replaced inside playoutModel.createInstanceForPart + const possiblePieces = await fetchPiecesThatMayBeActiveForPart(context, playoutModel, undefined, nextPart) const newPieceInstances = getPieceInstancesForPart( context, @@ -305,9 +314,21 @@ async function preparePartInstanceForPartBeingNexted( rundown, nextPart, possiblePieces, - protectString('') // Replaced inside playoutModel.createInstanceForPart + partInstanceId ) + if (currentPartInstance === null) { + // This is the first take of the rundown, ensure the baseline infinites are loaded + const baselineInfinites = await getBaselineInfinitesForPart( + context, + playoutModel, + undefined, // Any ingest model must have been fully written before we get here + nextPart, + 
partInstanceId + ) + newPieceInstances.push(...baselineInfinites) + } + return playoutModel.createInstanceForPart(nextPart, newPieceInstances) } diff --git a/packages/job-worker/src/playout/snapshot.ts b/packages/job-worker/src/playout/snapshot.ts index 6f7cc36a73..a9705631c5 100644 --- a/packages/job-worker/src/playout/snapshot.ts +++ b/packages/job-worker/src/playout/snapshot.ts @@ -1,4 +1,9 @@ -import { ExpectedPackageDBType, getExpectedPackageId } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' +import { + ExpectedPackageDB, + ExpectedPackageDBType, + ExpectedPackageIngestSource, + getExpectedPackageIdFromIngestSource, +} from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { AdLibActionId, ExpectedPackageId, @@ -25,12 +30,14 @@ import { CoreRundownPlaylistSnapshot } from '@sofie-automation/corelib/dist/snap import { unprotectString, ProtectedString, protectString } from '@sofie-automation/corelib/dist/protectedString' import { saveIntoDb } from '../db/changes.js' import { getPartId, getSegmentId } from '../ingest/lib.js' -import { assertNever, getRandomId, literal } from '@sofie-automation/corelib/dist/lib' +import { assertNever, getRandomId, literal, omit } from '@sofie-automation/corelib/dist/lib' import { logger } from '../logging.js' import { JSONBlobParse, JSONBlobStringify } from '@sofie-automation/shared-lib/dist/lib/JSONBlob' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { RundownOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { SofieIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/SofieIngestDataCache' +import * as PackagesPreR53 from '@sofie-automation/corelib/dist/dataModel/Old/ExpectedPackagesR52' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' class IdMapWithGenerator> extends Map { getOrGenerate(key: V): V { @@ -197,7 +204,6 @@ export async function handleRestorePlaylistSnapshot( 
rundownId: rd._id, } rd.studioId = snapshot.playlist.studioId - rd.notifiedCurrentPlayingPartExternalId = undefined } // TODO: This is too naive. Ideally we should unset it if it isnt valid, as anything other than a match is likely to have issues. @@ -240,14 +246,15 @@ export async function handleRestorePlaylistSnapshot( delete pieceOld.rundownId } if (pieceOld.partId) { - piece.startPartId = pieceOld.partId + const partId = pieceOld.partId + piece.startPartId = partId delete pieceOld.partId - piece.startSegmentId = partSegmentIds[unprotectString(piece.startPartId)] + piece.startSegmentId = partSegmentIds[unprotectString(partId)] } } // List any ids that need updating on other documents - const rundownIdMap = new Map() + const rundownIdMap = new IdMapWithGenerator() const getNewRundownId = (oldRundownId: RundownId) => { const rundownId = rundownIdMap.get(oldRundownId) if (!rundownId) { @@ -289,14 +296,18 @@ export async function handleRestorePlaylistSnapshot( for (const piece of snapshot.pieces) { const oldId = piece._id piece.startRundownId = getNewRundownId(piece.startRundownId) - piece.startPartId = partIdMap.getOrGenerateAndWarn( - piece.startPartId, - `piece.startPartId=${piece.startPartId} of piece=${piece._id}` - ) - piece.startSegmentId = segmentIdMap.getOrGenerateAndWarn( - piece.startSegmentId, - `piece.startSegmentId=${piece.startSegmentId} of piece=${piece._id}` - ) + if (piece.startPartId) { + piece.startPartId = partIdMap.getOrGenerateAndWarn( + piece.startPartId, + `piece.startPartId=${piece.startPartId} of piece=${piece._id}` + ) + } + if (piece.startSegmentId) { + piece.startSegmentId = segmentIdMap.getOrGenerateAndWarn( + piece.startSegmentId, + `piece.startSegmentId=${piece.startSegmentId} of piece=${piece._id}` + ) + } piece._id = getRandomId() pieceIdMap.set(oldId, piece._id) } @@ -340,46 +351,179 @@ export async function handleRestorePlaylistSnapshot( ) const expectedPackageIdMap = new Map() - for (const expectedPackage of 
snapshot.expectedPackages) { - const oldId = expectedPackage._id - - switch (expectedPackage.fromPieceType) { - case ExpectedPackageDBType.PIECE: - case ExpectedPackageDBType.ADLIB_PIECE: - case ExpectedPackageDBType.ADLIB_ACTION: - case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: - case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: { - expectedPackage.pieceId = pieceIdMap.getOrGenerateAndWarn( - expectedPackage.pieceId, - `expectedPackage.pieceId=${expectedPackage.pieceId}` - ) - - expectedPackage._id = getExpectedPackageId(expectedPackage.pieceId, expectedPackage.blueprintPackageId) - - break + snapshot.expectedPackages = snapshot.expectedPackages.map((expectedPackage0): ExpectedPackageDB => { + if ('fromPieceType' in expectedPackage0) { + const expectedPackage = expectedPackage0 as unknown as PackagesPreR53.ExpectedPackageDB + + let source: ExpectedPackageIngestSource | undefined + + switch (expectedPackage.fromPieceType) { + case PackagesPreR53.ExpectedPackageDBType.PIECE: + case PackagesPreR53.ExpectedPackageDBType.ADLIB_PIECE: + case PackagesPreR53.ExpectedPackageDBType.ADLIB_ACTION: + source = { + fromPieceType: expectedPackage.fromPieceType, + pieceId: pieceIdMap.getOrGenerateAndWarn( + expectedPackage.pieceId, + `expectedPackage.pieceId=${expectedPackage.pieceId}` + ) as any, + partId: partIdMap.getOrGenerateAndWarn( + expectedPackage.partId, + `expectedPackage.partId=${expectedPackage.partId}` + ), + segmentId: segmentIdMap.getOrGenerateAndWarn( + expectedPackage.segmentId, + `expectedPackage.segmentId=${expectedPackage.segmentId}` + ), + } + + break + case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case PackagesPreR53.ExpectedPackageDBType.BASELINE_ADLIB_ACTION: { + source = { + fromPieceType: expectedPackage.fromPieceType, + pieceId: pieceIdMap.getOrGenerateAndWarn( + expectedPackage.pieceId, + `expectedPackage.pieceId=${expectedPackage.pieceId}` + ) as any, + } + + break + } + + case 
PackagesPreR53.ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: { + source = { + fromPieceType: expectedPackage.fromPieceType, + } + break + } + case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB: + case PackagesPreR53.ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + case PackagesPreR53.ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: { + // ignore, these are not present in the rundown snapshot anyway. + logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) + break + } + + default: + assertNever(expectedPackage) + break } - case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: { - expectedPackage._id = getExpectedPackageId( - expectedPackage.rundownId, - expectedPackage.blueprintPackageId - ) - break + + if (!source) { + logger.warn(`Failed to fixup ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) + // Define a fake source, so that it gets imported. + source = { + fromPieceType: ExpectedPackageDBType.PIECE, + pieceId: protectString('fakePiece'), + partId: protectString('fakePart'), + segmentId: protectString('fakeSegment'), + } } - case ExpectedPackageDBType.BUCKET_ADLIB: - case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: - case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: { - // ignore, these are not present in the rundown snapshot anyway. - logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) - break + + const packageRundownId: RundownId | null = + 'rundownId' in expectedPackage + ? 
rundownIdMap.getOrGenerateAndWarn( + expectedPackage.rundownId, + `expectedPackage.rundownId=${expectedPackage.rundownId}` + ) + : null + const newPackageId = getExpectedPackageIdFromIngestSource( + packageRundownId || context.studioId, + source, + expectedPackage.blueprintPackageId + ) + + const newExpectedPackage: ExpectedPackageDB = { + _id: newPackageId, + studioId: context.studioId, + rundownId: packageRundownId, + bucketId: null, + created: expectedPackage.created, + package: { + ...(omit( + expectedPackage, + '_id', + 'studioId', + 'fromPieceType', + 'blueprintPackageId', + 'contentVersionHash', + // @ts-expect-error only sometimes present + 'rundownId', + 'pieceId', + 'partId', + 'segmentId', + 'pieceExternalId' + ) as ExpectedPackage.Any), + _id: expectedPackage.blueprintPackageId, + }, + + ingestSources: [source], } - default: - assertNever(expectedPackage) - break - } + expectedPackageIdMap.set(expectedPackage._id, newExpectedPackage._id) + return newExpectedPackage + } else { + const expectedPackage = expectedPackage0 + const oldId = expectedPackage._id + + for (const source of expectedPackage.ingestSources) { + switch (source.fromPieceType) { + case ExpectedPackageDBType.PIECE: + case ExpectedPackageDBType.ADLIB_PIECE: + case ExpectedPackageDBType.ADLIB_ACTION: + source.pieceId = pieceIdMap.getOrGenerateAndWarn( + source.pieceId, + `expectedPackage.pieceId=${source.pieceId}` + ) as any + source.partId = partIdMap.getOrGenerateAndWarn( + source.partId, + `expectedPackage.partId=${source.partId}` + ) + source.segmentId = segmentIdMap.getOrGenerateAndWarn( + source.segmentId, + `expectedPackage.segmentId=${source.segmentId}` + ) + + break + case ExpectedPackageDBType.BASELINE_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: { + source.pieceId = pieceIdMap.getOrGenerateAndWarn( + source.pieceId, + `expectedPackage.pieceId=${source.pieceId}` + ) as any + + break + } + case 
ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: { + // No properties to update + break + } + case ExpectedPackageDBType.BUCKET_ADLIB: + case ExpectedPackageDBType.BUCKET_ADLIB_ACTION: + case ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS: { + // ignore, these are not present in the rundown snapshot anyway. + logger.warn(`Unexpected ExpectedPackage in snapshot: ${JSON.stringify(expectedPackage)}`) + break + } + default: + assertNever(source) + break + } + } - expectedPackageIdMap.set(oldId, expectedPackage._id) - } + // Regenerate the ID from the new rundownId and packageId + expectedPackage._id = getExpectedPackageIdFromIngestSource( + expectedPackage.rundownId || expectedPackage.studioId, + expectedPackage.ingestSources[0], + expectedPackage.package._id + ) + + expectedPackageIdMap.set(oldId, expectedPackage._id) + return expectedPackage + } + }) snapshot.playlist.rundownIdsInOrder = snapshot.playlist.rundownIdsInOrder.map((id) => rundownIdMap.get(id) ?? id) diff --git a/packages/job-worker/src/playout/take.ts b/packages/job-worker/src/playout/take.ts index 67b8f51742..036b3bb053 100644 --- a/packages/job-worker/src/playout/take.ts +++ b/packages/job-worker/src/playout/take.ts @@ -22,7 +22,10 @@ import { WrappedShowStyleBlueprint } from '../blueprints/cache.js' import { innerStopPieces } from './adlibUtils.js' import { reportPartInstanceHasStarted, reportPartInstanceHasStopped } from './timings/partPlayback.js' import { convertPartInstanceToBlueprints, convertResolvedPieceInstanceToBlueprints } from '../blueprints/context/lib.js' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { TakeNextPartProps } from '@sofie-automation/corelib/dist/worker/studio' import { runJobWithPlayoutModel } from './lock.js' import _ from 'underscore' @@ -541,10 +544,11 @@ export 
function updatePartInstanceOnTake( } // calculate and cache playout timing properties, so that we don't depend on the previousPartInstance: + const partTimes = createPartCurrentTimes(getCurrentTime(), null) const tmpTakePieces = processAndPrunePieceInstanceTimings( showStyle.sourceLayers, takePartInstance.pieceInstances.map((p) => p.pieceInstance), - 0 + partTimes ) const partPlayoutTimings = playoutModel.calculatePartTimings(currentPartInstance, takePartInstance, tmpTakePieces) @@ -554,7 +558,7 @@ export function updatePartInstanceOnTake( export async function afterTake( context: JobContext, playoutModel: PlayoutModel, - takePartInstance: PlayoutPartInstanceModel + _takePartInstance: PlayoutPartInstanceModel ): Promise { const span = context.startSpan('afterTake') // This function should be called at the end of a "take" event (when the Parts have been updated) @@ -562,8 +566,6 @@ export async function afterTake( await updateTimeline(context, playoutModel) - playoutModel.queueNotifyCurrentlyPlayingPartEvent(takePartInstance.partInstance.rundownId, takePartInstance) - if (span) span.end() } diff --git a/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts b/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts index 18322e9567..b45ef32750 100644 --- a/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts +++ b/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts @@ -10,7 +10,10 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { transformTimeline } from '@sofie-automation/corelib/dist/playout/timeline' import { deleteAllUndefinedProperties, getRandomId } from '@sofie-automation/corelib/dist/lib' import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' -import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + PieceInstanceWithTimings, +} from 
'@sofie-automation/corelib/dist/playout/processAndPrune' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { IBlueprintPieceType, PieceLifespan } from '@sofie-automation/blueprints-integration' import { getPartGroupId } from '@sofie-automation/corelib/dist/playout/ids' @@ -70,6 +73,8 @@ function transformTimelineIntoSimplifiedForm(res: RundownTimelineResult) { * inside of this will have their own tests to stress difference scenarios. */ describe('buildTimelineObjsForRundown', () => { + const currentTime = 5678 + function createMockPlaylist(selectedPartInfos: SelectedPartInstancesTimelineInfo): DBRundownPlaylist { function convertSelectedPartInstance( info: SelectedPartInstanceTimelineInfo | undefined @@ -196,8 +201,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -217,8 +221,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -243,8 +246,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', {}, @@ -277,16 +279,14 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: 
createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance('part1'), pieceInstances: [createMockPieceInstance('piece1')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -312,16 +312,14 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0', { autoNext: true, expectedDuration: 5000 }), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance('part1'), pieceInstances: [createMockPieceInstance('piece1')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -347,8 +345,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -363,8 +360,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -391,8 +387,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { 
previous: { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -407,8 +402,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: { @@ -441,8 +435,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -462,8 +455,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: { @@ -496,16 +488,14 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0', { autoNext: true, expectedDuration: 5000 }), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance('part1'), pieceInstances: [createMockPieceInstance('piece1')], calculatedTimings: { @@ -540,8 +530,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: 
SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -561,8 +550,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, @@ -601,8 +589,7 @@ describe('buildTimelineObjsForRundown', () => { describe('infinite pieces', () => { const PREVIOUS_PART_INSTANCE: SelectedPartInstanceTimelineInfo = { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -623,8 +610,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: PREVIOUS_PART_INSTANCE, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [ createMockPieceInstance('piece0'), @@ -655,8 +641,7 @@ describe('buildTimelineObjsForRundown', () => { ], }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -684,8 +669,7 @@ describe('buildTimelineObjsForRundown', () => { ], }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -712,8 +696,7 @@ describe('buildTimelineObjsForRundown', () => { pieceInstances: 
[...PREVIOUS_PART_INSTANCE.pieceInstances, infinitePiece], }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0'), continueInfinitePiece(infinitePiece)], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -736,8 +719,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -752,8 +734,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, @@ -782,8 +763,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -798,8 +778,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, @@ -831,8 +810,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -850,8 +828,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - 
partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 7661c773fe..9d8338665e 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -22,6 +22,8 @@ import { getResolvedPiecesForPartInstancesOnTimeline } from '../resolvedPieces.j import { processAndPrunePieceInstanceTimings, PieceInstanceWithTimings, + createPartCurrentTimes, + PartCurrentTimes, } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { StudioPlayoutModel, StudioPlayoutModelBase } from '../../studio/model/StudioPlayoutModel.js' import { getLookeaheadObjects } from '../lookahead/index.js' @@ -41,8 +43,11 @@ import { getPartTimingsOrDefaults, PartCalculatedTimings } from '@sofie-automati import { applyAbPlaybackForTimeline } from '../abPlayback/index.js' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { PlayoutPartInstanceModel } from '../model/PlayoutPartInstanceModel.js' -import { PersistentPlayoutStateStore } from '../../blueprints/context/services/PersistantStateStore.js' import { PlayoutChangedType } from '@sofie-automation/shared-lib/dist/peripheralDevice/peripheralDeviceAPI' +import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { PersistentPlayoutStateStore } from '../../blueprints/context/services/PersistantStateStore.js' + +const DEFAULT_ABSOLUTE_PIECE_PREPARE_TIME = 30000 function isModelForStudio(model: StudioPlayoutModelBase): model is StudioPlayoutModel { const tmp = model as StudioPlayoutModel @@ -86,7 +91,7 @@ export async function updateStudioTimeline( const studioBlueprint = context.studioBlueprint if (studioBlueprint) { - const watchedPackages = await WatchedPackagesHelper.create(context, { + const 
watchedPackages = await WatchedPackagesHelper.create(context, null, null, { fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, }) @@ -246,8 +251,7 @@ export interface SelectedPartInstancesTimelineInfo { next?: SelectedPartInstanceTimelineInfo } export interface SelectedPartInstanceTimelineInfo { - nowInPart: number - partStarted: number | undefined + partTimes: PartCurrentTimes partInstance: ReadonlyDeep pieceInstances: PieceInstanceWithTimings[] calculatedTimings: PartCalculatedTimings @@ -255,29 +259,44 @@ export interface SelectedPartInstanceTimelineInfo { } function getPartInstanceTimelineInfo( + absolutePiecePrepareTime: number, currentTime: Time, sourceLayers: SourceLayers, partInstance: PlayoutPartInstanceModel | null ): SelectedPartInstanceTimelineInfo | undefined { if (!partInstance) return undefined - const partStarted = partInstance.partInstance.timings?.plannedStartedPlayback - const nowInPart = partStarted === undefined ? 0 : currentTime - partStarted - const pieceInstances = processAndPrunePieceInstanceTimings( - sourceLayers, - partInstance.pieceInstances.map((p) => p.pieceInstance), - nowInPart - ) + const partTimes = createPartCurrentTimes(currentTime, partInstance.partInstance.timings?.plannedStartedPlayback) + + let regenerateTimelineAt: Time | undefined = undefined + + const rawPieceInstances: ReadonlyDeep[] = [] + for (const { pieceInstance } of partInstance.pieceInstances) { + if ( + pieceInstance.piece.enable.isAbsolute && + typeof pieceInstance.piece.enable.start === 'number' && + pieceInstance.piece.enable.start > currentTime + absolutePiecePrepareTime + ) { + // This absolute timed piece is starting too far in the future, ignore it + regenerateTimelineAt = Math.min( + regenerateTimelineAt ?? 
Number.POSITIVE_INFINITY, + pieceInstance.piece.enable.start - absolutePiecePrepareTime + ) + + continue + } + + rawPieceInstances.push(pieceInstance) + } const partInstanceWithOverrides = partInstance.getPartInstanceWithQuickLoopOverrides() return { partInstance: partInstanceWithOverrides, - pieceInstances, - nowInPart, - partStarted, + pieceInstances: processAndPrunePieceInstanceTimings(sourceLayers, rawPieceInstances, partTimes), + partTimes, // Approximate `calculatedTimings`, for the partInstances which already have it cached - calculatedTimings: getPartTimingsOrDefaults(partInstanceWithOverrides, pieceInstances), - regenerateTimelineAt: undefined, // Future use + calculatedTimings: getPartTimingsOrDefaults(partInstanceWithOverrides, rawPieceInstances), + regenerateTimelineAt, } } @@ -318,10 +337,27 @@ async function getTimelineRundown( } const currentTime = getCurrentTime() + const absolutePiecePrepareTime = + context.studio.settings.rundownGlobalPiecesPrepareTime || DEFAULT_ABSOLUTE_PIECE_PREPARE_TIME const partInstancesInfo: SelectedPartInstancesTimelineInfo = { - current: getPartInstanceTimelineInfo(currentTime, showStyle.sourceLayers, currentPartInstance), - next: getPartInstanceTimelineInfo(currentTime, showStyle.sourceLayers, nextPartInstance), - previous: getPartInstanceTimelineInfo(currentTime, showStyle.sourceLayers, previousPartInstance), + current: getPartInstanceTimelineInfo( + absolutePiecePrepareTime, + currentTime, + showStyle.sourceLayers, + currentPartInstance + ), + next: getPartInstanceTimelineInfo( + absolutePiecePrepareTime, + currentTime, + showStyle.sourceLayers, + nextPartInstance + ), + previous: getPartInstanceTimelineInfo( + absolutePiecePrepareTime, + currentTime, + showStyle.sourceLayers, + previousPartInstance + ), } if (partInstancesInfo.next && nextPartInstance) { diff --git a/packages/job-worker/src/playout/timeline/multi-gateway.ts b/packages/job-worker/src/playout/timeline/multi-gateway.ts index dbc29f9327..9db7d43267 100644 
--- a/packages/job-worker/src/playout/timeline/multi-gateway.ts +++ b/packages/job-worker/src/playout/timeline/multi-gateway.ts @@ -343,18 +343,24 @@ function setPlannedTimingsOnPieceInstance( } if (typeof pieceInstance.pieceInstance.piece.enable.start === 'number') { - const plannedStart = partPlannedStart + pieceInstance.pieceInstance.piece.enable.start + const plannedStart = + (pieceInstance.pieceInstance.piece.enable.isAbsolute ? 0 : partPlannedStart) + + pieceInstance.pieceInstance.piece.enable.start pieceInstance.setPlannedStartedPlayback(plannedStart) const userDurationEnd = pieceInstance.pieceInstance.userDuration && 'endRelativeToPart' in pieceInstance.pieceInstance.userDuration ? pieceInstance.pieceInstance.userDuration.endRelativeToPart : null - const plannedEnd = - userDurationEnd ?? - (pieceInstance.pieceInstance.piece.enable.duration - ? plannedStart + pieceInstance.pieceInstance.piece.enable.duration - : partPlannedEnd) + + let plannedEnd: number | undefined = userDurationEnd ?? undefined + if (plannedEnd === undefined) { + if (pieceInstance.pieceInstance.piece.enable.duration !== undefined) { + plannedEnd = plannedStart + pieceInstance.pieceInstance.piece.enable.duration + } else if (!pieceInstance.pieceInstance.piece.enable.isAbsolute) { + plannedEnd = partPlannedEnd + } + } pieceInstance.setPlannedStoppedPlayback(plannedEnd) } diff --git a/packages/job-worker/src/playout/timeline/part.ts b/packages/job-worker/src/playout/timeline/part.ts index fc2d546100..b697f7da27 100644 --- a/packages/job-worker/src/playout/timeline/part.ts +++ b/packages/job-worker/src/playout/timeline/part.ts @@ -32,7 +32,7 @@ export function transformPartIntoTimeline( ): Array { const span = context.startSpan('transformPartIntoTimeline') - const nowInParentGroup = partInfo.nowInPart + const nowInParentGroup = partInfo.partTimes.nowInPart const partTimings = partInfo.calculatedTimings const outTransition = partInfo.partInstance.part.outTransition ?? 
null diff --git a/packages/job-worker/src/playout/timeline/pieceGroup.ts b/packages/job-worker/src/playout/timeline/pieceGroup.ts index cd6a5318e8..aa7f379dfb 100644 --- a/packages/job-worker/src/playout/timeline/pieceGroup.ts +++ b/packages/job-worker/src/playout/timeline/pieceGroup.ts @@ -136,8 +136,8 @@ export function createPieceGroupAndCap( let resolvedEndCap: number | string | undefined // If the start has been adjusted, the end needs to be updated to compensate if (typeof pieceInstance.resolvedEndCap === 'number') { - resolvedEndCap = pieceInstance.resolvedEndCap + (pieceStartOffset ?? 0) - } else if (pieceInstance.resolvedEndCap) { + resolvedEndCap = pieceInstance.resolvedEndCap - (pieceStartOffset ?? 0) + } else if (pieceInstance.resolvedEndCap || controlObj.enable.end === 'now') { // TODO - there could already be a piece with a cap of 'now' that we could use as our end time // As the cap is for 'now', rather than try to get tsr to understand `end: 'now'`, we can create a 'now' object to tranlate it const nowObj = literal>({ @@ -157,7 +157,13 @@ export function createPieceGroupAndCap( priority: 0, }) capObjs.push(nowObj) - resolvedEndCap = `#${nowObj.id}.start + ${pieceInstance.resolvedEndCap.offsetFromNow}` + + resolvedEndCap = `#${nowObj.id}.start + ${pieceInstance.resolvedEndCap?.offsetFromNow ?? 
0}` + + // If the object has an end of now, we can remove it as it will be replaced by the `resolvedEndCap` + if (controlObj.enable.end === 'now') { + delete controlObj.enable.end + } } if (controlObj.enable.duration !== undefined || controlObj.enable.end !== undefined) { diff --git a/packages/job-worker/src/playout/timeline/rundown.ts b/packages/job-worker/src/playout/timeline/rundown.ts index ba92dd2e21..a473ed066f 100644 --- a/packages/job-worker/src/playout/timeline/rundown.ts +++ b/packages/job-worker/src/playout/timeline/rundown.ts @@ -275,9 +275,25 @@ function generateCurrentInfinitePieceObjects( return [] } - const infiniteGroup = createPartGroup(currentPartInfo.partInstance, { - start: `#${timingContext.currentPartGroup.id}.start`, // This gets overriden with a concrete time if the original piece is known to have already started - }) + const { infiniteGroupEnable, pieceEnable, nowInParent } = calculateInfinitePieceEnable( + currentPartInfo, + timingContext, + pieceInstance, + currentTime, + currentPartInstanceTimings + ) + + const { pieceInstanceWithUpdatedEndCap, cappedInfiniteGroupEnable } = applyInfinitePieceGroupEndCap( + currentPartInfo, + timingContext, + pieceInstance, + infiniteGroupEnable, + currentPartInstanceTimings, + nextPartInstanceTimings, + nextPartInfinites.get(pieceInstance.infinite.infiniteInstanceId) + ) + + const infiniteGroup = createPartGroup(currentPartInfo.partInstance, cappedInfiniteGroupEnable) infiniteGroup.id = getInfinitePartGroupId(pieceInstance._id) // This doesnt want to belong to a part, so force the ids infiniteGroup.priority = 1 @@ -287,6 +303,34 @@ function generateCurrentInfinitePieceObjects( groupClasses.push('continues_infinite') } + // Still show objects flagged as 'HoldMode.EXCEPT' if this is a infinite continuation as they belong to the previous too + const isOriginOfInfinite = pieceInstance.piece.startPartId !== currentPartInfo.partInstance.part._id + const isInHold = activePlaylist.holdState === 
RundownHoldState.ACTIVE + + return [ + infiniteGroup, + ...transformPieceGroupAndObjects( + activePlaylist._id, + infiniteGroup, + nowInParent, + pieceInstanceWithUpdatedEndCap, + pieceEnable, + 0, + groupClasses, + isInHold, + isOriginOfInfinite + ), + ] +} + +function calculateInfinitePieceEnable( + currentPartInfo: SelectedPartInstanceTimelineInfo, + timingContext: RundownTimelineTimingContext, + pieceInstance: ReadonlyDeep, + // infiniteGroup: TimelineObjGroupPart & OnGenerateTimelineObjExt, + currentTime: number, + currentPartInstanceTimings: PartCalculatedTimings +) { const pieceEnable = getPieceEnableInsidePart( pieceInstance, currentPartInstanceTimings, @@ -295,8 +339,28 @@ function generateCurrentInfinitePieceObjects( timingContext.currentPartGroup.enable.duration !== undefined ) - let nowInParent = currentPartInfo.nowInPart // Where is 'now' inside of the infiniteGroup? - if (pieceInstance.plannedStartedPlayback !== undefined) { + let infiniteGroupEnable: PartEnable = { + start: `#${timingContext.currentPartGroup.id}.start`, // This gets overriden with a concrete time if the original piece is known to have already started + } + + let nowInParent = currentPartInfo.partTimes.nowInPart // Where is 'now' inside of the infiniteGroup? + if (pieceInstance.piece.enable.isAbsolute) { + // Piece is absolute, so we should use the absolute time. This is a special case for pieces belonging to the rundown directly. + + const infiniteGroupStart = pieceInstance.plannedStartedPlayback ?? pieceInstance.piece.enable.start + + if (typeof infiniteGroupStart === 'number') { + nowInParent = currentTime - infiniteGroupStart + } else { + // We should never hit this, but in case start is "now" + nowInParent = 0 + } + + infiniteGroupEnable = { start: infiniteGroupStart } + pieceEnable.start = 0 + + // Future: should this consider the prerollDuration? + } else if (pieceInstance.plannedStartedPlayback !== undefined) { // We have a absolute start time, so we should use that. 
let infiniteGroupStart = pieceInstance.plannedStartedPlayback nowInParent = currentTime - pieceInstance.plannedStartedPlayback @@ -313,30 +377,47 @@ function generateCurrentInfinitePieceObjects( pieceEnable.start = 0 } - infiniteGroup.enable = { start: infiniteGroupStart } + infiniteGroupEnable = { start: infiniteGroupStart } // If an end time has been set by a hotkey, then update the duration to be correct if (pieceInstance.userDuration && pieceInstance.piece.enable.start !== 'now') { if ('endRelativeToPart' in pieceInstance.userDuration) { - infiniteGroup.enable.duration = + infiniteGroupEnable.duration = pieceInstance.userDuration.endRelativeToPart - pieceInstance.piece.enable.start } else { - infiniteGroup.enable.end = 'now' + infiniteGroupEnable.end = 'now' } } } + return { + pieceEnable, + infiniteGroupEnable, + nowInParent, + } +} + +function applyInfinitePieceGroupEndCap( + currentPartInfo: SelectedPartInstanceTimelineInfo, + timingContext: RundownTimelineTimingContext, + pieceInstance: ReadonlyDeep, + infiniteGroupEnable: Readonly, + currentPartInstanceTimings: PartCalculatedTimings, + nextPartInstanceTimings: PartCalculatedTimings | null, + infiniteInNextPart: PieceInstanceWithTimings | undefined +) { + const cappedInfiniteGroupEnable: PartEnable = { ...infiniteGroupEnable } + // If this infinite piece continues to the next part, and has a duration then we should respect that in case it is really close to the take const hasDurationOrEnd = (enable: TSR.Timeline.TimelineEnable) => enable.duration !== undefined || enable.end !== undefined - const infiniteInNextPart = nextPartInfinites.get(pieceInstance.infinite.infiniteInstanceId) if ( infiniteInNextPart && - !hasDurationOrEnd(infiniteGroup.enable) && + !hasDurationOrEnd(cappedInfiniteGroupEnable) && hasDurationOrEnd(infiniteInNextPart.piece.enable) ) { // infiniteGroup.enable.end = infiniteInNextPart.piece.enable.end - infiniteGroup.enable.duration = infiniteInNextPart.piece.enable.duration + 
cappedInfiniteGroupEnable.duration = infiniteInNextPart.piece.enable.duration } const pieceInstanceWithUpdatedEndCap: PieceInstanceWithTimings = { ...pieceInstance } @@ -344,16 +425,16 @@ function generateCurrentInfinitePieceObjects( if (pieceInstance.resolvedEndCap) { // If the cap is a number, it is relative to the part, not the parent group so needs to be handled here if (typeof pieceInstance.resolvedEndCap === 'number') { - infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.start + ${pieceInstance.resolvedEndCap}` - delete infiniteGroup.enable.duration + cappedInfiniteGroupEnable.end = `#${timingContext.currentPartGroup.id}.start + ${pieceInstance.resolvedEndCap}` + delete cappedInfiniteGroupEnable.duration delete pieceInstanceWithUpdatedEndCap.resolvedEndCap } } else if ( // If this piece does not continue in the next part, then set it to end with the part it belongs to !infiniteInNextPart && currentPartInfo.partInstance.part.autoNext && - infiniteGroup.enable.duration === undefined && - infiniteGroup.enable.end === undefined + cappedInfiniteGroupEnable.duration === undefined && + cappedInfiniteGroupEnable.end === undefined ) { let endOffset = 0 @@ -365,27 +446,10 @@ function generateCurrentInfinitePieceObjects( endOffset -= nextPartInstanceTimings.fromPartKeepalive // cap relative to the currentPartGroup - infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.end + ${endOffset}` + cappedInfiniteGroupEnable.end = `#${timingContext.currentPartGroup.id}.end + ${endOffset}` } - // Still show objects flagged as 'HoldMode.EXCEPT' if this is a infinite continuation as they belong to the previous too - const isOriginOfInfinite = pieceInstance.piece.startPartId !== currentPartInfo.partInstance.part._id - const isInHold = activePlaylist.holdState === RundownHoldState.ACTIVE - - return [ - infiniteGroup, - ...transformPieceGroupAndObjects( - activePlaylist._id, - infiniteGroup, - nowInParent, - pieceInstanceWithUpdatedEndCap, - pieceEnable, - 
0, - groupClasses, - isInHold, - isOriginOfInfinite - ), - ] + return { pieceInstanceWithUpdatedEndCap, cappedInfiniteGroupEnable } } function generatePreviousPartInstanceObjects( diff --git a/packages/job-worker/src/playout/timings/piecePlayback.ts b/packages/job-worker/src/playout/timings/piecePlayback.ts index 66e466403f..fc2bd2bd1c 100644 --- a/packages/job-worker/src/playout/timings/piecePlayback.ts +++ b/packages/job-worker/src/playout/timings/piecePlayback.ts @@ -23,24 +23,19 @@ export function onPiecePlaybackStarted( ): void { const playlist = playoutModel.playlist + if (!playlist.activationId) { + logger.warn(`onPiecePlaybackStarted: Received for inactive RundownPlaylist "${playlist._id}"`) + return + } + const partInstance = playoutModel.getPartInstance(data.partInstanceId) if (!partInstance) { - if (!playlist.activationId) { - logger.warn(`onPiecePlaybackStarted: Received for inactive RundownPlaylist "${playlist._id}"`) - } else { - throw new Error(`PartInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) - } - return + throw new Error(`PartInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) } const pieceInstance = partInstance.getPieceInstance(data.pieceInstanceId) if (!pieceInstance) { - if (!playlist.activationId) { - logger.warn(`onPiecePlaybackStarted: Received for inactive RundownPlaylist "${playlist._id}"`) - } else { - throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) - } - return + throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) } const isPlaying = !!( @@ -75,6 +70,11 @@ export function onPiecePlaybackStopped( ): void { const playlist = playoutModel.playlist + if (!playlist.activationId) { + logger.warn(`onPiecePlaybackStopped: Received for inactive RundownPlaylist "${playlist._id}"`) + return + } + const partInstance = playoutModel.getPartInstance(data.partInstanceId) if 
(!partInstance) { // PartInstance not found, so we can rely on the onPartPlaybackStopped callback erroring @@ -83,12 +83,7 @@ export function onPiecePlaybackStopped( const pieceInstance = partInstance.getPieceInstance(data.pieceInstanceId) if (!pieceInstance) { - if (!playlist.activationId) { - logger.warn(`onPiecePlaybackStopped: Received for inactive RundownPlaylist "${playlist._id}"`) - } else { - throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) - } - return + throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) } const isPlaying = !!( @@ -171,6 +166,8 @@ function reportPieceHasStopped( pieceInstance.setPlannedStoppedPlayback(timestamp) } - playoutModel.queuePartInstanceTimingEvent(pieceInstance.pieceInstance.partInstanceId) + if (pieceInstance.pieceInstance.partInstanceId) { + playoutModel.queuePartInstanceTimingEvent(pieceInstance.pieceInstance.partInstanceId) + } } } diff --git a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts index 99336b67c6..ab13dee2b1 100644 --- a/packages/job-worker/src/studio/model/StudioBaselineHelper.ts +++ b/packages/job-worker/src/studio/model/StudioBaselineHelper.ts @@ -1,16 +1,21 @@ import { JobContext } from '../../jobs/index.js' import { - ExpectedPackageDB, - ExpectedPackageDBFromStudioBaselineObjects, ExpectedPackageDBType, + ExpectedPackageIngestSource, + ExpectedPackageIngestSourceStudioBaseline, + getExpectedPackageIdFromIngestSource, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { saveIntoDb } from '../../db/changes.js' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' +import type { IngestExpectedPackage } from '../../ingest/model/IngestExpectedPackage.js' +import { 
setDefaultIdOnExpectedPackages } from '../../ingest/expectedPackages.js' +import { writeExpectedPackagesChangesForRundown } from '../../ingest/model/implementation/SaveIngestModel.js' export class StudioBaselineHelper { readonly #context: JobContext - #pendingExpectedPackages: ExpectedPackageDBFromStudioBaselineObjects[] | undefined + #pendingExpectedPackages: IngestExpectedPackage[] | undefined #pendingExpectedPlayoutItems: ExpectedPlayoutItemStudio[] | undefined constructor(context: JobContext) { @@ -21,8 +26,21 @@ export class StudioBaselineHelper { return !!this.#pendingExpectedPackages || !!this.#pendingExpectedPlayoutItems } - setExpectedPackages(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { - this.#pendingExpectedPackages = packages + setExpectedPackages(packages: ExpectedPackage.Any[]): void { + const source: ExpectedPackageIngestSource = { fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS } + + setDefaultIdOnExpectedPackages(packages) + + this.#pendingExpectedPackages = packages.map( + (expectedPackage) => + ({ + _id: getExpectedPackageIdFromIngestSource(this.#context.studioId, source, expectedPackage._id), + + package: expectedPackage, + + source: source, + }) satisfies IngestExpectedPackage + ) } setExpectedPlayoutItems(playoutItems: ExpectedPlayoutItemStudio[]): void { this.#pendingExpectedPlayoutItems = playoutItems @@ -39,15 +57,7 @@ export class StudioBaselineHelper { ) : undefined, this.#pendingExpectedPackages - ? saveIntoDb( - this.#context, - this.#context.directCollections.ExpectedPackages, - { - studioId: this.#context.studioId, - fromPieceType: ExpectedPackageDBType.STUDIO_BASELINE_OBJECTS, - }, - this.#pendingExpectedPackages - ) + ? 
writeExpectedPackagesChangesForRundown(this.#context, null, this.#pendingExpectedPackages) : undefined, ]) diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts index f481983c69..ad768271d2 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModel.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModel.ts @@ -1,15 +1,15 @@ -import { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { +import type { RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { PeripheralDevice } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' +import type { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' +import type { TimelineComplete, TimelineCompleteGenerationVersions, TimelineObjGeneric, } from '@sofie-automation/corelib/dist/dataModel/Timeline' -import { BaseModel } from '../../modelBase.js' -import { ReadonlyDeep } from 'type-fest' -import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' -import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' +import type { BaseModel } from '../../modelBase.js' +import type { ReadonlyDeep } from 'type-fest' +import type { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' +import type { ExpectedPackage } from '@sofie-automation/blueprints-integration' export interface StudioPlayoutModelBaseReadonly { /** @@ -33,7 +33,7 @@ export interface StudioPlayoutModelBase extends StudioPlayoutModelBaseReadonly { * Update the ExpectedPackages for the StudioBaseline of the current Studio * @param packages 
ExpectedPackages to store */ - setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void + setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void /** * Update the ExpectedPlayoutItems for the StudioBaseline of the current Studio * @param playoutItems ExpectedPlayoutItems to store diff --git a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts index 670886cff0..b7bda8f32c 100644 --- a/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts +++ b/packages/job-worker/src/studio/model/StudioPlayoutModelImpl.ts @@ -15,9 +15,9 @@ import { IS_PRODUCTION } from '../../environment.js' import { logger } from '../../logging.js' import { StudioPlayoutModel } from './StudioPlayoutModel.js' import { DatabasePersistedModel } from '../../modelBase.js' -import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { StudioBaselineHelper } from './StudioBaselineHelper.js' +import { ExpectedPackage } from '@sofie-automation/blueprints-integration' /** * This is a model used for studio operations. 
@@ -78,7 +78,7 @@ export class StudioPlayoutModelImpl implements StudioPlayoutModel { return this.#isMultiGatewayMode } - setExpectedPackagesForStudioBaseline(packages: ExpectedPackageDBFromStudioBaselineObjects[]): void { + setExpectedPackagesForStudioBaseline(packages: ExpectedPackage.Any[]): void { this.#baselineHelper.setExpectedPackages(packages) } setExpectedPlayoutItemsForStudioBaseline(playoutItems: ExpectedPlayoutItemStudio[]): void { diff --git a/packages/job-worker/src/workers/events/jobs.ts b/packages/job-worker/src/workers/events/jobs.ts index 1cd2abed7d..272e682633 100644 --- a/packages/job-worker/src/workers/events/jobs.ts +++ b/packages/job-worker/src/workers/events/jobs.ts @@ -1,10 +1,6 @@ import { JobContext } from '../../jobs/index.js' import { EventsJobFunc, EventsJobs } from '@sofie-automation/corelib/dist/worker/events' -import { - handleNotifyCurrentlyPlayingPart, - handlePartInstanceTimings, - handleRundownDataHasChanged, -} from '../../events/handle.js' +import { handlePartInstanceTimings, handleRundownDataHasChanged } from '../../events/handle.js' type ExecutableFunction = ( context: JobContext, @@ -18,5 +14,4 @@ export type EventsJobHandlers = { export const eventJobHandlers: EventsJobHandlers = { [EventsJobs.PartInstanceTimings]: handlePartInstanceTimings, [EventsJobs.RundownDataChanged]: handleRundownDataHasChanged, - [EventsJobs.NotifyCurrentlyPlayingPart]: handleNotifyCurrentlyPlayingPart, } diff --git a/packages/job-worker/src/workers/ingest/jobs.ts b/packages/job-worker/src/workers/ingest/jobs.ts index b8cdbb0779..2bc85736ca 100644 --- a/packages/job-worker/src/workers/ingest/jobs.ts +++ b/packages/job-worker/src/workers/ingest/jobs.ts @@ -29,7 +29,7 @@ import { handleUpdatedSegment, handleUpdatedSegmentRanks, } from '../../ingest/ingestSegmentJobs.js' -import { handleExpectedPackagesRegenerate, handleUpdatedPackageInfoForRundown } from '../../ingest/packageInfo.js' +import { handleUpdatedPackageInfoForRundown } from 
'../../ingest/packageInfo.js' import { handleBucketActionModify, handleBucketActionRegenerateExpectedPackages, @@ -81,7 +81,6 @@ export const ingestJobHandlers: IngestJobHandlers = { [IngestJobs.MosMoveStory]: wrapMosIngestJob(handleMosMoveStories), [IngestJobs.MosSwapStory]: wrapMosIngestJob(handleMosSwapStories), - [IngestJobs.ExpectedPackagesRegenerate]: handleExpectedPackagesRegenerate, [IngestJobs.PackageInfosUpdatedRundown]: handleUpdatedPackageInfoForRundown, [IngestJobs.UserRemoveRundown]: handleUserRemoveRundown, diff --git a/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts b/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts index 8861d109d9..ae58442274 100644 --- a/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts +++ b/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts @@ -9,6 +9,7 @@ import _ from 'underscore' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { PartInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { + createPartCurrentTimes, PieceInstanceWithTimings, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, @@ -88,25 +89,24 @@ export class PieceInstancesHandler extends PublicationCollection< filterActive: boolean ): PieceInstanceWithTimings[] { // Approximate when 'now' is in the PartInstance, so that any adlibbed Pieces will be timed roughly correctly - const partStarted = partInstance?.timings?.plannedStartedPlayback - const nowInPart = partStarted === undefined ? 
0 : Date.now() - partStarted + const partTimes = createPartCurrentTimes(Date.now(), partInstance?.timings?.plannedStartedPlayback) const prunedPieceInstances = processAndPrunePieceInstanceTimings( this._sourceLayers, pieceInstances, - nowInPart, - false, + partTimes, false ) if (!filterActive) return prunedPieceInstances return prunedPieceInstances.filter((pieceInstance) => { - const resolvedPieceInstance = resolvePrunedPieceInstance(nowInPart, pieceInstance) + const resolvedPieceInstance = resolvePrunedPieceInstance(partTimes, pieceInstance) return ( - resolvedPieceInstance.resolvedStart <= nowInPart && + resolvedPieceInstance.resolvedStart <= partTimes.nowInPart && (resolvedPieceInstance.resolvedDuration == null || - resolvedPieceInstance.resolvedStart + resolvedPieceInstance.resolvedDuration > nowInPart) && + resolvedPieceInstance.resolvedStart + resolvedPieceInstance.resolvedDuration > + partTimes.nowInPart) && pieceInstance.piece.virtual !== true && pieceInstance.disabled !== true ) diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index 0bc86c22c3..49db69d466 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -2,6 +2,7 @@ import { BucketId, OrganizationId, PartId, + PeripheralDeviceId, RundownId, RundownPlaylistActivationId, RundownPlaylistId, @@ -118,6 +119,11 @@ export enum MeteorPubSub { */ timelineForStudio = 'timelineForStudio', + /** + * Ingest status of rundowns for a PeripheralDevice + */ + ingestDeviceRundownStatusTestTool = 'ingestDeviceRundownStatusTestTool', + /** * Fetch the simplified playout UI view of the specified ShowStyleBase */ @@ -218,6 +224,11 @@ export interface MeteorPubSubTypes { studioId: StudioId, token?: string ) => PeripheralDevicePubSubCollectionsNames.studioTimeline + + [MeteorPubSub.ingestDeviceRundownStatusTestTool]: ( + peripheralDeviceId: PeripheralDeviceId + ) => PeripheralDevicePubSubCollectionsNames.ingestRundownStatus + 
[MeteorPubSub.uiShowStyleBase]: (showStyleBaseId: ShowStyleBaseId) => CustomCollectionName.UIShowStyleBase /** Subscribe to one or all studios */ [MeteorPubSub.uiStudio]: (studioId: StudioId | null) => CustomCollectionName.UIStudio diff --git a/packages/meteor-lib/src/collections/ExpectedPackages.ts b/packages/meteor-lib/src/collections/ExpectedPackages.ts index ca7d656a5c..5815971453 100644 --- a/packages/meteor-lib/src/collections/ExpectedPackages.ts +++ b/packages/meteor-lib/src/collections/ExpectedPackages.ts @@ -6,6 +6,7 @@ import { htmlTemplateGetSteps, htmlTemplateGetFileNamesFromSteps, } from '@sofie-automation/shared-lib/dist/package-manager/helpers' +import { ReadonlyDeep } from 'type-fest' export function getPreviewPackageSettings( expectedPackage: ExpectedPackage.Any @@ -55,7 +56,7 @@ export function getThumbnailPackageSettings( } } export function getSideEffect( - expectedPackage: ExpectedPackage.Base, + expectedPackage: ReadonlyDeep, studio: Pick ): ExpectedPackage.Base['sideEffect'] { return deepExtend( diff --git a/packages/mos-gateway/src/$schemas/devices.json b/packages/mos-gateway/src/$schemas/devices.json index 78a895f86f..08f3c143c3 100644 --- a/packages/mos-gateway/src/$schemas/devices.json +++ b/packages/mos-gateway/src/$schemas/devices.json @@ -140,8 +140,36 @@ }, "required": ["id", "host"], "additionalProperties": false + }, + "statuses": { + "type": "object", + "ui:title": "Statuses", + "title": "MosDeviceStatusesConfig", + "properties": { + "enabled": { + "type": "boolean", + "ui:title": "Write Statuses to NRCS", + "ui:description": "", + "ui:summaryTitle": "Statuses", + "default": true + }, + "sendInRehearsal": { + "type": "boolean", + "ui:title": "Send when in Rehearsal mode", + "ui:description": "", + "default": false + }, + "onlySendPlay": { + "type": "boolean", + "ui:title": "Only send PLAY statuses", + "ui:description": "", + "default": false + } + }, + "required": ["enabled"], + "additionalProperties": false } }, - "required": 
["primary"], + "required": ["primary", "statuses"], "additionalProperties": false } diff --git a/packages/mos-gateway/src/CoreMosDeviceHandler.ts b/packages/mos-gateway/src/CoreMosDeviceHandler.ts index 242cc84055..faff9a50a7 100644 --- a/packages/mos-gateway/src/CoreMosDeviceHandler.ts +++ b/packages/mos-gateway/src/CoreMosDeviceHandler.ts @@ -4,6 +4,7 @@ import { protectString, Observer, PeripheralDevicePubSub, + stringifyError, } from '@sofie-automation/server-core-integration' import { IMOSConnectionStatus, @@ -21,7 +22,6 @@ import { IMOSItem, IMOSROReadyToAir, IMOSROFullStory, - IMOSObjectStatus, IMOSROAck, getMosTypes, MosTypes, @@ -112,9 +112,7 @@ export class CoreMosDeviceHandler { deviceName: this._mosDevice.idPrimary, }) this.core.on('error', (err) => { - this._coreParentHandler.logger.error( - 'Core Error: ' + (typeof err === 'string' ? err : err.message || err.toString()) - ) + this._coreParentHandler.logger.error(`Core Error: ${stringifyError(err)}`) }) this.setupSubscriptionsAndObservers() @@ -138,7 +136,7 @@ export class CoreMosDeviceHandler { Promise.all([ this.core.autoSubscribe(PeripheralDevicePubSub.peripheralDeviceCommands, this.core.deviceId), ]).catch((e) => { - this._coreParentHandler.logger.error(e) + this._coreParentHandler.logger.error(stringifyError(e)) }) this._coreParentHandler.logger.info('CoreMos: Setting up observers..') @@ -349,42 +347,6 @@ export class CoreMosDeviceHandler { // console.log('GOT REPLY', results) return this.fixMosData(ro) } - async setROStatus(roId: string, status: IMOSObjectStatus): Promise { - // console.log('setStoryStatus') - const result = await this._mosDevice.sendRunningOrderStatus({ - ID: this.mosTypes.mosString128.create(roId), - Status: status, - Time: this.mosTypes.mosTime.create(new Date()), - }) - - // console.log('got result', result) - return this.fixMosData(result) - } - async setStoryStatus(roId: string, storyId: string, status: IMOSObjectStatus): Promise { - // console.log('setStoryStatus') - const 
result = await this._mosDevice.sendStoryStatus({ - RunningOrderId: this.mosTypes.mosString128.create(roId), - ID: this.mosTypes.mosString128.create(storyId), - Status: status, - Time: this.mosTypes.mosTime.create(new Date()), - }) - - // console.log('got result', result) - return this.fixMosData(result) - } - async setItemStatus(roId: string, storyId: string, itemId: string, status: IMOSObjectStatus): Promise { - // console.log('setStoryStatus') - const result = await this._mosDevice.sendItemStatus({ - RunningOrderId: this.mosTypes.mosString128.create(roId), - StoryId: this.mosTypes.mosString128.create(storyId), - ID: this.mosTypes.mosString128.create(itemId), - Status: status, - Time: this.mosTypes.mosTime.create(new Date()), - }) - - // console.log('got result', result) - return this.fixMosData(result) - } async replaceStoryItem( roID: string, storyID: string, diff --git a/packages/mos-gateway/src/connector.ts b/packages/mos-gateway/src/connector.ts index a6ea0de258..fe4275bcbd 100644 --- a/packages/mos-gateway/src/connector.ts +++ b/packages/mos-gateway/src/connector.ts @@ -5,6 +5,7 @@ import { PeripheralDeviceId, loadCertificatesFromDisk, CertificatesConfig, + stringifyError, } from '@sofie-automation/server-core-integration' export interface Config { @@ -36,18 +37,23 @@ export class Connector { this._logger.info('Process initialized') this._logger.info('Initializing Core...') - await this.initCore(certificates) + this.coreHandler = await CoreHandler.create( + this._logger, + this._config.core, + certificates, + this._config.device + ) this._logger.info('Initializing Mos...') - await this.initMos() + this.mosHandler = await MosHandler.create(this._logger, this._config.mos, this.coreHandler) this._logger.info('Initialization done') } catch (e: any) { - this._logger.error('Error during initialization:', e, e.stack) + this._logger.error(`Error during initialization: ${stringifyError(e)}`, e.stack) this._logger.info('Shutting down in 10 seconds!') - 
this.dispose().catch((e2) => this._logger.error(e2)) + this.dispose().catch((e2) => this._logger.error(stringifyError(e2))) setTimeout(() => { // eslint-disable-next-line n/no-process-exit @@ -55,32 +61,7 @@ export class Connector { }, 10 * 1000) } } - async initCore(certificates: Buffer[]): Promise { - if (!this._config) { - throw Error('_config is undefined!') - } - - this.coreHandler = new CoreHandler(this._logger, this._config.device) - - if (!this.coreHandler) { - throw Error('coreHandler is undefined!') - } - - return this.coreHandler.init(this._config.core, certificates) - } - async initMos(): Promise { - this.mosHandler = new MosHandler(this._logger) - if (!this._config) { - throw Error('_config is undefined!') - } - - if (!this.coreHandler) { - throw Error('coreHandler is undefined!') - } - - return this.mosHandler.init(this._config.mos, this.coreHandler) - } async dispose(): Promise { if (this.mosHandler) await this.mosHandler.dispose() diff --git a/packages/mos-gateway/src/coreHandler.ts b/packages/mos-gateway/src/coreHandler.ts index e6a38c5849..b2a9d8c1fb 100644 --- a/packages/mos-gateway/src/coreHandler.ts +++ b/packages/mos-gateway/src/coreHandler.ts @@ -41,12 +41,23 @@ export class CoreHandler { private _coreConfig?: CoreConfig private _certificates?: Buffer[] - constructor(logger: Winston.Logger, deviceOptions: DeviceConfig) { + public static async create( + logger: Winston.Logger, + config: CoreConfig, + certificates: Buffer[], + deviceOptions: DeviceConfig + ): Promise { + const handler = new CoreHandler(logger, deviceOptions) + await handler.init(config, certificates) + return handler + } + + private constructor(logger: Winston.Logger, deviceOptions: DeviceConfig) { this.logger = logger this._deviceOptions = deviceOptions } - async init(config: CoreConfig, certificates: Buffer[]): Promise { + private async init(config: CoreConfig, certificates: Buffer[]): Promise { // this.logger.info('========') this._coreConfig = config this._certificates = 
certificates @@ -224,7 +235,7 @@ export class CoreHandler { // console.log('cb done') }) .catch((e) => { - this.logger.error(e) + this.logger.error(stringifyError(e)) }) } // eslint-disable-next-line @typescript-eslint/ban-ts-comment diff --git a/packages/mos-gateway/src/mosHandler.ts b/packages/mos-gateway/src/mosHandler.ts index 6f0ffd241f..d0101d8944 100644 --- a/packages/mos-gateway/src/mosHandler.ts +++ b/packages/mos-gateway/src/mosHandler.ts @@ -1,6 +1,5 @@ import { MosConnection, - IMOSDevice, IMOSConnectionStatus, IMOSRunningOrder, IMOSROAck, @@ -16,7 +15,6 @@ import { IMOSROReadyToAir, IMOSROFullStory, IConnectionConfig, - IMOSDeviceConnectionOptions, MosDevice, IMOSListMachInfo, IMOSString128, @@ -27,7 +25,11 @@ import { import * as Winston from 'winston' import { CoreHandler } from './coreHandler.js' import { CoreMosDeviceHandler } from './CoreMosDeviceHandler.js' -import { Observer, PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/server-core-integration' +import { + Observer, + PeripheralDevicePubSubCollectionsNames, + stringifyError, +} from '@sofie-automation/server-core-integration' import { DEFAULT_MOS_TIMEOUT_TIME, DEFAULT_MOS_HEARTBEAT_INTERVAL, @@ -35,12 +37,15 @@ import { import { MosGatewayConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayOptionsTypes' import { MosDeviceConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' import { PeripheralDeviceForDevice } from '@sofie-automation/server-core-integration' +import _ from 'underscore' +import { MosStatusHandler } from './mosStatus/handler.js' +import { isPromise } from 'util/types' export interface MosConfig { self: IConnectionConfig // devices: Array } -export type MosSubDeviceSettings = Record< +type MosSubDeviceSettings = Record< string, { type: '' @@ -48,29 +53,53 @@ export type MosSubDeviceSettings = Record< } > +/** + * Represents a connection in mos-connection, paired with some additional data + */ +interface 
MosDeviceHandle { + readonly deviceId: string + readonly mosDevice: MosDevice + readonly deviceOptions: Readonly + + // Once connected, a core handler is setup + coreMosHandler?: CoreMosDeviceHandler | Promise + + // If writing back story/item status is enabled, the setup handler + statusHandler?: MosStatusHandler +} + export class MosHandler { public mos: MosConnection | undefined public mosOptions: MosConfig | undefined public debugLogging = false - private allMosDevices: { [id: string]: { mosDevice: IMOSDevice; coreMosHandler?: CoreMosDeviceHandler } } = {} - private _ownMosDevices: { [deviceId: string]: MosDevice } = {} + /** Map of mos devices that have been created */ + private readonly _allMosDevices = new Map() + private _logger: Winston.Logger private _disposed = false private _settings?: MosGatewayConfig - private _openMediaHotStandby: Record private _coreHandler: CoreHandler | undefined private _observers: Array> = [] private _triggerupdateDevicesTimeout: any = null private mosTypes: MosTypes - constructor(logger: Winston.Logger) { + public static async create( + logger: Winston.Logger, + config: MosConfig, + coreHandler: CoreHandler + ): Promise { + const handler = new MosHandler(logger) + await handler.init(config, coreHandler) + return handler + } + + private constructor(logger: Winston.Logger) { this._logger = logger - this._openMediaHotStandby = {} this.mosTypes = getMosTypes(this.strict) // temporary, another will be set upon init() } - async init(config: MosConfig, coreHandler: CoreHandler): Promise { + private async init(config: MosConfig, coreHandler: CoreHandler): Promise { this.mosOptions = config this._coreHandler = coreHandler /*{ @@ -121,11 +150,9 @@ export class MosHandler { return Promise.resolve() } } - setupObservers(): void { + private setupObservers(): void { if (this._observers.length) { - this._observers.forEach((obs) => { - obs.stop() - }) + this._observers.forEach((obs) => obs.stop()) this._observers = [] } 
this._logger.info('Renewing observers') @@ -141,15 +168,9 @@ export class MosHandler { const deviceObserver = this._coreHandler.core.observe( PeripheralDevicePubSubCollectionsNames.peripheralDeviceForDevice ) - deviceObserver.added = () => { - this._deviceOptionsChanged() - } - deviceObserver.changed = () => { - this._deviceOptionsChanged() - } - deviceObserver.removed = () => { - this._deviceOptionsChanged() - } + deviceObserver.added = () => this._deviceOptionsChanged() + deviceObserver.changed = () => this._deviceOptionsChanged() + deviceObserver.removed = () => this._deviceOptionsChanged() this._observers.push(deviceObserver) this._deviceOptionsChanged() @@ -193,7 +214,7 @@ export class MosHandler { } this._triggerupdateDevicesTimeout = setTimeout(() => { this._updateDevices().catch((e) => { - this._logger.error(e) + this._logger.error(stringifyError(e)) }) }, 20) } @@ -224,171 +245,203 @@ export class MosHandler { } this.debugLog('rawMessage', source, type, message) }) - this.mos.on('info', (message: any) => { - this._logger.info(message) + this.mos.on('info', (message, data) => { + this._logger.info(message, data) }) - this.mos.on('error', (error: any) => { - this._logger.error(error) + this.mos.on('error', (error) => { + this._logger.error(stringifyError(error)) }) - this.mos.on('warning', (warning: any) => { - this._logger.error(warning) + this.mos.on('warning', (warning) => { + this._logger.error(stringifyError(warning)) }) - // eslint-disable-next-line @typescript-eslint/no-misused-promises - this.mos.onConnection(async (mosDevice: IMOSDevice): Promise => { - // a new connection to a device has been made - this._logger.info('new mosConnection established: ' + mosDevice.idPrimary + ', ' + mosDevice.idSecondary) - try { - this.allMosDevices[mosDevice.idPrimary] = { mosDevice: mosDevice } + this.mos.onConnection((mosDevice: MosDevice): void => { + this.setupMosDevice(mosDevice).catch((e) => { + this._logger.error(stringifyError(e)) + }) + }) - if 
(!this._coreHandler) throw Error('_coreHandler is undefined!') + // Open mos-server for connections: + await this.mos.init() + } + private async setupMosDevice(mosDevice: MosDevice): Promise { + // a new connection to a device has been made + this._logger.info('new mosConnection established: ' + mosDevice.idPrimary + ', ' + mosDevice.idSecondary) + try { + const deviceEntry = Array.from(this._allMosDevices.values()).find( + (d) => + d.mosDevice.idPrimary === mosDevice.idPrimary && d.mosDevice.idSecondary === mosDevice.idSecondary + ) - const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this, { - openMediaHotStandby: mosDevice.idSecondary - ? this._openMediaHotStandby[mosDevice.idSecondary] - : false, - }) - // this._logger.info('mosDevice registered -------------') - // Setup message flow between the devices: - - this.allMosDevices[mosDevice.idPrimary].coreMosHandler = coreMosHandler - - // Initial Status check: - const connectionStatus = mosDevice.getConnectionStatus() - coreMosHandler.onMosConnectionChanged(connectionStatus) // initial check - // Profile 0: ------------------------------------------------- - mosDevice.onConnectionChange((newStatus: IMOSConnectionStatus) => { - // MOSDevice >>>> Core - coreMosHandler.onMosConnectionChanged(newStatus) - }) - coreMosHandler.onMosConnectionChanged(mosDevice.getConnectionStatus()) - mosDevice.onRequestMachineInfo(async () => { - // MOSDevice >>>> Core - return coreMosHandler.getMachineInfo() - }) + if (!deviceEntry) { + // We got a connection for a connection which shouldn't exist.. 
+ this._logger.error(`Got connection for mosDevice "${mosDevice.idPrimary}" which doesn't exist!`) + return + } - // Profile 1: ------------------------------------------------- - /* + if (deviceEntry.mosDevice !== mosDevice) { + // Our state doesn't match, don't try to use the connection it could be from a previous connection attempt + this._logger.error( + `Got connection for mosDevice "${mosDevice.idPrimary}" which differs to the one setup!` + ) + return + } + + // This is either a promise, if a handler is currently being setup, or the handler itself + if (deviceEntry.coreMosHandler) { + this._logger.error(`Got connection for mosDevice "${mosDevice.idPrimary}" which is already setup!`) + return + } + + if (!this._coreHandler) throw Error('_coreHandler is undefined!') + + const openMediaHotStandby = deviceEntry.deviceOptions.secondary?.openMediaHotStandby || false + + const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this, { + openMediaHotStandby: mosDevice.idSecondary ? 
openMediaHotStandby : false, + }) + // this._logger.info('mosDevice registered -------------') + // Setup message flow between the devices: + + deviceEntry.coreMosHandler = coreMosHandler + + // Initial Status check: + // Profile 0: ------------------------------------------------- + mosDevice.onConnectionChange((newStatus: IMOSConnectionStatus) => { + // MOSDevice >>>> Core + coreMosHandler.onMosConnectionChanged(newStatus) + + // Setup the status handler upon first connection to the NRCS + const isConnected = newStatus.PrimaryConnected || newStatus.SecondaryConnected + if (deviceEntry.deviceOptions.statuses?.enabled && !deviceEntry.statusHandler && isConnected) { + // Creating the handler at this point avoids sending status messages before the connection is established, + // allowing for a sync at startup without needing manual queueing + deviceEntry.statusHandler = new MosStatusHandler( + this._logger, + mosDevice, + coreMosHandler, + deviceEntry.deviceOptions.statuses, + this.strict + ) + } + }) + coreMosHandler.onMosConnectionChanged(mosDevice.getConnectionStatus()) + mosDevice.onRequestMachineInfo(async () => { + // MOSDevice >>>> Core + return coreMosHandler.getMachineInfo() + }) + + // Profile 1: ------------------------------------------------- + /* mosDevice.onRequestMOSObject((objId: string) => { // coreMosHandler.fetchMosObject(objId) // return Promise }) */ - // onRequestMOSObject: (cb: (objId: string) => Promise) => void - // onRequestAllMOSObjects: (cb: () => Promise>) => void - // getMOSObject: (objId: string) => Promise - // getAllMOSObjects: () => Promise> - // Profile 2: ------------------------------------------------- - mosDevice.onCreateRunningOrder(async (ro: IMOSRunningOrder) => { - // MOSDevice >>>> Core - return this._getROAck(ro.ID, coreMosHandler.mosRoCreate(ro)) - }) - mosDevice.onReplaceRunningOrder(async (ro: IMOSRunningOrder) => { - // MOSDevice >>>> Core - return this._getROAck(ro.ID, coreMosHandler.mosRoReplace(ro)) - }) - 
mosDevice.onDeleteRunningOrder(async (runningOrderId: IMOSString128) => { - // MOSDevice >>>> Core - return this._getROAck(runningOrderId, coreMosHandler.mosRoDelete(runningOrderId)) - }) - mosDevice.onMetadataReplace(async (ro: IMOSRunningOrderBase) => { - // MOSDevice >>>> Core - return this._getROAck(ro.ID, coreMosHandler.mosRoMetadata(ro)) - }) - mosDevice.onRunningOrderStatus(async (status: IMOSRunningOrderStatus) => { - // MOSDevice >>>> Core - return this._getROAck(status.ID, coreMosHandler.mosRoStatus(status)) - }) - mosDevice.onStoryStatus(async (status: IMOSStoryStatus) => { - // MOSDevice >>>> Core - return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoStoryStatus(status)) - }) - mosDevice.onItemStatus(async (status: IMOSItemStatus) => { - // MOSDevice >>>> Core - return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoItemStatus(status)) - }) - mosDevice.onROInsertStories(async (Action: IMOSStoryAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryInsert(Action, Stories)) - }) - mosDevice.onROInsertItems(async (Action: IMOSItemAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemInsert(Action, Items)) - }) - mosDevice.onROReplaceStories(async (Action: IMOSStoryAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryReplace(Action, Stories)) - }) - mosDevice.onROReplaceItems(async (Action: IMOSItemAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemReplace(Action, Items)) - }) - mosDevice.onROMoveStories(async (Action: IMOSStoryAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryMove(Action, Stories)) - }) - mosDevice.onROMoveItems(async (Action: IMOSItemAction, Items: Array) => { - 
// MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemMove(Action, Items)) - }) - mosDevice.onRODeleteStories(async (Action: IMOSROAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryDelete(Action, Stories)) - }) - mosDevice.onRODeleteItems(async (Action: IMOSStoryAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemDelete(Action, Items)) - }) - mosDevice.onROSwapStories( - async (Action: IMOSROAction, StoryID0: IMOSString128, StoryID1: IMOSString128) => { - // MOSDevice >>>> Core - return this._getROAck( - Action.RunningOrderID, - coreMosHandler.mosRoStorySwap(Action, StoryID0, StoryID1) - ) - } - ) - mosDevice.onROSwapItems( - async (Action: IMOSStoryAction, ItemID0: IMOSString128, ItemID1: IMOSString128) => { - // MOSDevice >>>> Core - return this._getROAck( - Action.RunningOrderID, - coreMosHandler.mosRoItemSwap(Action, ItemID0, ItemID1) - ) - } - ) - mosDevice.onReadyToAir(async (Action: IMOSROReadyToAir) => { + // onRequestMOSObject: (cb: (objId: string) => Promise) => void + // onRequestAllMOSObjects: (cb: () => Promise>) => void + // getMOSObject: (objId: string) => Promise + // getAllMOSObjects: () => Promise> + // Profile 2: ------------------------------------------------- + mosDevice.onCreateRunningOrder(async (ro: IMOSRunningOrder) => { + // MOSDevice >>>> Core + return this._getROAck(ro.ID, coreMosHandler.mosRoCreate(ro)) + }) + mosDevice.onReplaceRunningOrder(async (ro: IMOSRunningOrder) => { + // MOSDevice >>>> Core + return this._getROAck(ro.ID, coreMosHandler.mosRoReplace(ro)) + }) + mosDevice.onDeleteRunningOrder(async (runningOrderId: IMOSString128) => { + // MOSDevice >>>> Core + return this._getROAck(runningOrderId, coreMosHandler.mosRoDelete(runningOrderId)) + }) + mosDevice.onMetadataReplace(async (ro: IMOSRunningOrderBase) => { + // MOSDevice >>>> Core + 
return this._getROAck(ro.ID, coreMosHandler.mosRoMetadata(ro)) + }) + mosDevice.onRunningOrderStatus(async (status: IMOSRunningOrderStatus) => { + // MOSDevice >>>> Core + return this._getROAck(status.ID, coreMosHandler.mosRoStatus(status)) + }) + mosDevice.onStoryStatus(async (status: IMOSStoryStatus) => { + // MOSDevice >>>> Core + return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoStoryStatus(status)) + }) + mosDevice.onItemStatus(async (status: IMOSItemStatus) => { + // MOSDevice >>>> Core + return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoItemStatus(status)) + }) + mosDevice.onROInsertStories(async (Action: IMOSStoryAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryInsert(Action, Stories)) + }) + mosDevice.onROInsertItems(async (Action: IMOSItemAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemInsert(Action, Items)) + }) + mosDevice.onROReplaceStories(async (Action: IMOSStoryAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryReplace(Action, Stories)) + }) + mosDevice.onROReplaceItems(async (Action: IMOSItemAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemReplace(Action, Items)) + }) + mosDevice.onROMoveStories(async (Action: IMOSStoryAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryMove(Action, Stories)) + }) + mosDevice.onROMoveItems(async (Action: IMOSItemAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemMove(Action, Items)) + }) + mosDevice.onRODeleteStories(async (Action: IMOSROAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, 
coreMosHandler.mosRoStoryDelete(Action, Stories)) + }) + mosDevice.onRODeleteItems(async (Action: IMOSStoryAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemDelete(Action, Items)) + }) + mosDevice.onROSwapStories( + async (Action: IMOSROAction, StoryID0: IMOSString128, StoryID1: IMOSString128) => { // MOSDevice >>>> Core - return this._getROAck(Action.ID, coreMosHandler.mosRoReadyToAir(Action)) - }) - // ---------------------------------------------------------------- - // Init actions - /* + return this._getROAck( + Action.RunningOrderID, + coreMosHandler.mosRoStorySwap(Action, StoryID0, StoryID1) + ) + } + ) + mosDevice.onROSwapItems(async (Action: IMOSStoryAction, ItemID0: IMOSString128, ItemID1: IMOSString128) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemSwap(Action, ItemID0, ItemID1)) + }) + mosDevice.onReadyToAir(async (Action: IMOSROReadyToAir) => { + // MOSDevice >>>> Core + return this._getROAck(Action.ID, coreMosHandler.mosRoReadyToAir(Action)) + }) + // ---------------------------------------------------------------- + // Init actions + /* mosDevice.getMachineInfo() .then((machineInfo: IMOSListMachInfo) => { }) */ - // Profile 3: ------------------------------------------------- - // Profile 4: ------------------------------------------------- - // onStory: (cb: (story: IMOSROFullStory) => Promise) => void - mosDevice.onRunningOrderStory(async (story: IMOSROFullStory) => { - // MOSDevice >>>> Core - return this._getROAck(story.RunningOrderId, coreMosHandler.mosRoFullStory(story)) - }) - } catch (e) { - this._logger.error('Error:', e) - } - }) - - // Open mos-server for connections: - await this.mos.init() + // Profile 3: ------------------------------------------------- + // Profile 4: ------------------------------------------------- + // onStory: (cb: (story: IMOSROFullStory) => Promise) => void + 
mosDevice.onRunningOrderStory(async (story: IMOSROFullStory) => { + // MOSDevice >>>> Core + return this._getROAck(story.RunningOrderId, coreMosHandler.mosRoFullStory(story)) + }) + } catch (e) { + this._logger.error(stringifyError(e)) + } } private sendStatusOfAllMosDevices() { // Send an update to Core of the status of all mos devices - for (const handler of Object.values<{ mosDevice: IMOSDevice; coreMosHandler?: CoreMosDeviceHandler }>( - this.allMosDevices - )) { - if (handler.coreMosHandler) { + for (const handler of this._allMosDevices.values()) { + if (handler.coreMosHandler && !isPromise(handler.coreMosHandler)) { handler.coreMosHandler.onMosConnectionChanged(handler.mosDevice.getConnectionStatus()) } } @@ -424,26 +477,28 @@ export class MosHandler { for (const [deviceId, device] of Object.entries<{ options: MosDeviceConfig }>(devices)) { if (device) { if (device.options.secondary) { - const fullSecondaryId = this._settings?.mosId + '_' + device.options.secondary.id - this._openMediaHotStandby[fullSecondaryId] = - device.options.secondary?.openMediaHotStandby || false // If the host isn't set, don't use secondary: if (!device.options.secondary.host || !device.options.secondary.id) delete device.options.secondary } - const oldDevice: MosDevice | null = this._getDevice(deviceId) + const oldDevice = this._allMosDevices.get(deviceId) if (!oldDevice) { this._logger.info('Initializing new device: ' + deviceId) devicesToAdd[deviceId] = device } else { - if ( - (oldDevice.primaryId || '') !== device.options.primary?.id || - (oldDevice.primaryHost || '') !== device.options.primary?.host || - (oldDevice.secondaryId || '') !== (device.options.secondary?.id || '') || - (oldDevice.secondaryHost || '') !== (device.options.secondary?.host || '') - ) { + // elsewhere the oldDevice.deviceOptions has been modified with defaults + const newOptionsWithDefaults = { + ...device.options, + primary: { + ...device.options.primary, + heartbeatInterval: + 
device.options.primary.heartbeatInterval || DEFAULT_MOS_HEARTBEAT_INTERVAL, + timeout: device.options.primary.timeout || DEFAULT_MOS_TIMEOUT_TIME, + }, + } + if (!_.isEqual(oldDevice.deviceOptions, newOptionsWithDefaults)) { this._logger.info('Re-initializing device: ' + deviceId) devicesToRemove[deviceId] = true devicesToAdd[deviceId] = device @@ -452,7 +507,7 @@ export class MosHandler { } } - for (const [deviceId, oldDevice] of Object.entries(this._ownMosDevices)) { + for (const [deviceId, oldDevice] of this._allMosDevices.entries()) { if (oldDevice && !devices[deviceId]) { this._logger.info('Un-initializing device: ' + deviceId) devicesToRemove[deviceId] = true @@ -472,29 +527,26 @@ export class MosHandler { ) } } - private async _addDevice(deviceId: string, deviceOptions: IMOSDeviceConnectionOptions): Promise { - if (this._getDevice(deviceId)) { + private async _addDevice(deviceId: string, deviceOptions0: MosDeviceConfig): Promise { + if (this._allMosDevices.has(deviceId)) { // the device is already there throw new Error('Unable to add device "' + deviceId + '", because it already exists!') } - if (!this.mos) { - throw Error('mos is undefined, call _initMosConnection first!') - } - - deviceOptions = JSON.parse(JSON.stringify(deviceOptions)) // deep clone - - deviceOptions.primary.timeout = deviceOptions.primary.timeout || DEFAULT_MOS_TIMEOUT_TIME - - deviceOptions.primary.heartbeatInterval = - deviceOptions.primary.heartbeatInterval || DEFAULT_MOS_HEARTBEAT_INTERVAL + if (!this.mos) throw Error('mos is undefined, call _initMosConnection first!') - if (deviceOptions.secondary?.id && this._openMediaHotStandby[deviceOptions.secondary.id]) { - deviceOptions.secondary.openMediaHotStandby = true - } + const deviceOptions: MosDeviceConfig = JSON.parse(JSON.stringify(deviceOptions0)) // deep clone + deviceOptions.primary.timeout ||= DEFAULT_MOS_TIMEOUT_TIME + deviceOptions.primary.heartbeatInterval ||= DEFAULT_MOS_HEARTBEAT_INTERVAL const mosDevice: MosDevice = await 
this.mos.connect(deviceOptions) - this._ownMosDevices[deviceId] = mosDevice + this._allMosDevices.set(deviceId, { + deviceId: deviceId, + mosDevice: mosDevice, + deviceOptions, + }) + + await this.setupMosDevice(mosDevice) try { const getMachineInfoUntilConnected = async (): Promise => @@ -535,23 +587,29 @@ export class MosHandler { return mosDevice } catch (e) { // something went wrong during init: - if (!this.mos) { - throw Error('mos is undefined!') - } + if (!this.mos) throw Error('mos is undefined!') this.mos.disposeMosDevice(mosDevice).catch((e2) => { - this._logger.error(e2) + this._logger.error(stringifyError(e2)) }) throw e } } private async _removeDevice(deviceId: string): Promise { - const mosDevice = this._getDevice(deviceId) as MosDevice + const deviceEntry = this._allMosDevices.get(deviceId) + this._allMosDevices.delete(deviceId) - delete this._ownMosDevices[deviceId] - if (mosDevice) { - if (!this._coreHandler) throw Error('_coreHandler is undefined!') - await this._coreHandler.unRegisterMosDevice(mosDevice) + if (deviceEntry) { + const mosDevice = deviceEntry.mosDevice + + // Cleanup the coreMosHandler from the device + if (this._coreHandler) await this._coreHandler.unRegisterMosDevice(mosDevice) + + // Stop the status handler, if enabled + if (deviceEntry.statusHandler) { + deviceEntry.statusHandler.dispose() + delete deviceEntry.statusHandler + } if (!this.mos) { throw Error('mos is undefined!') @@ -571,9 +629,6 @@ export class MosHandler { } return Promise.resolve() } - private _getDevice(deviceId: string): MosDevice | null { - return this._ownMosDevices[deviceId] || null - } private async _getROAck(roId: IMOSString128, p: Promise): Promise { return p .then(() => { @@ -585,7 +640,7 @@ export class MosHandler { return roAck }) .catch((err) => { - this._logger.error('ROAck error:', err) + this._logger.error(`ROAck error: ${stringifyError(err)}`) const roAck: IMOSROAck = { ID: roId, Status: this.mosTypes.mosString128.create('Error: ' + 
err.toString()), diff --git a/packages/mos-gateway/src/mosStatus/__tests__/diff.spec.ts b/packages/mos-gateway/src/mosStatus/__tests__/diff.spec.ts new file mode 100644 index 0000000000..7cbf3138e2 --- /dev/null +++ b/packages/mos-gateway/src/mosStatus/__tests__/diff.spec.ts @@ -0,0 +1,317 @@ +import { protectString } from '@sofie-automation/server-core-integration' +import { + IngestPartPlaybackStatus, + IngestRundownActiveStatus, + IngestRundownStatus, +} from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import { diffStatuses, ItemStatusEntry, MOS_STATUS_UNKNOWN, StoryStatusEntry } from '../diff.js' +import type { MosDeviceStatusesConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' +import { IMOSObjectStatus } from '@mos-connection/connector' + +describe('diffStatuses', () => { + const defaultConfig: MosDeviceStatusesConfig = { + enabled: true, + sendInRehearsal: true, + onlySendPlay: false, + } + const singlePartRundown: IngestRundownStatus = { + _id: protectString('rundown0'), + externalId: 'external0', + active: IngestRundownActiveStatus.ACTIVE, + segments: [ + { + externalId: 'segment0', + parts: [ + { + externalId: 'part0', + isReady: true, + itemsReady: [], + playbackStatus: IngestPartPlaybackStatus.UNKNOWN, + }, + ], + }, + ], + } + + test('diff no changes', () => { + const diff = diffStatuses(defaultConfig, singlePartRundown, singlePartRundown) + expect(diff).toHaveLength(0) + }) + + test('part playback changes', () => { + const partPlayingState = structuredClone(singlePartRundown) + partPlayingState.segments[0].parts[0].playbackStatus = IngestPartPlaybackStatus.PLAY + + { + // change to play + const diff = diffStatuses(defaultConfig, singlePartRundown, partPlayingState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.PLAY, + } satisfies StoryStatusEntry) + } + + { + const partStoppedState = 
structuredClone(partPlayingState) + partStoppedState.segments[0].parts[0].playbackStatus = IngestPartPlaybackStatus.STOP + + // change to stop + const diff = diffStatuses(defaultConfig, partPlayingState, partStoppedState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.STOP, + } satisfies StoryStatusEntry) + } + + { + const partClearState = structuredClone(partPlayingState) + partClearState.segments[0].parts[0].playbackStatus = IngestPartPlaybackStatus.UNKNOWN + + // change to clear + const diff = diffStatuses(defaultConfig, partPlayingState, partClearState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + }) + + test('part ready changes', () => { + const partNotReadyState = structuredClone(singlePartRundown) + partNotReadyState.segments[0].parts[0].isReady = false + + { + // change to not ready + const diff = diffStatuses(defaultConfig, singlePartRundown, partNotReadyState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies StoryStatusEntry) + } + + { + // change to ready + const diff = diffStatuses(defaultConfig, partNotReadyState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + + { + const partClearState = structuredClone(partNotReadyState) + partClearState.segments[0].parts[0].isReady = null + + // change to unknown + const diff = diffStatuses(defaultConfig, partNotReadyState, partClearState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 
'external0', + storyId: 'part0', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies StoryStatusEntry) + } + }) + + test('part added to rundown', () => { + const extraPartState = structuredClone(singlePartRundown) + extraPartState.segments[0].parts.push({ + externalId: 'part1', + isReady: false, + itemsReady: [], + playbackStatus: IngestPartPlaybackStatus.UNKNOWN, + }) + + { + const diff = diffStatuses(defaultConfig, singlePartRundown, extraPartState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part1', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies StoryStatusEntry) + } + }) + + test('part removed from rundown', () => { + const extraPartState = structuredClone(singlePartRundown) + extraPartState.segments[0].parts.push({ + externalId: 'part1', + isReady: false, + itemsReady: [], + playbackStatus: IngestPartPlaybackStatus.UNKNOWN, + }) + + { + const diff = diffStatuses(defaultConfig, extraPartState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part1', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies StoryStatusEntry) + } + }) + + test('rundown becomes inactive', () => { + const inactiveState = structuredClone(singlePartRundown) + inactiveState.active = IngestRundownActiveStatus.INACTIVE + + { + const diff = diffStatuses(defaultConfig, singlePartRundown, inactiveState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies StoryStatusEntry) + } + }) + + test('rundown becomes active', () => { + const inactiveState = structuredClone(singlePartRundown) + inactiveState.active = IngestRundownActiveStatus.INACTIVE + + { + const diff = diffStatuses(defaultConfig, inactiveState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + 
rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + }) + + test('rundown becomes rehearsal', () => { + const inactiveState = structuredClone(singlePartRundown) + inactiveState.active = IngestRundownActiveStatus.INACTIVE + const rehearsalState = structuredClone(singlePartRundown) + rehearsalState.active = IngestRundownActiveStatus.REHEARSAL + + { + // send during rehearsal + const diff = diffStatuses(defaultConfig, inactiveState, rehearsalState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + + { + // no send during rehearsal + const disableRehearsalConfig = { + ...defaultConfig, + sendInRehearsal: false, + } + const diff = diffStatuses(disableRehearsalConfig, inactiveState, rehearsalState) + expect(diff).toHaveLength(0) + } + }) + + test('add items', () => { + { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: true }) + + const diff = diffStatuses(defaultConfig, singlePartRundown, itemsState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: IMOSObjectStatus.READY, + } satisfies ItemStatusEntry) + } + + { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: false }) + + const diff = diffStatuses(defaultConfig, singlePartRundown, itemsState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies ItemStatusEntry) + } + + { + const itemsState = structuredClone(singlePartRundown) + // 
itemsState.segments[0].parts[0].itemsReady.item0 = undefined + + const diff = diffStatuses(defaultConfig, singlePartRundown, itemsState) + expect(diff).toHaveLength(0) + } + }) + + test('remove items', () => { + { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: true }) + + const diff = diffStatuses(defaultConfig, itemsState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies ItemStatusEntry) + } + + { + const itemsState = structuredClone(singlePartRundown) + // itemsState.segments[0].parts[0].itemsReady.item0 = undefined + + const diff = diffStatuses(defaultConfig, itemsState, singlePartRundown) + expect(diff).toHaveLength(0) + } + }) + + test('change item state', () => { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: true }) + + const items2State = structuredClone(itemsState) + items2State.segments[0].parts[0].itemsReady[0].ready = false + + const diff = diffStatuses(defaultConfig, itemsState, items2State) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies ItemStatusEntry) + }) +}) diff --git a/packages/mos-gateway/src/mosStatus/diff.ts b/packages/mos-gateway/src/mosStatus/diff.ts new file mode 100644 index 0000000000..de50bb178c --- /dev/null +++ b/packages/mos-gateway/src/mosStatus/diff.ts @@ -0,0 +1,175 @@ +import { IMOSObjectStatus } from '@mos-connection/connector' +import type { MosDeviceStatusesConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' +import { + IngestPartNotifyItemReady, + IngestPartPlaybackStatus, + IngestRundownActiveStatus, + 
type IngestPartStatus, + type IngestRundownStatus, +} from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' + +export const MOS_STATUS_UNKNOWN = '' as IMOSObjectStatus // Force the status to be empty, which isn't a valid state in the enum + +export type SomeStatusEntry = StoryStatusEntry | ItemStatusEntry + +export interface ItemStatusEntry { + type: 'item' + rundownExternalId: string + storyId: string + itemId: string + mosStatus: IMOSObjectStatus +} + +export interface StoryStatusEntry { + type: 'story' + rundownExternalId: string + storyId: string + mosStatus: IMOSObjectStatus +} + +export function diffStatuses( + config: MosDeviceStatusesConfig, + previousStatuses: IngestRundownStatus | undefined, + newStatuses: IngestRundownStatus | undefined +): SomeStatusEntry[] { + const rundownExternalId = previousStatuses?.externalId ?? newStatuses?.externalId + + if ((!previousStatuses && !newStatuses) || !rundownExternalId) return [] + + const statuses: SomeStatusEntry[] = [] + + const previousStories = buildStoriesMap(previousStatuses) + const newStories = buildStoriesMap(newStatuses) + + // Process any removed stories first + for (const [storyId, story] of previousStories) { + if (!newStories.has(storyId)) { + // The story has been removed + statuses.push({ + type: 'story', + rundownExternalId, + storyId, + mosStatus: MOS_STATUS_UNKNOWN, + }) + + // Clear any items too + for (const itemStatus of story.itemsReady) { + statuses.push({ + type: 'item', + rundownExternalId, + storyId, + itemId: itemStatus.externalId, + mosStatus: MOS_STATUS_UNKNOWN, + }) + } + } + } + + // Then any remaining stories in order + for (const [storyId, status] of newStories) { + const previousStatus = previousStories.get(storyId) + + const newMosStatus = buildMosStatus(config, status.playbackStatus, status.isReady, newStatuses?.active) + if ( + newMosStatus !== null && + (!previousStatus || + buildMosStatus( + config, + previousStatus.playbackStatus, + previousStatus.isReady, + 
previousStatuses?.active + ) !== newMosStatus) + ) { + statuses.push({ + type: 'story', + rundownExternalId, + storyId, + mosStatus: newMosStatus, + }) + } + + const allItemIds = new Set() + const previousItemStatuses = new Map() + const newItemStatuses = new Map() + + for (const itemStatus of previousStatus?.itemsReady ?? []) { + previousItemStatuses.set(itemStatus.externalId, itemStatus) + allItemIds.add(itemStatus.externalId) + } + for (const itemStatus of status.itemsReady) { + newItemStatuses.set(itemStatus.externalId, itemStatus) + allItemIds.add(itemStatus.externalId) + } + + // Diff each item in the story + for (const itemId of allItemIds) { + const newItemStatus = newItemStatuses.get(itemId) + const previousItemStatus = previousItemStatuses.get(itemId) + + const newMosStatus = newItemStatus + ? buildMosStatus(config, status.playbackStatus, newItemStatus.ready, newStatuses?.active) + : null + const previousMosStatus = + previousItemStatus && previousStatus + ? buildMosStatus( + config, + previousStatus.playbackStatus, + previousItemStatus.ready, + previousStatuses?.active + ) + : null + + if ((newMosStatus !== null || previousMosStatus !== null) && previousMosStatus !== newMosStatus) { + statuses.push({ + type: 'item', + rundownExternalId, + storyId, + itemId, + mosStatus: newMosStatus ?? 
MOS_STATUS_UNKNOWN, + }) + } + } + } + + return statuses +} + +function buildStoriesMap(state: IngestRundownStatus | undefined): Map { + const stories = new Map() + + if (state) { + for (const segment of state.segments) { + for (const part of segment.parts) { + stories.set(part.externalId, part) + } + } + } + + return stories +} + +function buildMosStatus( + config: MosDeviceStatusesConfig, + playbackStatus: IngestPartPlaybackStatus, + isReady: boolean | null | undefined, + active: IngestRundownStatus['active'] | undefined +): IMOSObjectStatus | null { + if (active === IngestRundownActiveStatus.INACTIVE) return MOS_STATUS_UNKNOWN + if (active === IngestRundownActiveStatus.REHEARSAL && !config.sendInRehearsal) return null + + switch (playbackStatus) { + case IngestPartPlaybackStatus.PLAY: + return IMOSObjectStatus.PLAY + case IngestPartPlaybackStatus.STOP: + return IMOSObjectStatus.STOP + default: + switch (isReady) { + case true: + return IMOSObjectStatus.READY + case false: + return IMOSObjectStatus.NOT_READY + default: + return MOS_STATUS_UNKNOWN + } + } +} diff --git a/packages/mos-gateway/src/mosStatus/handler.ts b/packages/mos-gateway/src/mosStatus/handler.ts new file mode 100644 index 0000000000..596ea78a6b --- /dev/null +++ b/packages/mos-gateway/src/mosStatus/handler.ts @@ -0,0 +1,163 @@ +import { + getMosTypes, + type IMOSItemStatus, + IMOSObjectStatus, + type IMOSStoryStatus, + type MosTypes, + type IMOSDevice, +} from '@mos-connection/connector' +import type { MosDeviceStatusesConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' +import type { CoreMosDeviceHandler } from '../CoreMosDeviceHandler.js' +import { + assertNever, + type Observer, + PeripheralDevicePubSub, + PeripheralDevicePubSubCollectionsNames, + stringifyError, + SubscriptionId, +} from '@sofie-automation/server-core-integration' +import type { IngestRundownStatus } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import type { RundownId } from 
'@sofie-automation/shared-lib/dist/core/model/Ids' +import * as winston from 'winston' +import { Queue } from '@sofie-automation/server-core-integration/dist/lib/queue' +import { diffStatuses } from './diff.js' + +export class MosStatusHandler { + readonly #logger: winston.Logger + readonly #mosDevice: IMOSDevice + readonly #coreMosHandler: CoreMosDeviceHandler + readonly #config: MosDeviceStatusesConfig + readonly #mosTypes: MosTypes + + readonly #messageQueue = new Queue() + + #subId: SubscriptionId | undefined + #observer: Observer | undefined + + #destroyed = false + + readonly #lastStatuses = new Map() + + constructor( + logger: winston.Logger, + mosDevice: IMOSDevice, + coreMosHandler: CoreMosDeviceHandler, + config: MosDeviceStatusesConfig, + strictMosTypes: boolean + ) { + if (!config.enabled) throw new Error('MosStatusHandler is not enabled') + + this.#logger = logger + this.#mosDevice = mosDevice + this.#coreMosHandler = coreMosHandler + this.#config = config + this.#mosTypes = getMosTypes(strictMosTypes) + + coreMosHandler.core + .autoSubscribe(PeripheralDevicePubSub.ingestDeviceRundownStatus, coreMosHandler.core.deviceId) + .then((subId) => { + this.#subId = subId + + if (this.#destroyed) coreMosHandler.core.unsubscribe(subId) + }) + .catch((e) => { + this.#logger.error(`Error subscribing to ingestDeviceRundownStatus: ${stringifyError(e)}`) + }) + + // Setup the observer immediately, which will trigger a resync upon the documents being added + this.#observer = coreMosHandler.core.observe(PeripheralDevicePubSubCollectionsNames.ingestRundownStatus) + this.#observer.added = (id) => this.#rundownChanged(id) + this.#observer.changed = (id) => this.#rundownChanged(id) + this.#observer.removed = (id) => this.#rundownChanged(id) + + this.#logger.info(`MosStatusHandler initialized for ${coreMosHandler.core.deviceId}`) + } + + #rundownChanged(id: RundownId): void { + const collection = this.#coreMosHandler.core.getCollection( + 
PeripheralDevicePubSubCollectionsNames.ingestRundownStatus + ) + + const newStatuses = collection.findOne(id) + const previousStatuses = this.#lastStatuses.get(id) + + // Update the last statuses store + if (newStatuses) { + this.#lastStatuses.set(id, newStatuses) + } else { + this.#lastStatuses.delete(id) + } + + const statusDiff = diffStatuses(this.#config, previousStatuses, newStatuses) + if (statusDiff.length === 0) return + + const diffTime = this.#mosTypes.mosTime.create(Date.now()) + + // Future: should this be done with some concurrency? + for (const status of statusDiff) { + // New implementation 2022 only sends PLAY, never stop, after getting advice from AP + // Reason 1: NRK ENPS "sendt tid" (elapsed time) stopped working in ENPS 8/9 when doing STOP prior to PLAY + // Reason 2: there's a delay between the STOP (yellow line disappears) and PLAY (yellow line re-appears), which annoys the users + if (this.#config.onlySendPlay && status.mosStatus !== IMOSObjectStatus.PLAY) continue + + this.#messageQueue + .putOnQueue(async () => { + if (this.#isDeviceConnected()) { + if (status.type === 'item') { + const newStatus: IMOSItemStatus = { + RunningOrderId: this.#mosTypes.mosString128.create(status.rundownExternalId), + StoryId: this.#mosTypes.mosString128.create(status.storyId), + ID: this.#mosTypes.mosString128.create(status.itemId), + Status: status.mosStatus, + Time: diffTime, + } + this.#logger.info(`Sending Story status: ${JSON.stringify(newStatus)}`) + + // Send status + await this.#mosDevice.sendItemStatus(newStatus) + } else if (status.type === 'story') { + const newStatus: IMOSStoryStatus = { + RunningOrderId: this.#mosTypes.mosString128.create(status.rundownExternalId), + ID: this.#mosTypes.mosString128.create(status.storyId), + Status: status.mosStatus, + Time: diffTime, + } + this.#logger.info(`Sending Story status: ${JSON.stringify(newStatus)}`) + + // Send status + await this.#mosDevice.sendStoryStatus(newStatus) + } else { + 
this.#logger.debug(`Discarding unknown queued status: ${JSON.stringify(status)}`) + assertNever(status) + } + } else if (this.#config.onlySendPlay) { + // No need to do anything. + this.#logger.info(`Not connected, skipping play status: ${JSON.stringify(status)}`) + } else { + this.#logger.info(`Not connected, discarding status: ${JSON.stringify(status)}`) + } + }) + .catch((e) => { + this.#logger.error( + `Error sending of "${status.rundownExternalId}"-"${ + status.storyId + }" status to MOS device: ${stringifyError(e)}` + ) + }) + } + } + + #isDeviceConnected(): boolean { + return ( + this.#mosDevice.getConnectionStatus().PrimaryConnected || + this.#mosDevice.getConnectionStatus().SecondaryConnected + ) + } + + dispose(): void { + this.#destroyed = true + + this.#observer?.stop() + if (this.#subId) this.#coreMosHandler.core.unsubscribe(this.#subId) + } +} diff --git a/packages/mos-gateway/src/versions.ts b/packages/mos-gateway/src/versions.ts index c0b3293662..63ad2d96d8 100644 --- a/packages/mos-gateway/src/versions.ts +++ b/packages/mos-gateway/src/versions.ts @@ -20,7 +20,7 @@ export function getVersions(logger: Winston.Logger): { [packageName: string]: st } } } catch (e) { - logger.error(e) + logger.error(stringifyError(e)) } return versions } diff --git a/packages/openapi/api/definitions/studios.yaml b/packages/openapi/api/definitions/studios.yaml index b543628d27..1a0bc78ff3 100644 --- a/packages/openapi/api/definitions/studios.yaml +++ b/packages/openapi/api/definitions/studios.yaml @@ -561,6 +561,9 @@ components: allowPieceDirectPlay: type: boolean description: Whether to allow direct playing of a piece in the rundown + rundownGlobalPiecesPrepareTime: + type: number + description: How long before their start time a rundown owned piece be added to the timeline required: - frameRate diff --git a/packages/shared-lib/src/core/model/StudioSettings.ts b/packages/shared-lib/src/core/model/StudioSettings.ts index 09254ba173..1a117f1838 100644 --- 
a/packages/shared-lib/src/core/model/StudioSettings.ts +++ b/packages/shared-lib/src/core/model/StudioSettings.ts @@ -99,4 +99,9 @@ export interface IStudioSettings { * Override the piece content statuses with fake info - used for developing the UI */ mockPieceContentStatus?: boolean + + /** + * How long before their start time a rundown owned piece be added to the timeline + */ + rundownGlobalPiecesPrepareTime?: number } diff --git a/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts b/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts index f192cf7614..b6ebdc5665 100644 --- a/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts +++ b/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts @@ -31,4 +31,10 @@ export interface MosDeviceConfig { query: number } } + statuses: MosDeviceStatusesConfig +} +export interface MosDeviceStatusesConfig { + enabled: boolean + sendInRehearsal?: boolean + onlySendPlay?: boolean } diff --git a/packages/shared-lib/src/ingest/rundownStatus.ts b/packages/shared-lib/src/ingest/rundownStatus.ts new file mode 100644 index 0000000000..4e0159a4ca --- /dev/null +++ b/packages/shared-lib/src/ingest/rundownStatus.ts @@ -0,0 +1,47 @@ +import type { RundownId } from '../core/model/Ids.js' + +export interface IngestRundownStatus { + _id: RundownId + + /** Rundown external id */ + externalId: string + + active: IngestRundownActiveStatus + + segments: IngestSegmentStatus[] +} + +export enum IngestRundownActiveStatus { + ACTIVE = 'active', + REHEARSAL = 'rehearsal', + INACTIVE = 'inactive', +} + +export interface IngestSegmentStatus { + /** Segment external id */ + externalId: string + + parts: IngestPartStatus[] +} + +export interface IngestPartStatus { + /** Part external id */ + externalId: string + + isReady: boolean | null + + itemsReady: IngestPartNotifyItemReady[] + + playbackStatus: IngestPartPlaybackStatus +} + +export enum IngestPartPlaybackStatus { + UNKNOWN = 'unknown', + PLAY = 'play', + STOP = 'stop', +} 
+ +export interface IngestPartNotifyItemReady { + externalId: string + ready: boolean +} diff --git a/packages/shared-lib/src/package-manager/publications.ts b/packages/shared-lib/src/package-manager/publications.ts index bd4f0c1d1c..a94c8fd7ea 100644 --- a/packages/shared-lib/src/package-manager/publications.ts +++ b/packages/shared-lib/src/package-manager/publications.ts @@ -1,6 +1,7 @@ import { ExpectedPackage, PackageContainer, PackageContainerOnPackage } from './package.js' import { PeripheralDeviceId, PieceInstanceId, RundownId, RundownPlaylistId } from '../core/model/Ids.js' import { ProtectedString } from '../lib/protectedString.js' +import { ReadonlyDeep } from 'type-fest' export interface PackageManagerPlayoutContext { _id: PeripheralDeviceId @@ -27,7 +28,7 @@ export interface PackageManagerPackageContainers { export type PackageManagerExpectedPackageId = ProtectedString<'PackageManagerExpectedPackage'> -export type PackageManagerExpectedPackageBase = ExpectedPackage.Base & { rundownId?: RundownId } +export type PackageManagerExpectedPackageBase = ReadonlyDeep export interface PackageManagerExpectedPackage { /** Unique id of the expectedPackage */ diff --git a/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts b/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts index 294b928420..e68b3c72fa 100644 --- a/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts +++ b/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts @@ -26,7 +26,6 @@ export type PiecePlaybackStoppedResult = PiecePlaybackStartedResult export interface TriggerRegenerationCallbackData { rundownPlaylistId: RundownPlaylistId - // partInstanceId: PartInstanceId regenerationToken: string } diff --git a/packages/shared-lib/src/pubsub/peripheralDevice.ts b/packages/shared-lib/src/pubsub/peripheralDevice.ts index 421c191a8b..c4eb440d88 100644 --- a/packages/shared-lib/src/pubsub/peripheralDevice.ts +++ b/packages/shared-lib/src/pubsub/peripheralDevice.ts 
@@ -10,6 +10,7 @@ import { PeripheralDeviceId, RundownId, RundownPlaylistId } from '../core/model/ import { PeripheralDeviceCommand } from '../core/model/PeripheralDeviceCommand.js' import { ExpectedPlayoutItemPeripheralDevice } from '../expectedPlayoutItem.js' import { DeviceTriggerMountedAction, PreviewWrappedAdLib } from '../input-gateway/deviceTriggerPreviews.js' +import type { IngestRundownStatus } from '../ingest/rundownStatus.js' /** * Ids of possible DDP subscriptions for any PeripheralDevice. @@ -51,6 +52,13 @@ export enum PeripheralDevicePubSub { packageManagerPackageContainers = 'packageManagerPackageContainers', /** Package manager: The expected packages in the Studio of the PeripheralDevice */ packageManagerExpectedPackages = 'packageManagerExpectedPackages', + + // Ingest gateway: + + /** + * Ingest status of rundowns for a PeripheralDevice + */ + ingestDeviceRundownStatus = 'ingestDeviceRundownStatus', } /** @@ -114,6 +122,11 @@ export interface PeripheralDevicePubSubTypes { filterPlayoutDeviceIds: PeripheralDeviceId[] | undefined, token?: string ) => PeripheralDevicePubSubCollectionsNames.packageManagerExpectedPackages + + [PeripheralDevicePubSub.ingestDeviceRundownStatus]: ( + deviceId: PeripheralDeviceId, + token?: string + ) => PeripheralDevicePubSubCollectionsNames.ingestRundownStatus } export enum PeripheralDevicePubSubCollectionsNames { @@ -134,6 +147,8 @@ export enum PeripheralDevicePubSubCollectionsNames { packageManagerPlayoutContext = 'packageManagerPlayoutContext', packageManagerPackageContainers = 'packageManagerPackageContainers', packageManagerExpectedPackages = 'packageManagerExpectedPackages', + + ingestRundownStatus = 'ingestRundownStatus', } export type PeripheralDevicePubSubCollections = { @@ -154,4 +169,6 @@ export type PeripheralDevicePubSubCollections = { [PeripheralDevicePubSubCollectionsNames.packageManagerPlayoutContext]: PackageManagerPlayoutContext [PeripheralDevicePubSubCollectionsNames.packageManagerPackageContainers]: 
PackageManagerPackageContainers [PeripheralDevicePubSubCollectionsNames.packageManagerExpectedPackages]: PackageManagerExpectedPackage + + [PeripheralDevicePubSubCollectionsNames.ingestRundownStatus]: IngestRundownStatus } diff --git a/packages/webui/package.json b/packages/webui/package.json index e8aeb74436..12c85c488c 100644 --- a/packages/webui/package.json +++ b/packages/webui/package.json @@ -14,7 +14,7 @@ }, "homepage": "https://github.com/nrkno/sofie-core/blob/master/packages/webui#readme", "scripts": { - "dev": "vite --port=3005", + "dev": "vite --port=3005 --force", "build": "tsc -b && vite build", "build:main": "tsc -p tsconfig.app.json --noEmit", "check-types": "tsc -p tsconfig.app.json --noEmit", diff --git a/packages/webui/public/dev/fakeThumbnail.png b/packages/webui/public/dev/fakeThumbnail.png new file mode 100644 index 0000000000..669e7837dd Binary files /dev/null and b/packages/webui/public/dev/fakeThumbnail.png differ diff --git a/packages/webui/src/client/lib/RundownResolver.ts b/packages/webui/src/client/lib/RundownResolver.ts index cb2d0c1681..d0721ec33e 100644 --- a/packages/webui/src/client/lib/RundownResolver.ts +++ b/packages/webui/src/client/lib/RundownResolver.ts @@ -62,7 +62,7 @@ function fetchPiecesThatMayBeActiveForPart( segmentsToReceiveOnRundownEndFromSet: Set, rundownsToReceiveOnShowStyleEndFrom: RundownId[], /** Map of Pieces on Parts, passed through for performance */ - allPiecesCache?: Map + allPiecesCache?: Map ): Piece[] { let piecesStartingInPart: Piece[] const allPieces = allPiecesCache?.get(part._id) @@ -131,7 +131,7 @@ export function getPieceInstancesForPartInstance( currentPartInstancePieceInstances: PieceInstance[] | undefined, allowTestingAdlibsToPersist: boolean, /** Map of Pieces on Parts, passed through for performance */ - allPiecesCache?: Map, + allPiecesCache?: Map, options?: FindOptions, pieceInstanceSimulation?: boolean ): PieceInstance[] { diff --git a/packages/webui/src/client/lib/VirtualElement.tsx 
b/packages/webui/src/client/lib/VirtualElement.tsx index 1b825b7292..77111cf5f0 100644 --- a/packages/webui/src/client/lib/VirtualElement.tsx +++ b/packages/webui/src/client/lib/VirtualElement.tsx @@ -1,5 +1,6 @@ -import React, { useCallback, useEffect, useLayoutEffect, useMemo, useState } from 'react' +import React, { useCallback, useEffect, useMemo, useState, useRef } from 'react' import { InView } from 'react-intersection-observer' +import { getViewPortScrollingState } from './viewPort.js' interface IElementMeasurements { width: string | number @@ -11,12 +12,12 @@ interface IElementMeasurements { id: string | undefined } -const OPTIMIZE_PERIOD = 5000 const IDLE_CALLBACK_TIMEOUT = 100 /** * This is a component that allows optimizing the amount of elements present in the DOM through replacing them * with placeholders when they aren't visible in the viewport. + * Scroll timing issues, should be handled in viewPort.tsx where the scrolling state is tracked. * * @export * @param {(React.PropsWithChildren<{ @@ -40,6 +41,7 @@ const IDLE_CALLBACK_TIMEOUT = 100 * } * @return {*} {(JSX.Element | null)} */ + export function VirtualElement({ initialShow, placeholderHeight, @@ -59,89 +61,272 @@ export function VirtualElement({ id?: string | undefined className?: string }>): JSX.Element | null { + const resizeObserverManager = ElementObserverManager.getInstance() const [inView, setInView] = useState(initialShow ?? 
false) + const [waitForInitialLoad, setWaitForInitialLoad] = useState(true) const [isShowingChildren, setIsShowingChildren] = useState(inView) + const [measurements, setMeasurements] = useState(null) const [ref, setRef] = useState(null) - const [childRef, setChildRef] = useState(null) - const isMeasured = !!measurements + // Timers for visibility changes: + const scrollTimeoutRef = useRef | undefined>(undefined) + const inViewChangeTimerRef = useRef | undefined>(undefined) + const skipInitialrunRef = useRef(true) + const isTransitioning = useRef(false) + + const isCurrentlyObserving = useRef(false) const styleObj = useMemo( () => ({ - width: width ?? measurements?.width ?? 'auto', - height: (measurements?.clientHeight ?? placeholderHeight ?? '0') + 'px', - marginTop: measurements?.marginTop, - marginLeft: measurements?.marginLeft, - marginRight: measurements?.marginRight, - marginBottom: measurements?.marginBottom, + width: width ?? 'auto', + height: ((placeholderHeight || ref?.clientHeight) ?? '0') + 'px', + marginTop: 0, + marginLeft: 0, + marginRight: 0, + marginBottom: 0, + // These properties are used to ensure that if a prior element is changed from + // placeHolder to element, the position of visible elements are not affected. + contentVisibility: 'auto', + containIntrinsicSize: `0 ${(placeholderHeight || ref?.clientHeight) ?? 
'0'}px`, + contain: 'size layout', }), - [width, measurements, placeholderHeight] + [width, placeholderHeight] ) - const onVisibleChanged = useCallback((visible: boolean) => { - setInView(visible) - }, []) + const handleResize = useCallback(() => { + if (ref) { + // Show children during measurement + setIsShowingChildren(true) + + requestAnimationFrame(() => { + const measurements = measureElement(ref, placeholderHeight) + if (measurements) { + setMeasurements(measurements) + + // Only hide children again if not in view + if (!inView && measurements.clientHeight > 0) { + setIsShowingChildren(false) + } else { + setIsShowingChildren(true) + } + } + }) + } + }, [ref, inView, placeholderHeight]) + // failsafe to ensure visible elements if resizing happens while scrolling useEffect(() => { - if (inView === true) { + if (!isShowingChildren) { + const checkVisibilityByPosition = () => { + if (ref) { + const rect = ref.getBoundingClientRect() + const isInViewport = rect.top < window.innerHeight && rect.bottom > 0 + + if (isInViewport) { + setIsShowingChildren(true) + setInView(true) + } + } + } + + // Check every second + const positionCheckInterval = setInterval(checkVisibilityByPosition, 1000) + + return () => { + clearInterval(positionCheckInterval) + } + } + }, [ref, isShowingChildren]) + + // Ensure elements are visible after a fast scroll: + useEffect(() => { + const checkVisibilityOnScroll = () => { + if (inView && !isShowingChildren) { + setIsShowingChildren(true) + } + + // Add a check after scroll stops + if (scrollTimeoutRef.current) { + clearTimeout(scrollTimeoutRef.current) + } + scrollTimeoutRef.current = setTimeout(() => { + // Recheck visibility after scroll appears to have stopped + if (inView && !isShowingChildren) { + setIsShowingChildren(true) + } + }, 200) + } + + window.addEventListener('scroll', checkVisibilityOnScroll, { passive: true }) + + return () => { + window.removeEventListener('scroll', checkVisibilityOnScroll) + if 
(scrollTimeoutRef.current) { + clearTimeout(scrollTimeoutRef.current) + } + } + }, [inView, isShowingChildren]) + + useEffect(() => { + if (inView) { setIsShowingChildren(true) + } + + // Startup skip: + if (skipInitialrunRef.current) { + skipInitialrunRef.current = false return } - let idleCallback: number | undefined - const optimizeTimeout = window.setTimeout(() => { - idleCallback = window.requestIdleCallback( - () => { - if (childRef) { - setMeasurements(measureElement(childRef)) + if (isTransitioning.current) { + return + } + + isTransitioning.current = true + + // Clear any existing timers + if (inViewChangeTimerRef.current) { + clearTimeout(inViewChangeTimerRef.current) + inViewChangeTimerRef.current = undefined + } + + // Delay the visibility change to avoid flickering + // But low enough for scrolling to be responsive + inViewChangeTimerRef.current = setTimeout(() => { + try { + if (inView) { + if (ref) { + if (!isCurrentlyObserving.current) { + resizeObserverManager.observe(ref, handleResize) + isCurrentlyObserving.current = true + } + } + } else { + if (ref && isCurrentlyObserving.current) { + resizeObserverManager.unobserve(ref) + isCurrentlyObserving.current = false } setIsShowingChildren(false) - }, - { - timeout: IDLE_CALLBACK_TIMEOUT, } - ) - }, OPTIMIZE_PERIOD) + } catch (error) { + console.error('Error in visibility change handler:', error) + } finally { + isTransitioning.current = false + inViewChangeTimerRef.current = undefined + } + }, 100) + }, [inView, ref, handleResize, resizeObserverManager]) - return () => { - if (idleCallback) { - window.cancelIdleCallback(idleCallback) + const onVisibleChanged = useCallback( + (visible: boolean) => { + // Only update state if there's a change + if (inView !== visible) { + setInView(visible) } + }, + [inView] + ) - window.clearTimeout(optimizeTimeout) + const isScrolling = (): boolean => { + // Don't do updates while scrolling: + if (getViewPortScrollingState().isProgrammaticScrollInProgress) { + return 
true } - }, [childRef, inView]) + // And wait if a programmatic scroll was done recently: + const timeSinceLastProgrammaticScroll = Date.now() - getViewPortScrollingState().lastProgrammaticScrollTime + if (timeSinceLastProgrammaticScroll < 100) { + return true + } + return false + } - const showPlaceholder = !isShowingChildren && (!initialShow || isMeasured) + useEffect(() => { + // Setup initial observer if element is in view + if (ref && inView && !isCurrentlyObserving.current) { + resizeObserverManager.observe(ref, handleResize) + isCurrentlyObserving.current = true + } + + // Cleanup function + return () => { + // Clean up resize observer + if (ref && isCurrentlyObserving.current) { + resizeObserverManager.unobserve(ref) + isCurrentlyObserving.current = false + } + + if (inViewChangeTimerRef.current) { + clearTimeout(inViewChangeTimerRef.current) + } + } + }, [ref, inView, handleResize]) - useLayoutEffect(() => { - if (!ref || showPlaceholder) return + useEffect(() => { + if (inView === true) { + setIsShowingChildren(true) - const el = ref?.firstElementChild - if (!el || el.classList.contains('virtual-element-placeholder') || !(el instanceof HTMLElement)) return + // Schedule a measurement after a short delay + if (waitForInitialLoad && ref) { + const initialMeasurementTimeout = window.setTimeout(() => { + const measurements = measureElement(ref, placeholderHeight) + if (measurements) { + setMeasurements(measurements) + setWaitForInitialLoad(false) + } + }, 800) - setChildRef(el) + return () => { + window.clearTimeout(initialMeasurementTimeout) + } + } + return + } let idleCallback: number | undefined - const refreshSizingTimeout = window.setTimeout(() => { + let optimizeTimeout: number | undefined + + const scheduleOptimization = () => { + if (optimizeTimeout) { + window.clearTimeout(optimizeTimeout) + } + // Don't proceed if we're scrolling + if (isScrolling()) { + // Reschedule for after the scroll should be complete + const scrollDelay = 400 + 
window.clearTimeout(optimizeTimeout) + optimizeTimeout = window.setTimeout(scheduleOptimization, scrollDelay) + return + } idleCallback = window.requestIdleCallback( () => { - setMeasurements(measureElement(el)) + // Measure the entire wrapper element instead of just the childRef + if (ref) { + const measurements = measureElement(ref, placeholderHeight) + if (measurements) { + setMeasurements(measurements) + } + } + setIsShowingChildren(false) }, { timeout: IDLE_CALLBACK_TIMEOUT, } ) - }, 1000) + } + + // Schedule the optimization: + scheduleOptimization() return () => { if (idleCallback) { window.cancelIdleCallback(idleCallback) } - window.clearTimeout(refreshSizingTimeout) + if (optimizeTimeout) { + window.clearTimeout(optimizeTimeout) + } } - }, [ref, showPlaceholder]) + }, [ref, inView, placeholderHeight]) return ( -

- {showPlaceholder ? ( +
+ {!isShowingChildren ? (
) } +function measureElement(wrapperEl: HTMLDivElement, placeholderHeight?: number): IElementMeasurements | null { + if (!wrapperEl || !wrapperEl.firstElementChild) { + return null + } -function measureElement(el: HTMLElement): IElementMeasurements | null { + const el = wrapperEl.firstElementChild as HTMLElement const style = window.getComputedStyle(el) - const clientRect = el.getBoundingClientRect() + let segmentTimeline: Element | null = null + let dashboardPanel: Element | null = null + + segmentTimeline = wrapperEl.querySelector('.segment-timeline') + dashboardPanel = wrapperEl.querySelector('.dashboard-panel') + + if (segmentTimeline) { + const segmentRect = segmentTimeline.getBoundingClientRect() + let totalHeight = segmentRect.height + + if (dashboardPanel) { + const panelRect = dashboardPanel.getBoundingClientRect() + totalHeight += panelRect.height + } + + if (totalHeight < 40) { + totalHeight = placeholderHeight ?? el.clientHeight + } + + return { + width: style.width || 'auto', + clientHeight: totalHeight, + marginTop: style.marginTop || undefined, + marginBottom: style.marginBottom || undefined, + marginLeft: style.marginLeft || undefined, + marginRight: style.marginRight || undefined, + id: el.id, + } + } + + // Fallback to just measuring the element itself if wrapper isn't found return { width: style.width || 'auto', - clientHeight: clientRect.height, + clientHeight: placeholderHeight ?? 
el.clientHeight, marginTop: style.marginTop || undefined, marginBottom: style.marginBottom || undefined, marginLeft: style.marginLeft || undefined, @@ -180,3 +409,88 @@ function measureElement(el: HTMLElement): IElementMeasurements | null { id: el.id, } } + +// Singleton class to manage ResizeObserver instances +export class ElementObserverManager { + private static instance: ElementObserverManager + private resizeObserver: ResizeObserver + private mutationObserver: MutationObserver + private observedElements: Map void> + + private constructor() { + this.observedElements = new Map() + + // Configure ResizeObserver + this.resizeObserver = new ResizeObserver((entries) => { + entries.forEach((entry) => { + const element = entry.target as HTMLElement + const callback = this.observedElements.get(element) + if (callback) { + callback() + } + }) + }) + + // Configure MutationObserver + this.mutationObserver = new MutationObserver((mutations) => { + const targets = new Set() + + mutations.forEach((mutation) => { + const target = mutation.target as HTMLElement + // Find the closest observed element + let element = target + while (element) { + if (this.observedElements.has(element)) { + targets.add(element) + break + } + if (!element.parentElement) break + element = element.parentElement + } + }) + + // Call callbacks for affected elements + targets.forEach((element) => { + const callback = this.observedElements.get(element) + if (callback) callback() + }) + }) + } + + public static getInstance(): ElementObserverManager { + if (!ElementObserverManager.instance) { + ElementObserverManager.instance = new ElementObserverManager() + } + return ElementObserverManager.instance + } + + public observe(element: HTMLElement, callback: () => void): void { + if (!element) return + + this.observedElements.set(element, callback) + this.resizeObserver.observe(element) + this.mutationObserver.observe(element, { + childList: true, + subtree: true, + attributes: true, + characterData: true, + 
}) + } + + public unobserve(element: HTMLElement): void { + if (!element) return + this.observedElements.delete(element) + this.resizeObserver.unobserve(element) + + // Disconnect and reconnect mutation observer to refresh the list of observed elements + this.mutationObserver.disconnect() + this.observedElements.forEach((_, el) => { + this.mutationObserver.observe(el, { + childList: true, + subtree: true, + attributes: true, + characterData: true, + }) + }) + } +} diff --git a/packages/webui/src/client/lib/rundown.ts b/packages/webui/src/client/lib/rundown.ts index b8208b18c9..2f70fbd347 100644 --- a/packages/webui/src/client/lib/rundown.ts +++ b/packages/webui/src/client/lib/rundown.ts @@ -26,6 +26,7 @@ import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/Rund import { literal, protectString, groupByToMap } from './tempLib.js' import { getCurrentTime } from './systemTime.js' import { + createPartCurrentTimes, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, } from '@sofie-automation/corelib/dist/playout/processAndPrune' @@ -503,19 +504,20 @@ export namespace RundownUtils { pieceInstanceSimulation ) - const partStarted = partE.instance.timings?.plannedStartedPlayback - const nowInPart = partStarted ? getCurrentTime() - partStarted : 0 - + const partTimes = createPartCurrentTimes( + getCurrentTime(), + partE.instance.timings?.plannedStartedPlayback + ) const preprocessedPieces = processAndPrunePieceInstanceTimings( showStyleBase.sourceLayers, rawPieceInstances, - nowInPart, + partTimes, includeDisabledPieces ) // insert items into the timeline for resolution partE.pieces = preprocessedPieces.map((piece) => { - const resolvedPiece = resolvePrunedPieceInstance(nowInPart, piece) + const resolvedPiece = resolvePrunedPieceInstance(partTimes, piece) const resPiece: PieceExtended = { instance: piece, renderedDuration: resolvedPiece.resolvedDuration ?? 
null, diff --git a/packages/webui/src/client/lib/rundownLayouts.ts b/packages/webui/src/client/lib/rundownLayouts.ts index fffbe01be4..ecee8fa8d8 100644 --- a/packages/webui/src/client/lib/rundownLayouts.ts +++ b/packages/webui/src/client/lib/rundownLayouts.ts @@ -4,7 +4,10 @@ import { RundownPlaylistActivationId, StudioId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { UIShowStyleBase } from '@sofie-automation/meteor-lib/dist/api/showStyles' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { @@ -138,13 +141,11 @@ export function getUnfinishedPieceInstancesReactive( playlistActivationId: playlistActivationId, }).fetch() - const nowInPart = partInstance.timings?.plannedStartedPlayback - ? 
now - partInstance.timings.plannedStartedPlayback - : 0 + const partTimes = createPartCurrentTimes(now, partInstance.timings?.plannedStartedPlayback) prospectivePieces = processAndPrunePieceInstanceTimings( showStyleBase.sourceLayers, prospectivePieces, - nowInPart + partTimes ) let nearestEnd = Number.POSITIVE_INFINITY diff --git a/packages/webui/src/client/lib/rundownPlaylistUtil.ts b/packages/webui/src/client/lib/rundownPlaylistUtil.ts index 62c20d4539..6164a21c17 100644 --- a/packages/webui/src/client/lib/rundownPlaylistUtil.ts +++ b/packages/webui/src/client/lib/rundownPlaylistUtil.ts @@ -164,7 +164,7 @@ export class RundownPlaylistClientUtil { static getPiecesForParts( parts: Array, piecesOptions?: Omit, 'projection'> // We are mangling fields, so block projection - ): Map { + ): Map { const allPieces = Pieces.find( { startPartId: { $in: parts } }, { diff --git a/packages/webui/src/client/lib/shelf.ts b/packages/webui/src/client/lib/shelf.ts index 0df8b5d75c..a79cfd301d 100644 --- a/packages/webui/src/client/lib/shelf.ts +++ b/packages/webui/src/client/lib/shelf.ts @@ -3,13 +3,17 @@ import { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/Part import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { getUnfinishedPieceInstancesReactive } from './rundownLayouts.js' import { UIShowStyleBase } from '@sofie-automation/meteor-lib/dist/api/showStyles' import { PieceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PieceInstances } from '../collections/index.js' import { 
ReadonlyDeep } from 'type-fest' import { AdLibPieceUi } from '@sofie-automation/meteor-lib/dist/uiTypes/Adlib' +import { getCurrentTimeReactive } from './currentTimeReactive.js' export type { AdLibPieceUi } from '@sofie-automation/meteor-lib/dist/uiTypes/Adlib' @@ -60,10 +64,11 @@ export function getNextPiecesReactive( }).fetch() } + const partTimes = createPartCurrentTimes(getCurrentTimeReactive(), null) prospectivePieceInstances = processAndPrunePieceInstanceTimings( showsStyleBase.sourceLayers, prospectivePieceInstances, - 0 + partTimes ) return prospectivePieceInstances diff --git a/packages/webui/src/client/lib/ui/pieceUiClassNames.ts b/packages/webui/src/client/lib/ui/pieceUiClassNames.ts index 6ccad63c82..2b1149dbd0 100644 --- a/packages/webui/src/client/lib/ui/pieceUiClassNames.ts +++ b/packages/webui/src/client/lib/ui/pieceUiClassNames.ts @@ -32,10 +32,12 @@ export function pieceUiClassNames( : undefined, 'super-infinite': + !innerPiece.enable.isAbsolute && innerPiece.lifespan !== PieceLifespan.WithinPart && innerPiece.lifespan !== PieceLifespan.OutOnSegmentChange && innerPiece.lifespan !== PieceLifespan.OutOnSegmentEnd, 'infinite-starts': + !innerPiece.enable.isAbsolute && innerPiece.lifespan !== PieceLifespan.WithinPart && innerPiece.lifespan !== PieceLifespan.OutOnSegmentChange && innerPiece.lifespan !== PieceLifespan.OutOnSegmentEnd && diff --git a/packages/webui/src/client/lib/viewPort.ts b/packages/webui/src/client/lib/viewPort.ts index cb03a8102b..a9d72f1538 100644 --- a/packages/webui/src/client/lib/viewPort.ts +++ b/packages/webui/src/client/lib/viewPort.ts @@ -10,8 +10,24 @@ import { parse as queryStringParse } from 'query-string' const HEADER_MARGIN = 24 // TODOSYNC: TV2 uses 15. If it's needed to be different, it needs to be made generic somehow.. 
const FALLBACK_HEADER_HEIGHT = 65 -let focusInterval: NodeJS.Timeout | undefined -let _dontClearInterval = false +// Replace the global variable with a more structured approach +const focusState = { + interval: undefined as NodeJS.Timeout | undefined, + isScrolling: false, + startTime: 0, +} + +const viewPortScrollingState = { + isProgrammaticScrollInProgress: false, + lastProgrammaticScrollTime: 0, +} + +export function getViewPortScrollingState(): { + isProgrammaticScrollInProgress: boolean + lastProgrammaticScrollTime: number +} { + return viewPortScrollingState +} export function maintainFocusOnPartInstance( partInstanceId: PartInstanceId, @@ -19,32 +35,47 @@ export function maintainFocusOnPartInstance( forceScroll?: boolean, noAnimation?: boolean ): void { - const startTime = Date.now() - const focus = () => { - if (Date.now() - startTime < timeWindow) { - _dontClearInterval = true - scrollToPartInstance(partInstanceId, forceScroll, noAnimation) - .then(() => { - _dontClearInterval = false - }) - .catch(() => { - _dontClearInterval = false - }) - } else { + focusState.startTime = Date.now() + + const focus = async () => { + // Only proceed if we're not already scrolling and within the time window + if (!focusState.isScrolling && Date.now() - focusState.startTime < timeWindow) { + focusState.isScrolling = true + + try { + await scrollToPartInstance(partInstanceId, forceScroll, noAnimation) + } catch (_error) { + // Handle error if needed + } finally { + focusState.isScrolling = false + } + } else if (Date.now() - focusState.startTime >= timeWindow) { quitFocusOnPart() } } + document.addEventListener('wheel', onWheelWhenMaintainingFocus, { once: true, capture: true, passive: true, }) - focusInterval = setInterval(focus, 500) + + // Clear any existing interval before creating a new one + if (focusState.interval) { + clearInterval(focusState.interval) + } + focus() + .then(() => { + focusState.interval = setInterval(focus, 500) + }) + .catch(() => { + // Handle 
error if needed + }) } export function isMaintainingFocus(): boolean { - return !!focusInterval + return !!focusState.interval } function onWheelWhenMaintainingFocus() { @@ -55,9 +86,10 @@ function quitFocusOnPart() { document.removeEventListener('wheel', onWheelWhenMaintainingFocus, { capture: true, }) - if (!_dontClearInterval && focusInterval) { - clearInterval(focusInterval) - focusInterval = undefined + + if (focusState.interval) { + clearInterval(focusState.interval) + focusState.interval = undefined } } @@ -69,11 +101,7 @@ export async function scrollToPartInstance( quitFocusOnPart() const partInstance = UIPartInstances.findOne(partInstanceId) if (partInstance) { - RundownViewEventBus.emit(RundownViewEvents.GO_TO_PART_INSTANCE, { - segmentId: partInstance.segmentId, - partInstanceId: partInstanceId, - }) - return scrollToSegment(partInstance.segmentId, forceScroll, noAnimation, partInstanceId) + return scrollToSegment(partInstance.segmentId, forceScroll, noAnimation) } throw new Error('Could not find PartInstance') } @@ -126,39 +154,10 @@ let currentScrollingElement: HTMLElement | undefined export async function scrollToSegment( elementToScrollToOrSegmentId: HTMLElement | SegmentId, forceScroll?: boolean, - noAnimation?: boolean, - partInstanceId?: PartInstanceId + noAnimation?: boolean ): Promise { - const getElementToScrollTo = (showHistory: boolean): HTMLElement | null => { - if (isProtectedString(elementToScrollToOrSegmentId)) { - let targetElement = document.querySelector( - `#${SEGMENT_TIMELINE_ELEMENT_ID}${elementToScrollToOrSegmentId}` - ) - - if (showHistory && Settings.followOnAirSegmentsHistory && targetElement) { - let i = Settings.followOnAirSegmentsHistory - while (i > 0) { - // Segment timeline is wrapped by
...
when rendered - const next: any = targetElement?.parentElement?.parentElement?.previousElementSibling?.children - .item(0) - ?.children.item(0) - if (next) { - targetElement = next - i-- - } else { - i = 0 - } - } - } - - return targetElement - } - - return elementToScrollToOrSegmentId - } - - const elementToScrollTo: HTMLElement | null = getElementToScrollTo(false) - const historyTarget: HTMLElement | null = getElementToScrollTo(true) + const elementToScrollTo: HTMLElement | null = getElementToScrollTo(elementToScrollToOrSegmentId, false) + const historyTarget: HTMLElement | null = getElementToScrollTo(elementToScrollToOrSegmentId, true) // historyTarget will be === to elementToScrollTo if history is not used / not found if (!elementToScrollTo || !historyTarget) { @@ -169,24 +168,71 @@ export async function scrollToSegment( historyTarget, forceScroll || !regionInViewport(historyTarget, elementToScrollTo), noAnimation, - false, - partInstanceId + false ) } +function getElementToScrollTo( + elementToScrollToOrSegmentId: HTMLElement | SegmentId, + showHistory: boolean +): HTMLElement | null { + if (isProtectedString(elementToScrollToOrSegmentId)) { + // Get the current segment element + let targetElement = document.querySelector( + `#${SEGMENT_TIMELINE_ELEMENT_ID}${elementToScrollToOrSegmentId}` + ) + if (showHistory && Settings.followOnAirSegmentsHistory && targetElement) { + let i = Settings.followOnAirSegmentsHistory + + // Find previous segments + while (i > 0 && targetElement) { + const currentSegmentId = targetElement.id + const allSegments = Array.from(document.querySelectorAll(`[id^="${SEGMENT_TIMELINE_ELEMENT_ID}"]`)) + + // Find current segment's index in the array of all segments + const currentIndex = allSegments.findIndex((el) => el.id === currentSegmentId) + + // Find the previous segment + if (currentIndex > 0) { + targetElement = allSegments[currentIndex - 1] as HTMLElement + i-- + } else { + // No more previous segments + break + } + } + } + + return 
targetElement + } + + return elementToScrollToOrSegmentId +} + +let pendingFirstStageTimeout: NodeJS.Timeout | undefined + async function innerScrollToSegment( elementToScrollTo: HTMLElement, forceScroll?: boolean, noAnimation?: boolean, - secondStage?: boolean, - partInstanceId?: PartInstanceId + secondStage?: boolean ): Promise { if (!secondStage) { + if (pendingFirstStageTimeout) { + clearTimeout(pendingFirstStageTimeout) + pendingFirstStageTimeout = undefined + } currentScrollingElement = elementToScrollTo } else if (secondStage && elementToScrollTo !== currentScrollingElement) { throw new Error('Scroll overriden by another scroll') } + // Ensure that the element is ready to be scrolled: + if (!secondStage) { + await new Promise((resolve) => setTimeout(resolve, 100)) + } + await new Promise((resolve) => requestAnimationFrame(resolve)) + let { top, bottom } = elementToScrollTo.getBoundingClientRect() top = Math.floor(top) bottom = Math.floor(bottom) @@ -199,36 +245,25 @@ async function innerScrollToSegment( return scrollToPosition(top + window.scrollY, noAnimation).then( async () => { - // retry scroll in case we have to load some data - if (pendingSecondStageScroll) window.cancelIdleCallback(pendingSecondStageScroll) return new Promise((resolve, reject) => { - // scrollToPosition will resolve after some time, at which point a new pendingSecondStageScroll may have been created - - pendingSecondStageScroll = window.requestIdleCallback( - () => { - if (!secondStage) { - let { top, bottom } = elementToScrollTo.getBoundingClientRect() - top = Math.floor(top) - bottom = Math.floor(bottom) - - if (bottom > Math.floor(window.innerHeight) || top < headerHeight) { - innerScrollToSegment( - elementToScrollTo, - forceScroll, - true, - true, - partInstanceId - ).then(resolve, reject) - } else { - resolve(true) - } + if (!secondStage) { + // Wait to settle 1 atemt to scroll + pendingFirstStageTimeout = setTimeout(() => { + pendingFirstStageTimeout = undefined + let { top, 
bottom } = elementToScrollTo.getBoundingClientRect() + top = Math.floor(top) + bottom = Math.floor(bottom) + if (bottom > Math.floor(window.innerHeight) || top < headerHeight) { + // If not in place atempt to scroll again + innerScrollToSegment(elementToScrollTo, forceScroll, true, true).then(resolve, reject) } else { - currentScrollingElement = undefined resolve(true) } - }, - { timeout: 250 } - ) + }, 420) + } else { + currentScrollingElement = undefined + resolve(true) + } }) }, (error) => { @@ -258,44 +293,29 @@ function getRegionPosition(topElement: HTMLElement, bottomElement: HTMLElement): return { top, bottom } } -let scrollToPositionRequest: number | undefined -let scrollToPositionRequestReject: ((reason?: any) => void) | undefined - export async function scrollToPosition(scrollPosition: number, noAnimation?: boolean): Promise { + // Calculate the exact position + const headerOffset = getHeaderHeight() + HEADER_MARGIN + const targetTop = Math.max(0, scrollPosition - headerOffset) + if (noAnimation) { window.scroll({ - top: Math.max(0, scrollPosition - getHeaderHeight() - HEADER_MARGIN), + top: targetTop, left: 0, + behavior: 'instant', }) return Promise.resolve() } else { - return new Promise((resolve, reject) => { - if (scrollToPositionRequest !== undefined) window.cancelIdleCallback(scrollToPositionRequest) - if (scrollToPositionRequestReject !== undefined) - scrollToPositionRequestReject('Prevented by another scroll') - - scrollToPositionRequestReject = reject - const currentTop = window.scrollY - const targetTop = Math.max(0, scrollPosition - getHeaderHeight() - HEADER_MARGIN) - scrollToPositionRequest = window.requestIdleCallback( - () => { - window.scroll({ - top: targetTop, - left: 0, - behavior: 'smooth', - }) - setTimeout( - () => { - resolve() - scrollToPositionRequestReject = undefined - // this formula was experimentally created from Chrome 86 behavior - }, - 3000 * Math.log(Math.abs(currentTop - targetTop) / 2000 + 1) - ) - }, - { timeout: 250 
} - ) + viewPortScrollingState.isProgrammaticScrollInProgress = true + viewPortScrollingState.lastProgrammaticScrollTime = Date.now() + + window.scroll({ + top: targetTop, + left: 0, + behavior: 'smooth', }) + await new Promise((resolve) => setTimeout(resolve, 300)) + viewPortScrollingState.isProgrammaticScrollInProgress = false } } diff --git a/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss b/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss index 2f4e5c328a..6e0e1eff21 100644 --- a/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss +++ b/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss @@ -11,6 +11,9 @@ } .dashboard-panel__panel__button { + margin-top: 10px; + height: 110px; + max-width: 170px !important; > .dashboard-panel__panel__button__content { display: grid; grid-template-columns: 1fr min-content; @@ -31,7 +34,7 @@ > .dashboard-panel__panel__button__thumbnail { position: relative; - height: auto; + height: 85px; z-index: 1; overflow: hidden; grid-column: auto / span 2; diff --git a/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx b/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx index 829e4b4518..efff6d8649 100644 --- a/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx +++ b/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx @@ -56,6 +56,9 @@ export function ClipTrimDialog({ const handleAccept = useCallback((e: SomeEvent) => { onClose?.() + const startPartId = selectedPiece.startPartId + if (!startPartId) return + doUserAction( t, e, @@ -65,7 +68,7 @@ export function ClipTrimDialog({ e, ts, playlistId, - selectedPiece.startPartId, + startPartId, selectedPiece._id, state.inPoint, state.duration diff --git a/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx b/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx index 7c138b3d55..430807894e 100644 --- a/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx +++ 
b/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx @@ -458,14 +458,14 @@ function usePieceItems(partIds: PartId[], partMeta: Map) { const pieceItems = useTracker( () => pieces.map((piece) => { - const meta = partMeta.get(piece.startPartId) + const meta = piece.startPartId && partMeta.get(piece.startPartId) if (!meta) return return getListItemFromPieceAndPartMeta( piece._id, piece, meta, - piece.startPartId, + piece.startPartId ?? undefined, undefined, meta.segmentId, false diff --git a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss index ab5105bcc2..e7182b8e32 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss +++ b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss @@ -3,17 +3,16 @@ .preview-popUp { border: 1px solid var(--sofie-segment-layer-hover-popup-border); background: var(--sofie-segment-layer-hover-popup-background); - box-shadow: 0 0 4px 0 rgba(0, 0, 0, 0.5); + box-shadow: 0 0 20px 0 rgba(0, 0, 0, 0.8); border-radius: 5px; overflow: hidden; pointer-events: none; - box-shadow: 0 0 20px 0 rgba(0, 0, 0, 0.6); - z-index: 9999; &--large { width: 482px; + padding-bottom: 10px; --preview-max-dimension: 480; } @@ -25,18 +24,65 @@ &--hidden { visibility: none; } + + font-family: Roboto Flex; + + font-style: normal; + font-weight: 500; + font-size: 16px; + line-height: 110%; + /* identical to box height, or 15px */ + letter-spacing: 0.02em; + font-feature-settings: + 'tnum', + 'liga' off; + color: #ffffff; + font-variation-settings: + 'GRAD' 0, + 'opsz' 15, + 'slnt' 0, + 'wdth' 30, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; } .preview-popUp__preview { width: 100%; - font-family: 'Roboto Condensed'; - font-size: 0.9375rem; // 15px; .preview-popUp__script, .preview-popUp__script-comment, .preview-popUp__script-last-modified { - padding: 0.4em 0.4em 0.4em 0.6em; - font-style: italic; + 
padding: 5px; + padding-left: 2%; + padding-right: 2%; + font-weight: 300; + font-size: 16px; + line-height: 120%; + letter-spacing: 0.03em; + font-feature-settings: + 'tnum', + 'liga' off; + + color: #ffffff; + font-variation-settings: + 'GRAD' 0, + 'opsz' 16, + 'slnt' -10, + 'wdth' 75, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; } .preview-popUp__script-comment, @@ -54,6 +100,72 @@ letter-spacing: 0.02rem; padding: 5px; + padding-left: 2%; + } + + .preview-popUp__element-with-time-info { + width: 100%; + display: flex; + + margin-bottom: 7px; + + .preview-popUp__element-with-time-info__layer-color { + height: 13px; + aspect-ratio: 1; + margin-left: 2%; + margin-top: 7px; + flex-shrink: 0; + @include item-type-colors(); + } + + .preview-popUp__element-with-time-info__text { + margin: 5px; + width: calc(100% - 35px); + flex-grow: 1; + } + + .preview-popUp__element-with-time-info__timing { + margin-left: 5px; + overflow: none; + white-space: nowrap; + text-overflow: ellipsis; + font-feature-settings: 'liga' off; + + font-weight: 500; + line-height: 100%; /* 15px */ + + .label { + font-weight: 100; + line-height: 100%; + /* identical to box height, or 15px */ + letter-spacing: 0.02em; + font-feature-settings: + 'tnum', + 'liga' off; + color: #b2b2b2; + font-variation-settings: + 'GRAD' 0, + 'opsz' 30, + 'slnt' 0, + 'wdth' 25, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; + } + } + } + + .preview-popup__separation-line { + width: 96%; + margin-left: 2%; + background-color: #5b5b5b; + margin-top: 0px; + margin-bottom: 0px; } .preview-popUp__warning { @@ -174,21 +286,42 @@ } .preview-popUp__in-out-words { - letter-spacing: 0em; + font-weight: 300; + font-size: 16px; + line-height: 100%; + letter-spacing: 0.02em; + font-feature-settings: + 'tnum', + 'liga' off; + color: #ffffff; + font-variation-settings: + 'GRAD' 0, + 'opsz' 16, + 'slnt' 
-10, + 'wdth' 75, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; width: 100%; overflow: hidden; text-overflow: clip; white-space: nowrap; - margin-top: -25px; //Pull up the in/out words a bit - padding: 7px; + padding: 5px; + padding-left: 2%; + padding-right: 2%; .separation-line { width: 100%; height: 1px; background-color: #5b5b5b; - margin-bottom: 5px; + margin-bottom: 7px; } .in-words, @@ -201,7 +334,7 @@ } .out-words { - direction: rtl; + text-align: right; } } diff --git a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx index b07d940678..5c1c3a35d0 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx +++ b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx @@ -1,5 +1,4 @@ import React from 'react' -import { PreviewContent } from './PreviewPopUpContext.js' import { WarningIconSmall } from '../../lib/ui/icons/notifications.js' import { translateMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' import { TFunction, useTranslation } from 'react-i18next' @@ -11,9 +10,11 @@ import { RundownUtils } from '../../lib/rundown.js' import { PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' import { PieceLifespan } from '@sofie-automation/blueprints-integration' +import { LayerInfoPreview } from './Previews/LayerInfoPreview.js' +import { PreviewContentUI } from './PreviewPopUpContext.js' interface PreviewPopUpContentProps { - content: PreviewContent + content: PreviewContentUI time: number | null } @@ -38,7 +39,6 @@ export function PreviewPopUpContent({ content, time }: PreviewPopUpContentProps) case 'inOutWords': return (
-
{content.in}
{content.out}
@@ -59,6 +59,10 @@ export function PreviewPopUpContent({ content, time }: PreviewPopUpContentProps)
) + case 'layerInfo': + return + case 'separationLine': + return
case 'boxLayout': return case 'warning': @@ -108,17 +112,17 @@ function getDurationText( function getLifeSpanText(t: TFunction, lifespan: PieceLifespan): string { switch (lifespan) { case PieceLifespan.WithinPart: - return t('Until next take') + return t('Until Next Take') case PieceLifespan.OutOnSegmentChange: - return t('Until next segment') + return t('Until Next Segment') case PieceLifespan.OutOnSegmentEnd: - return t('Until end of segment') + return t('Until End of Segment') case PieceLifespan.OutOnRundownChange: - return t('Until next rundown') + return t('Until Next Rundown') case PieceLifespan.OutOnRundownEnd: - return t('Until end of rundown') + return t('Until End of Rundown') case PieceLifespan.OutOnShowStyleEnd: - return t('Until end of showstyle') + return t('Until End of Showstyle') default: return '' } diff --git a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx index 53459b8aeb..9e446287d3 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx +++ b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx @@ -6,6 +6,7 @@ import { JSONBlobParse, NoraPayload, PieceLifespan, + PreviewContent, PreviewType, ScriptContent, SourceLayerType, @@ -33,11 +34,11 @@ export function convertSourceLayerItemToPreview( item: ReadonlyObjectDeep | IAdLibListItem, contentStatus?: ReadonlyObjectDeep, timeAsRendered?: { in?: number | null; dur?: number | null } -): { contents: PreviewContent[]; options: Readonly> } { +): { contents: PreviewContentUI[]; options: Readonly> } { // first try to read the popup preview if (item.content.popUpPreview) { const popupPreview = item.content.popUpPreview - const contents: PreviewContent[] = [] + const contents: PreviewContentUI[] = [] const options: Partial = {} if (popupPreview.name) { @@ -99,6 +100,7 @@ export function convertSourceLayerItemToPreview( break case PreviewType.VT: if (popupPreview.preview.outWords) { + 
contents.push({ type: 'separationLine' }) contents.push({ type: 'inOutWords', in: popupPreview.preview.inWords, @@ -120,10 +122,14 @@ export function convertSourceLayerItemToPreview( } break } + // Add any additional preview content to the popup: + popupPreview.additionalPreviewContent?.forEach((content) => { + contents.push(content as PreviewContentUI) + }) } if (popupPreview.warnings) { - contents.push(...popupPreview.warnings.map((w): PreviewContent => ({ type: 'warning', content: w.reason }))) + contents.push(...popupPreview.warnings.map((w): PreviewContentUI => ({ type: 'warning', content: w.reason }))) } return { contents, options } @@ -136,7 +142,7 @@ export function convertSourceLayerItemToPreview( const content = item.content as VTContent return { - contents: _.compact<(PreviewContent | undefined)[]>([ + contents: _.compact<(PreviewContentUI | undefined)[]>([ { type: 'title', content: content.fileName, @@ -159,11 +165,11 @@ export function convertSourceLayerItemToPreview( src: contentStatus.thumbnailUrl, } : undefined, - ...(contentStatus?.messages?.map((m) => ({ + ...(contentStatus?.messages?.map((m) => ({ type: 'warning', content: m as any, })) || []), - ]) as PreviewContent[], + ]) as PreviewContentUI[], options: { size: contentStatus?.previewUrl ? 
'large' : undefined, }, @@ -220,7 +226,7 @@ export function convertSourceLayerItemToPreview( current: item.content.step.current, count: item.content.step.count, }, - ]) as PreviewContent[], + ]) as PreviewContentUI[], options: { size: 'large' }, } } catch (e) { @@ -237,7 +243,7 @@ export function convertSourceLayerItemToPreview( current: item.content.step.current, count: item.content.step.count, }, - ]) as PreviewContent[], + ]) as PreviewContentUI[], options: {}, } } @@ -287,43 +293,9 @@ export function convertSourceLayerItemToPreview( return { contents: [], options: {} } } - -export type PreviewContent = - | { - type: 'iframe' - href: string - postMessage?: any - dimensions?: { width: number; height: number } - } - | { - type: 'image' - src: string - } - | { - type: 'video' - src: string - } - | { - type: 'script' - script?: string - firstWords?: string - lastWords?: string - comment?: string - lastModified?: number - } - | { - type: 'title' - content: string - } - | { - type: 'inOutWords' - in?: string - out: string - } - | { - type: 'data' - content: { key: string; value: string }[] - } +// PreviewContentUI should be the same as PreviewContent, but we need to extend it with some more types: +export type PreviewContentUI = + | PreviewContent | { type: 'boxLayout' boxSourceConfiguration: ReadonlyDeep<(SplitsContentBoxContent & SplitsContentBoxProperties)[]> @@ -351,7 +323,7 @@ export interface IPreviewPopUpSession { * Update the open preview with new content or modify the content already being previewed, such as change current showing * time in the video, etc. 
*/ - readonly update: (content?: PreviewContent[]) => void + readonly update: (content?: PreviewContentUI[]) => void /** * Set the time that the current pointer position is representing in the scope of the preview contents */ @@ -390,7 +362,7 @@ export interface IPreviewPopUpContext { */ requestPreview( anchor: HTMLElement | VirtualElement, - content: PreviewContent[], + content: PreviewContentUI[], opts?: PreviewRequestOptions ): IPreviewPopUpSession } @@ -415,7 +387,7 @@ export function PreviewPopUpContextProvider({ children }: React.PropsWithChildre const previewRef = useRef(null) const [previewSession, setPreviewSession] = useState(null) - const [previewContent, setPreviewContent] = useState(null) + const [previewContent, setPreviewContent] = useState(null) const [t, setTime] = useState(null) const context: IPreviewPopUpContext = { diff --git a/packages/webui/src/client/ui/PreviewPopUp/Previews/IFramePreview.tsx b/packages/webui/src/client/ui/PreviewPopUp/Previews/IFramePreview.tsx index 4be2f655f9..b93067a262 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/Previews/IFramePreview.tsx +++ b/packages/webui/src/client/ui/PreviewPopUp/Previews/IFramePreview.tsx @@ -17,8 +17,8 @@ export function IFramePreview({ content }: IFramePreviewProps): React.ReactEleme const onLoadListener = useCallback(() => { if (content.postMessage) { - const url = new URL(content.href) - iFrameElement.current?.contentWindow?.postMessage(content.postMessage, url.origin) + // use * as URL reference to avoid cors when posting message with new reference: + iFrameElement.current?.contentWindow?.postMessage(content.postMessage, '*') } }, [content.postMessage, content.href]) @@ -31,6 +31,14 @@ export function IFramePreview({ content }: IFramePreviewProps): React.ReactEleme return () => currentIFrame.removeEventListener('load', onLoadListener) }, [onLoadListener]) + // Handle postMessage updates when iframe is already loaded + useEffect(() => { + if (content.postMessage && 
iFrameElement.current?.contentWindow) { + // use * as URL reference to avoid cors when posting message with new reference: + iFrameElement.current.contentWindow.postMessage(content.postMessage, '*') + } + }, [content.postMessage, content.href]) + const style: Record = {} if (content.dimensions) { style['--preview-render-width'] = content.dimensions.width diff --git a/packages/webui/src/client/ui/PreviewPopUp/Previews/LayerInfoPreview.tsx b/packages/webui/src/client/ui/PreviewPopUp/Previews/LayerInfoPreview.tsx new file mode 100644 index 0000000000..cc87ee3a49 --- /dev/null +++ b/packages/webui/src/client/ui/PreviewPopUp/Previews/LayerInfoPreview.tsx @@ -0,0 +1,53 @@ +import { PreviewContent } from '@sofie-automation/blueprints-integration' +import { RundownUtils } from '../../../lib/rundown' +import { useTranslation } from 'react-i18next' +import classNames from 'classnames' + +type layerInfoContent = Extract + +export function LayerInfoPreview(content: layerInfoContent): React.ReactElement { + const { t } = useTranslation() + const sourceLayerClassName = + content.layerType !== undefined ? RundownUtils.getSourceLayerClassName(content.layerType) : undefined + + return ( +
+
+
+ {content.text.map((line, index) => ( +
+ {line} +
+ ))} +
+ {content.inTime !== undefined && ( + <> + {t('IN')}: + {typeof content.inTime === 'number' + ? RundownUtils.formatTimeToShortTime(content.inTime || 0) + : content.inTime} + + )} +  {' '} + {content.duration !== undefined && ( + <> + {t('DURATION')}: + {typeof content.duration === 'number' + ? RundownUtils.formatTimeToShortTime(content.duration || 0) + : content.duration} + + )} +  {' '} + {content.outTime !== undefined && ( + <> + {t('OUT')}: + {typeof content.outTime === 'number' + ? RundownUtils.formatTimeToShortTime(content.outTime || 0) + : content.outTime} + + )} +
+
+
+ ) +} diff --git a/packages/webui/src/client/ui/Prompter/prompter.ts b/packages/webui/src/client/ui/Prompter/prompter.ts index 03e9a80975..4aaf429d31 100644 --- a/packages/webui/src/client/ui/Prompter/prompter.ts +++ b/packages/webui/src/client/ui/Prompter/prompter.ts @@ -15,7 +15,10 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { RundownUtils } from '../../lib/rundown.js' import { RundownPlaylistClientUtil } from '../../lib/rundownPlaylistUtil.js' import { SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import _ from 'underscore' import { FindOptions } from '../../collections/lib.js' import { RundownPlaylistCollectionUtil } from '../../collections/rundownPlaylistUtil.js' @@ -23,6 +26,7 @@ import { normalizeArrayToMap, protectString } from '../../lib/tempLib.js' import { PieceInstances, Pieces, RundownPlaylists, Segments } from '../../collections/index.js' import { getPieceInstancesForPartInstance } from '../../lib/RundownResolver.js' import { UIShowStyleBases } from '../Collections.js' +import { getCurrentTime } from '../../lib/systemTime.js' // export interface NewPrompterAPI { // getPrompterData (playlistId: RundownPlaylistId): Promise @@ -149,7 +153,7 @@ export namespace PrompterAPI { let previousRundown: Rundown | null = null const rundownIds = rundowns.map((rundown) => rundown._id) - const allPiecesCache = new Map() + const allPiecesCache = new Map() Pieces.find({ startRundownId: { $in: rundownIds }, }).forEach((piece) => { @@ -243,10 +247,11 @@ export namespace PrompterAPI { const sourceLayers = rundownIdsToShowStyleBase.get(partInstance.rundownId) if (sourceLayers) { + const partTimes = createPartCurrentTimes(getCurrentTime(), null) const 
preprocessedPieces = processAndPrunePieceInstanceTimings( sourceLayers, rawPieceInstances, - 0, + partTimes, true ) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 59822a4031..2fb51d01fc 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -500,9 +500,14 @@ const RundownViewContent = translateWithTracker { - if (!error.toString().match(/another scroll/)) console.warn(error) - }) + // add small delay to ensure the nextPartInfo is available + setTimeout(() => { + if (this.props.playlist && this.props.playlist.nextPartInfo) { + scrollToPartInstance(this.props.playlist.nextPartInfo.partInstanceId).catch((error) => { + if (!error.toString().match(/another scroll/)) console.warn(error) + }) + } + }, 120) } else if ( // after take this.props.playlist && @@ -639,24 +644,36 @@ const RundownViewContent = translateWithTracker { - if (this.state.followLiveSegments && this.props.playlist && this.props.playlist.activationId) { - const liveSegmentComponent = document.querySelector('.segment-timeline.live') - if (liveSegmentComponent) { - const offsetPosition = liveSegmentComponent.getBoundingClientRect() - // if it's closer to the top edge than the headerHeight - const segmentComponentTooHigh = offsetPosition.top < getHeaderHeight() - // or if it's closer to the bottom edge than very close to the top - const segmentComponentTooLow = - offsetPosition.bottom < window.innerHeight - getHeaderHeight() - 20 - (offsetPosition.height * 3) / 2 - if (segmentComponentTooHigh || segmentComponentTooLow) { - this.setState({ - followLiveSegments: false, - }) + private onWheelScrollInner = _.throttle( + () => { + if (this.state.followLiveSegments && this.props.playlist && this.props.playlist.activationId) { + const liveSegmentComponent = document.querySelector('.segment-timeline.live') + if (liveSegmentComponent) { + const offsetPosition = 
liveSegmentComponent.getBoundingClientRect() + const headerHeight = getHeaderHeight() + + // Use a buffer zone to prevent oscillation + const topBuffer = headerHeight + 10 + const bottomBuffer = window.innerHeight - headerHeight - 20 - (offsetPosition.height * 3) / 2 + + // Check if segment is outside the comfortable viewing area + const segmentComponentTooHigh = offsetPosition.top < topBuffer + const segmentComponentTooLow = offsetPosition.bottom < bottomBuffer + + if (segmentComponentTooHigh || segmentComponentTooLow) { + // Only change state if we need to + if (this.state.followLiveSegments) { + this.setState({ + followLiveSegments: false, + }) + } + } } } - } - }, 250) + }, + 100, + { leading: true, trailing: true } + ) private onWheel = (e: React.WheelEvent) => { if (e.deltaX === 0 && e.deltaY !== 0 && !e.altKey && !e.shiftKey && !e.ctrlKey && !e.metaKey) { @@ -689,9 +706,14 @@ const RundownViewContent = translateWithTracker { - if (!error.toString().match(/another scroll/)) console.warn(error) - }) + // Small delay to ensure the nextPartInfo is available + setTimeout(() => { + if (this.props.playlist && this.props.playlist.nextPartInfo) { + scrollToPartInstance(this.props.playlist.nextPartInfo.partInstanceId, true).catch((error) => { + if (!error.toString().match(/another scroll/)) console.warn(error) + }) + } + }, 120) setTimeout(() => { this.setState({ followLiveSegments: true, diff --git a/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx b/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx index 3a573d3337..775d55c326 100644 --- a/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx @@ -183,20 +183,22 @@ export function RundownRightHandControls(props: Readonly): JSX.Element { > - {!props.isFollowingOnAir && ( - - )} +
+ {!props.isFollowingOnAir && ( + + )} +
diff --git a/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx b/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx index 3bdc731715..fdbf8b0f75 100644 --- a/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx +++ b/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx @@ -221,7 +221,7 @@ export function useSelectedElements( const computation = Tracker.nonreactive(() => Tracker.autorun(() => { const piece = Pieces.findOne(selectedElement?.elementId) - const part = UIParts.findOne({ _id: piece ? piece.startPartId : selectedElement?.elementId }) + const part = UIParts.findOne({ _id: piece?.startPartId ?? selectedElement?.elementId }) const segment = Segments.findOne({ _id: part ? part.segmentId : selectedElement?.elementId }) setPiece(piece) diff --git a/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx b/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx index ef310ed7a4..b1368e9c47 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx @@ -1,7 +1,5 @@ import { getElementWidth } from '../../../utils/dimensions.js' - import { TransitionContent } from '@sofie-automation/blueprints-integration' - import { CustomLayerItemRenderer, ICustomLayerItemProps } from './CustomLayerItemRenderer.js' import { createPrivateApiPath } from '../../../url.js' diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx index 0118421bd1..2123735eb6 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx @@ -132,13 +132,13 @@ export const SegmentContextMenu = withTranslation()( {startsAt !== null && part && 
this.props.enablePlayFromAnywhere ? ( <> - {/* this.onSetAsNextFromHere(part.instance.part, e)} disabled={isCurrentPart || !!part.instance.orphaned || !canSetAsNext} > Next Here') }}> ( {RundownUtils.formatTimeToShortTime(Math.floor((startsAt + timecode) / 1000) * 1000)}) - */} + this.onPlayFromHere(part.instance.part, e)} disabled={!!part.instance.orphaned || !canSetAsNext} @@ -252,10 +252,10 @@ export const SegmentContextMenu = withTranslation()( } } - // private onSetAsNextFromHere = (part: DBPart, e) => { - // const offset = this.getTimePosition() - // this.props.onSetNext(part, e, offset || 0) - // } + private onSetAsNextFromHere = (part: DBPart, e: React.MouseEvent | React.TouchEvent) => { + const offset = this.getTimePosition() + this.props.onSetNext(part, e, offset || 0) + } private onPlayFromHere = (part: DBPart, e: React.MouseEvent | React.TouchEvent) => { const offset = this.getTimePosition() diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx index db847f0e44..4b3dd394da 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx @@ -149,6 +149,7 @@ const SegmentTimelineContainerContent = withResolvedSegment( declare context: React.ContextType isVisible: boolean + visibilityChangeTimeout: NodeJS.Timeout | undefined rundownCurrentPartInstanceId: PartInstanceId | null = null timelineDiv: HTMLDivElement | null = null intersectionObserver: IntersectionObserver | undefined @@ -198,14 +199,17 @@ const SegmentTimelineContainerContent = withResolvedSegment( RundownViewEventBus.on(RundownViewEvents.REWIND_SEGMENTS, this.onRewindSegment) RundownViewEventBus.on(RundownViewEvents.GO_TO_PART, this.onGoToPart) RundownViewEventBus.on(RundownViewEvents.GO_TO_PART_INSTANCE, this.onGoToPartInstance) - window.requestAnimationFrame(() => { - this.mountedTime 
= Date.now() - if (this.state.isLiveSegment && this.props.followLiveSegments && !this.isVisible) { - scrollToSegment(this.props.segmentId, true).catch((error) => { - if (!error.toString().match(/another scroll/)) console.warn(error) - }) - } - }) + // Delay is to ensure UI has settled before checking: + setTimeout(() => { + window.requestAnimationFrame(() => { + this.mountedTime = Date.now() + if (this.state.isLiveSegment && this.props.followLiveSegments && !this.isVisible) { + scrollToSegment(this.props.segmentId, true).catch((error) => { + if (!error.toString().match(/another scroll/)) console.warn(error) + }) + } + }) + }, 500) window.addEventListener('resize', this.onWindowResize) this.updateMaxTimeScale() .then(() => this.showEntireSegment()) @@ -541,12 +545,19 @@ const SegmentTimelineContainerContent = withResolvedSegment( } visibleChanged = (entries: IntersectionObserverEntry[]) => { - if (entries[0].intersectionRatio < 0.99 && !isMaintainingFocus() && Date.now() - this.mountedTime > 2000) { - if (typeof this.props.onSegmentScroll === 'function') this.props.onSegmentScroll() - this.isVisible = false - } else { - this.isVisible = true + // Add a small debounce to ensure UI has settled before checking + if (this.visibilityChangeTimeout) { + clearTimeout(this.visibilityChangeTimeout) } + + this.visibilityChangeTimeout = setTimeout(() => { + if (entries[0].intersectionRatio < 0.99 && !isMaintainingFocus() && Date.now() - this.mountedTime > 2000) { + if (typeof this.props.onSegmentScroll === 'function') this.props.onSegmentScroll() + this.isVisible = false + } else { + this.isVisible = true + } + }, 1800) } startLive = () => { diff --git a/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx b/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx index bd81bd5f5b..f4e015b100 100644 --- a/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx +++ b/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx @@ -20,10 +20,10 @@ import { MeteorCall 
} from '../../lib/meteorApi.js' import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Blueprints, CoreSystem, ShowStyleBases, Studios } from '../../collections/index.js' import { LabelActual } from '../../lib/Components/LabelAndOverrides.js' +import { createPrivateApiPath } from '../../url.js' import Button from 'react-bootstrap/esm/Button' import { useTranslation } from 'react-i18next' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { createPrivateApiPath } from '../../url.js' interface IProps { blueprintId: BlueprintId diff --git a/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx b/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx index 82d0408991..c6c917cd2f 100644 --- a/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx +++ b/packages/webui/src/client/ui/Settings/DevicePackageManagerSettings.tsx @@ -23,38 +23,53 @@ export const DevicePackageManagerSettings: React.FC PeripheralDevices.findOne(deviceId), [deviceId], undefined) - const reloadingNow = useRef(false) + const reloadingNow = useRef(null) const [status, setStatus] = useState(undefined) - const reloadStatus = useCallback((silent = false) => { - if (reloadingNow.current) return // if there is a method currently being executed, skip + const reloadStatus = useCallback( + (silent = false) => { + if (reloadingNow.current === deviceId) return // if there is a method currently being executed, skip - reloadingNow.current = true + reloadingNow.current = deviceId - MeteorCall.client - .callBackgroundPeripheralDeviceFunction(deviceId, 1000, 'getExpetationManagerStatus') - .then((result: Status) => setStatus(result)) - .catch((error) => { - if (silent) { - logger.error('callBackgroundPeripheralDeviceFunction getExpetationManagerStatus', error) - return - } + MeteorCall.client + .callBackgroundPeripheralDeviceFunction(deviceId, 1000, 'getExpetationManagerStatus') + .then((result: 
Status) => { + if (reloadingNow.current !== deviceId) return // if the deviceId has changed, abort - doModalDialog({ - message: t('There was an error: {{error}}', { error: error.toString() }), - title: t('Error'), - warning: true, - onAccept: () => { - // Do nothing - }, + setStatus(result) }) - }) - .finally(() => { - reloadingNow.current = false - }) - }, []) + .catch((error) => { + if (reloadingNow.current !== deviceId) return // if the deviceId has changed, abort + + if (silent) { + logger.error('callBackgroundPeripheralDeviceFunction getExpetationManagerStatus', error) + return + } + + doModalDialog({ + message: t('There was an error: {{error}}', { error: error.toString() }), + title: t('Error'), + warning: true, + onAccept: () => { + // Do nothing + }, + }) + }) + .finally(() => { + reloadingNow.current = null + }) + }, + [deviceId] + ) useEffect(() => { + // Clear cached status when deviceId changes + setStatus(undefined) + + // Trigger a load now + reloadStatus(true) + const reloadInterval = Meteor.setInterval(() => { if (deviceId) { reloadStatus(true) @@ -64,7 +79,7 @@ export const DevicePackageManagerSettings: React.FC { Meteor.clearInterval(reloadInterval) } - }, []) + }, [deviceId, reloadStatus]) function killApp(e: string, appId: string) { MeteorCall.client diff --git a/packages/webui/src/client/ui/Settings/Studio/Generic.tsx b/packages/webui/src/client/ui/Settings/Studio/Generic.tsx index dab6291688..b106f1971f 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Generic.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Generic.tsx @@ -420,6 +420,16 @@ function StudioSettings({ studio }: { studio: DBStudio }): JSX.Element { > {(value, handleUpdate) => } + + + {(value, handleUpdate) => } + ) } diff --git a/packages/webui/src/client/ui/Settings/SystemManagement.tsx b/packages/webui/src/client/ui/Settings/SystemManagement.tsx index 140d195ac1..bc37fbf551 100644 --- a/packages/webui/src/client/ui/Settings/SystemManagement.tsx +++ 
b/packages/webui/src/client/ui/Settings/SystemManagement.tsx @@ -33,8 +33,8 @@ import { MultiLineTextInputControl, } from '../../lib/Components/MultiLineTextInput.js' import { TextInputControl } from '../../lib/Components/TextInput.js' -import Button from 'react-bootstrap/esm/Button' import { createPrivateApiPath } from '../../url.js' +import Button from 'react-bootstrap/esm/Button' interface WithCoreSystemProps { coreSystem: ICoreSystem diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx index 3220c6c371..18616962d8 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx @@ -29,10 +29,10 @@ import { SourceLayers, OutputLayers } from '@sofie-automation/corelib/dist/dataM import { RundownPlaylistCollectionUtil } from '../../../../collections/rundownPlaylistUtil.js' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { UIPartInstances, UIParts } from '../../../Collections.js' +import { createPrivateApiPath } from '../../../../url.js' import Form from 'react-bootstrap/esm/Form' import Button from 'react-bootstrap/esm/Button' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { createPrivateApiPath } from '../../../../url.js' export interface PreviewContext { rundownPlaylist: DBRundownPlaylist | null diff --git a/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx b/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx index 15336ab253..37b2be42cf 100644 --- a/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx +++ b/packages/webui/src/client/ui/Status/package-status/PackageStatus.tsx @@ -24,7 +24,7 @@ export const PackageStatus: React.FC<{ const { t } = 
useTranslation() const getPackageName = useCallback((): string => { - const p2: ExpectedPackage.Any = props.package as any + const p2 = props.package.package if (p2.type === ExpectedPackage.PackageType.MEDIA_FILE) { return p2.content.filePath || unprotectString(props.package._id) } else if (p2.type === ExpectedPackage.PackageType.QUANTEL_CLIP) { diff --git a/packages/webui/src/client/ui/Status/package-status/index.tsx b/packages/webui/src/client/ui/Status/package-status/index.tsx index bb482e2ee4..658aae130d 100644 --- a/packages/webui/src/client/ui/Status/package-status/index.tsx +++ b/packages/webui/src/client/ui/Status/package-status/index.tsx @@ -67,11 +67,6 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta ) } function renderExpectedPackageStatuses() { - const packageRef: { [packageId: string]: ExpectedPackageDB } = {} - for (const expPackage of expectedPackages) { - packageRef[unprotectString(expPackage._id)] = expPackage - } - const packagesWithWorkStatuses: { [packageId: string]: { package: ExpectedPackageDB | undefined @@ -79,20 +74,33 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta device: PeripheralDevice | undefined } } = {} + + for (const expPackage of expectedPackages) { + packagesWithWorkStatuses[unprotectString(expPackage._id)] = { + package: expPackage, + statuses: [], + device: undefined, + } + } + for (const work of expectedPackageWorkStatuses) { - const device = peripheralDevicesMap.get(work.deviceId) // todo: make this better: - const key = unprotectString(work.fromPackages[0]?.id) || 'unknown_work_' + work._id - // const referencedPackage = packageRef[packageId] - let packageWithWorkStatus = packagesWithWorkStatuses[key] - if (!packageWithWorkStatus) { - packagesWithWorkStatuses[key] = packageWithWorkStatus = { - package: packageRef[key] || undefined, - statuses: [], - device, + let fromPackageIds = work.fromPackages.map((p) => unprotectString(p.id)) + if 
(fromPackageIds.length === 0) fromPackageIds = ['unknown_work_' + work._id] + + for (const key of fromPackageIds) { + // const referencedPackage = packageRef[packageId] + let packageWithWorkStatus = packagesWithWorkStatuses[key] + if (!packageWithWorkStatus) { + packagesWithWorkStatuses[key] = packageWithWorkStatus = { + package: undefined, + statuses: [], + device: undefined, + } } + packageWithWorkStatus.statuses.push(work) + packageWithWorkStatus.device = peripheralDevicesMap.get(work.deviceId) } - packageWithWorkStatus.statuses.push(work) } for (const id of Object.keys(packagesWithWorkStatuses)) { @@ -137,6 +145,8 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta return keys.map(({ packageId }) => { const p = packagesWithWorkStatuses[packageId] + console.log('p', p) + return p.package ? ( ) : ( @@ -149,6 +159,7 @@ export const ExpectedPackagesStatus: React.FC<{}> = function ExpectedPackagesSta function renderPackageContainerStatuses() { return packageContainerStatuses.map((packageContainerStatus) => { const device = peripheralDevicesMap.get(packageContainerStatus.deviceId) + console.log(device, packageContainerStatus.deviceId) return ( ( - PeripheralDevicePubSubCollectionsNames.mountedTriggers -) -const MountedTriggersPreviews = new Mongo.Collection( - PeripheralDevicePubSubCollectionsNames.mountedTriggersPreviews -) - interface DeviceTriggersViewRouteParams { peripheralDeviceId: string } diff --git a/packages/webui/src/client/ui/TestTools/IngestRundownStatus.tsx b/packages/webui/src/client/ui/TestTools/IngestRundownStatus.tsx new file mode 100644 index 0000000000..15c916bf4e --- /dev/null +++ b/packages/webui/src/client/ui/TestTools/IngestRundownStatus.tsx @@ -0,0 +1,129 @@ +import { useSubscription, useTracker } from '../../lib/ReactMeteorData/react-meteor-data.js' +import { unprotectString } from '../../lib/tempLib.js' +import { makeTableOfObject } from '../../lib/utilComponents.js' +import { PeripheralDeviceId } from 
'@sofie-automation/corelib/dist/dataModel/Ids' +import { useTranslation } from 'react-i18next' +import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' +import { PeripheralDevices } from '../../collections/index.js' +import { Link } from 'react-router-dom' +import { PeripheralDeviceCategory } from '@sofie-automation/shared-lib/dist/peripheralDevice/peripheralDeviceAPI' +import { IngestRundownStatuses } from './collections.js' +import { IngestPartStatus, IngestRundownStatus } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' +import Row from 'react-bootstrap/Row' +import Col from 'react-bootstrap/Col' + +interface IMappingsViewProps { + match?: { + params?: { + peripheralDeviceId: PeripheralDeviceId + } + } +} +function IngestRundownStatusView(props: Readonly): JSX.Element { + const { t } = useTranslation() + + return ( +
+
+

{t('Ingest Rundown Status')}

+
+
+ {props.match && props.match.params && ( + + )} +
+
+ ) +} + +interface ComponentMappingsTableProps { + peripheralDeviceId: PeripheralDeviceId +} +function ComponentMappingsTable({ peripheralDeviceId }: Readonly): JSX.Element { + useSubscription(MeteorPubSub.ingestDeviceRundownStatusTestTool, peripheralDeviceId) + + const rundowns = useTracker(() => IngestRundownStatuses.find({}).fetch(), [], []) + + return ( + <> + {rundowns.map((rundown) => ( + + ))} + + ) +} + +function StatusesForRundown({ rundown }: { rundown: IngestRundownStatus }): JSX.Element { + return ( + + +

+ {rundown.externalId} ({unprotectString(rundown._id)}) +

+ +

Status: {rundown.active}

+ + + + + + + + + + + {rundown.segments.flatMap((segment) => + segment.parts.map((part) => ( + + )) + )} + +
Segment IdPart IdReadyStatusItems
+ +
+ ) +} + +interface StatusesForSegmentRowProps { + segmentId: string + part: IngestPartStatus +} +function StatusesForSegmentRow({ segmentId, part }: Readonly) { + return ( + + {segmentId} + {part.externalId} + {JSON.stringify(part.isReady)} + {part.playbackStatus} + {makeTableOfObject(part.itemsReady)} + + ) +} + +function IngestRundownStatusSelect(): JSX.Element | null { + const { t } = useTranslation() + + useSubscription(CorelibPubSub.peripheralDevices, null) + const devices = useTracker(() => PeripheralDevices.find({ category: PeripheralDeviceCategory.INGEST }).fetch(), []) + + return ( +
+
+

{t('Ingest Rundown Statuses')}

+
+
+ Peripheral Device +
    + {devices?.map((device) => ( +
  • + {device.name} +
  • + ))} +
+
+
+ ) +} + +export { IngestRundownStatusView, IngestRundownStatusSelect } diff --git a/packages/webui/src/client/ui/TestTools/Mappings.tsx b/packages/webui/src/client/ui/TestTools/Mappings.tsx index 27456c4832..38abc92b70 100644 --- a/packages/webui/src/client/ui/TestTools/Mappings.tsx +++ b/packages/webui/src/client/ui/TestTools/Mappings.tsx @@ -6,17 +6,12 @@ import { makeTableOfObject } from '../../lib/utilComponents.js' import { StudioSelect } from './StudioSelect.js' import { MappingExt } from '@sofie-automation/corelib/dist/dataModel/Studio' import { LookaheadMode, TSR } from '@sofie-automation/blueprints-integration' -import { createSyncPeripheralDeviceCustomPublicationMongoCollection } from '../../collections/lib.js' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' import { useTranslation } from 'react-i18next' +import { StudioMappings } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' -const StudioMappings = createSyncPeripheralDeviceCustomPublicationMongoCollection( - PeripheralDevicePubSubCollectionsNames.studioMappings -) - interface IMappingsViewProps { match?: { params?: { diff --git a/packages/webui/src/client/ui/TestTools/Timeline.tsx b/packages/webui/src/client/ui/TestTools/Timeline.tsx index bf8a61290c..0d46a3822b 100644 --- a/packages/webui/src/client/ui/TestTools/Timeline.tsx +++ b/packages/webui/src/client/ui/TestTools/Timeline.tsx @@ -20,18 +20,13 @@ import { useTranslation } from 'react-i18next' import { useParams } from 'react-router-dom' import { useCallback, useEffect, useMemo, useState } from 'react' import Classnames from 'classnames' -import { createSyncPeripheralDeviceCustomPublicationMongoCollection } from '../../collections/lib.js' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevicePubSubCollectionsNames 
} from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { StudioTimeline } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' import Button from 'react-bootstrap/Button' import Form from 'react-bootstrap/Form' -export const StudioTimeline = createSyncPeripheralDeviceCustomPublicationMongoCollection( - PeripheralDevicePubSubCollectionsNames.studioTimeline -) - interface TimelineViewRouteParams { studioId: string | undefined } diff --git a/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx b/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx index 797e182ea7..e1594bda33 100644 --- a/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx +++ b/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx @@ -1,17 +1,14 @@ import { useSubscription, useTracker } from '../../lib/ReactMeteorData/react-meteor-data.js' import { StudioSelect } from './StudioSelect.js' -import { Mongo } from 'meteor/mongo' -import { DBTimelineDatastoreEntry } from '@sofie-automation/corelib/dist/dataModel/TimelineDatastore' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { useTranslation } from 'react-i18next' import { useParams } from 'react-router-dom' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' +import { TimelineDatastore } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' -const TimelineDatastore = new Mongo.Collection('timelineDatastore') - interface TimelineDatastoreViewRouteParams { studioId: string } diff --git a/packages/webui/src/client/ui/TestTools/collections.ts b/packages/webui/src/client/ui/TestTools/collections.ts new file mode 100644 index 0000000000..528f4cc862 --- /dev/null +++ b/packages/webui/src/client/ui/TestTools/collections.ts @@ -0,0 +1,30 @@ +import { 
PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { createSyncPeripheralDeviceCustomPublicationMongoCollection } from '../../collections/lib.js' + +/** + * These collections are not public and are for the use of the TestTools only. + * They are defined in this file, as hot reloading them is not supported + */ + +export const IngestRundownStatuses = createSyncPeripheralDeviceCustomPublicationMongoCollection( + PeripheralDevicePubSubCollectionsNames.ingestRundownStatus +) + +export const MountedTriggers = createSyncPeripheralDeviceCustomPublicationMongoCollection( + PeripheralDevicePubSubCollectionsNames.mountedTriggers +) +export const MountedTriggersPreviews = createSyncPeripheralDeviceCustomPublicationMongoCollection( + PeripheralDevicePubSubCollectionsNames.mountedTriggersPreviews +) + +export const StudioMappings = createSyncPeripheralDeviceCustomPublicationMongoCollection( + PeripheralDevicePubSubCollectionsNames.studioMappings +) + +export const StudioTimeline = createSyncPeripheralDeviceCustomPublicationMongoCollection( + PeripheralDevicePubSubCollectionsNames.studioTimeline +) + +export const TimelineDatastore = createSyncPeripheralDeviceCustomPublicationMongoCollection( + PeripheralDevicePubSubCollectionsNames.timelineDatastore +) diff --git a/packages/webui/src/client/ui/TestTools/index.tsx b/packages/webui/src/client/ui/TestTools/index.tsx index c51c32651e..11807423eb 100644 --- a/packages/webui/src/client/ui/TestTools/index.tsx +++ b/packages/webui/src/client/ui/TestTools/index.tsx @@ -7,6 +7,7 @@ import { MappingsStudioSelect, MappingsView } from './Mappings.js' import { TimelineDatastoreStudioSelect, TimelineDatastoreView } from './TimelineDatastore.js' import { DeviceTriggersDeviceSelect, DeviceTriggersView } from './DeviceTriggers.js' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' +import { IngestRundownStatusSelect, IngestRundownStatusView } from 
'./IngestRundownStatus.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' import Container from 'react-bootstrap/esm/Container' @@ -44,6 +45,13 @@ function StatusMenu() { >

{t('Device Triggers')}

+ +

{t('Ingest Rundown Statuses')}

+
) } @@ -71,6 +79,8 @@ export default function Status(): JSX.Element { {' '} + + diff --git a/packages/webui/vite.config.mts b/packages/webui/vite.config.mts index 17c1524bd5..8472c6d4b3 100644 --- a/packages/webui/vite.config.mts +++ b/packages/webui/vite.config.mts @@ -41,6 +41,13 @@ export default defineConfig(({ command }) => ({ // Add all sofie paths, ensuring they use unix path syntax ...commonJsPaths.map((p) => p.replaceAll('\\', '/')), + // Commonjs monorepo dependencies + '@sofie-automation/blueprints-integration', + ], + exclude: [ + // Add all sofie paths, ensuring they use unix path syntax + ...commonJsPaths.map((p) => p.replaceAll('\\', '/')), + // Commonjs monorepo dependencies '@sofie-automation/blueprints-integration', ],