diff --git a/.github/workflows/node.yaml b/.github/workflows/node.yaml index c067d58149..ae2351eba0 100644 --- a/.github/workflows/node.yaml +++ b/.github/workflows/node.yaml @@ -502,16 +502,19 @@ jobs: - node-version: 22.x package-name: job-worker send-coverage: true - # No tests for the gateways yet + # No tests for some gateways yet # - node-version: 22.x # package-name: playout-gateway - # - node-version: 22.x - # package-name: mos-gateway + # send-coverage: true + - node-version: 22.x + package-name: mos-gateway + send-coverage: true - node-version: 22.x package-name: live-status-gateway send-coverage: true - node-version: 22.x package-name: webui + send-coverage: true # manual meteor-lib as it only needs a couple of versions - node-version: 22.x package-name: meteor-lib diff --git a/meteor/__mocks__/helpers/database.ts b/meteor/__mocks__/helpers/database.ts index e19ea399e3..8ac29117ec 100644 --- a/meteor/__mocks__/helpers/database.ts +++ b/meteor/__mocks__/helpers/database.ts @@ -476,6 +476,7 @@ export async function setupMockShowStyleBlueprint( rundown, globalAdLibPieces: [], globalActions: [], + globalPieces: [], baseline: { timelineObjects: [] }, } }, diff --git a/meteor/server/api/blueprints/api.ts b/meteor/server/api/blueprints/api.ts index 7c4229d9f0..578a3581ef 100644 --- a/meteor/server/api/blueprints/api.ts +++ b/meteor/server/api/blueprints/api.ts @@ -31,6 +31,7 @@ import { DBStudio } from '@sofie-automation/corelib/dist/dataModel/Studio' import { UserPermissions } from '@sofie-automation/meteor-lib/dist/userPermissions' import { assertConnectionHasOneOfPermissions, RequestCredentials } from '../../security/auth' import { blueprintsPerformDevelopmentMode } from './development' +import { inspect } from 'util' const PERMISSIONS_FOR_MANAGE_BLUEPRINTS: Array = ['configure'] @@ -174,8 +175,12 @@ async function innerUploadBlueprint( let blueprintManifest: SomeBlueprintManifest | undefined try { blueprintManifest = evalBlueprint(newBlueprint) - } catch (_e) { - throw new Meteor.Error(400, `Blueprint ${blueprintId} failed to parse`) + } catch (error) { + console.log('Parsing error:', error) + throw new Meteor.Error( + 400, + `Blueprint ${blueprintId} failed to parse; error: ${(error as Error).message}.\n${inspect(error, { depth: 5 })}` + ) } if (!_.isObject(blueprintManifest)) diff --git a/meteor/server/api/deviceTriggers/TagsService.ts b/meteor/server/api/deviceTriggers/TagsService.ts index 6983c3d888..0ec3e53df7 100644 --- a/meteor/server/api/deviceTriggers/TagsService.ts +++ b/meteor/server/api/deviceTriggers/TagsService.ts @@ -4,12 +4,14 @@ import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceIns import { PieceInstanceFields, ContentCache } from './reactiveContentCacheForPieceInstances' import { SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' import { + createPartCurrentTimes, PieceInstanceWithTimings, processAndPrunePieceInstanceTimings, } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { applyAndValidateOverrides } from '@sofie-automation/corelib/dist/settings/objectWithOverrides' import { IWrappedAdLib } from '@sofie-automation/meteor-lib/dist/triggers/actionFilterChainCompilers' import { areSetsEqual, doSetsIntersect } from '@sofie-automation/corelib/dist/lib' +import { getCurrentTime } from '../../lib/lib' export class TagsService { protected onAirPiecesTags: Set = new Set() @@ -130,12 +132,11 @@ export class TagsService { ): PieceInstanceWithTimings[] { // Approximate when 'now' is in the 
PartInstance, so that any adlibbed Pieces will be timed roughly correctly const partStarted = partInstanceTimings?.plannedStartedPlayback - const nowInPart = partStarted === undefined ? 0 : Date.now() - partStarted return processAndPrunePieceInstanceTimings( sourceLayers, pieceInstances as PieceInstance[], - nowInPart, + createPartCurrentTimes(getCurrentTime(), partStarted), false, false ) diff --git a/meteor/server/api/ingest/packageInfo.ts b/meteor/server/api/ingest/packageInfo.ts index d3d52ccaf6..7d60627df9 100644 --- a/meteor/server/api/ingest/packageInfo.ts +++ b/meteor/server/api/ingest/packageInfo.ts @@ -35,6 +35,7 @@ export async function onUpdatedPackageInfo(packageId: ExpectedPackageId, _doc: P case ExpectedPackageDBType.ADLIB_ACTION: case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_PIECE: case ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS: onUpdatedPackageInfoForRundownDebounce(pkg) break diff --git a/meteor/server/api/rest/v1/typeConversion.ts b/meteor/server/api/rest/v1/typeConversion.ts index be9332e14a..9d79058cc6 100644 --- a/meteor/server/api/rest/v1/typeConversion.ts +++ b/meteor/server/api/rest/v1/typeConversion.ts @@ -396,6 +396,7 @@ export function studioSettingsFrom(apiStudioSettings: APIStudioSettings): Comple enableBuckets: apiStudioSettings.enableBuckets ?? true, // Backwards compatible enableEvaluationForm: apiStudioSettings.enableEvaluationForm ?? true, // Backwards compatible mockPieceContentStatus: apiStudioSettings.mockPieceContentStatus, + rundownGlobalPiecesPrepareTime: apiStudioSettings.rundownGlobalPiecesPrepareTime, } } @@ -423,6 +424,7 @@ export function APIStudioSettingsFrom(settings: IStudioSettings): Complete | DBInterface['_id'], callbacks: PromisifyCallbacks>, - options?: Omit, 'fields'> + findOptions?: Omit, 'fields'>, + callbackOptions?: { nonMutatingCallbacks?: boolean | undefined } ): Promise /** diff --git a/meteor/server/collections/implementations/asyncCollection.ts b/meteor/server/collections/implementations/asyncCollection.ts index 52bb47eca6..7a4349c26a 100644 --- a/meteor/server/collections/implementations/asyncCollection.ts +++ b/meteor/server/collections/implementations/asyncCollection.ts @@ -141,7 +141,8 @@ export class WrappedAsyncMongoCollection | DBInterface['_id'], callbacks: PromisifyCallbacks>, - options?: FindOptions + findOptions?: FindOptions, + callbackOptions?: { nonMutatingCallbacks?: boolean | undefined } ): Promise { const span = profiler.startSpan(`MongoCollection.${this.name}.observeChanges`) if (span) { @@ -152,8 +153,8 @@ export class WrappedAsyncMongoCollection() for (const piece of pieces) { - partIdLookup.set(piece._id, piece.startPartId) + if (piece.startPartId) partIdLookup.set(piece._id, piece.startPartId) } for (const adlib of adlibPieces) { if (adlib.partId) partIdLookup.set(adlib._id, adlib.partId) diff --git a/meteor/server/publications/_publications.ts b/meteor/server/publications/_publications.ts index 8bcb30b0b1..64a027a279 100644 --- a/meteor/server/publications/_publications.ts +++ b/meteor/server/publications/_publications.ts @@ -3,6 +3,7 @@ import './lib/lib' import './buckets' import './blueprintUpgradeStatus/publication' +import './ingestStatus/publication' import './packageManager/expectedPackages/publication' import './packageManager/packageContainers' import './packageManager/playoutContext' diff --git a/meteor/server/publications/ingestStatus/createIngestRundownStatus.ts 
b/meteor/server/publications/ingestStatus/createIngestRundownStatus.ts
new file mode 100644
index 0000000000..b90c14b12f
--- /dev/null
+++ b/meteor/server/publications/ingestStatus/createIngestRundownStatus.ts
@@ -0,0 +1,191 @@
+import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { NrcsIngestCacheType } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache'
+import {
+	IngestRundownStatus,
+	IngestPartPlaybackStatus,
+	IngestRundownActiveStatus,
+	IngestPartStatus,
+	IngestPartNotifyItemReady,
+} from '@sofie-automation/shared-lib/dist/ingest/rundownStatus'
+import type { ReadonlyDeep } from 'type-fest'
+import _ from 'underscore'
+import type { ContentCache, PartCompact, PartInstanceCompact, PlaylistCompact } from './reactiveContentCache'
+import { ReactiveCacheCollection } from '../lib/ReactiveCacheCollection'
+import { unprotectString } from '@sofie-automation/corelib/dist/protectedString'
+
+export function createIngestRundownStatus(
+	cache: ReadonlyDeep<ContentCache>,
+	rundownId: RundownId
+): IngestRundownStatus | null {
+	const rundown = cache.Rundowns.findOne(rundownId)
+	if (!rundown) return null
+
+	const newDoc: IngestRundownStatus = {
+		_id: rundownId,
+		externalId: rundown.externalId,
+
+		active: IngestRundownActiveStatus.INACTIVE,
+
+		segments: [],
+	}
+
+	const playlist = cache.Playlists.findOne({
+		_id: rundown.playlistId,
+		activationId: { $exists: true },
+	})
+
+	if (playlist) {
+		newDoc.active = playlist.rehearsal ? IngestRundownActiveStatus.REHEARSAL : IngestRundownActiveStatus.ACTIVE
+	}
+
+	const nrcsSegments = cache.NrcsIngestData.find({ rundownId, type: NrcsIngestCacheType.SEGMENT }).fetch()
+	for (const nrcsSegment of nrcsSegments) {
+		const nrcsParts = cache.NrcsIngestData.find({
+			rundownId,
+			segmentId: nrcsSegment.segmentId,
+			type: NrcsIngestCacheType.PART,
+		}).fetch()
+
+		newDoc.segments.push({
+			externalId: nrcsSegment.data.externalId,
+			parts: _.compact(
+				nrcsParts.map((nrcsPart) => {
+					if (!nrcsPart.partId || !nrcsPart.segmentId) return null
+
+					const parts = cache.Parts.find({
+						rundownId: rundownId,
+						$or: [
+							{
+								externalId: nrcsPart.data.externalId,
+								ingestNotifyPartExternalId: { $exists: false },
+							},
+							{
+								ingestNotifyPartExternalId: nrcsPart.data.externalId,
+							},
+						],
+					}).fetch()
+					const partInstances = findPartInstancesForIngestPart(
+						playlist,
+						rundownId,
+						cache.PartInstances,
+						nrcsPart.data.externalId
+					)
+
+					return createIngestPartStatus(playlist, partInstances, parts, nrcsPart.data.externalId)
+				})
+			),
+		})
+	}
+
+	return newDoc
+}
+
+function findPartInstancesForIngestPart(
+	playlist: PlaylistCompact | undefined,
+	rundownId: RundownId,
+	partInstancesCache: ReadonlyDeep<ReactiveCacheCollection<PartInstanceCompact>>,
+	partExternalId: string
+) {
+	const result: Record<string, PartInstanceCompact | undefined> = {}
+	if (!playlist) return result
+
+	const candidatePartInstances = partInstancesCache
+		.find({
+			rundownId: rundownId,
+			$or: [
+				{
+					'part.externalId': partExternalId,
+					'part.ingestNotifyPartExternalId': { $exists: false },
+				},
+				{
+					'part.ingestNotifyPartExternalId': partExternalId,
+				},
+			],
+		})
+		.fetch()
+
+	for (const partInstance of candidatePartInstances) {
+		if (partInstance.rundownId !== rundownId) continue
+		// Ignore the next partinstance
+		if (partInstance._id === playlist.nextPartInfo?.partInstanceId) continue
+
+		const partId = unprotectString(partInstance.part._id)
+
+		// The current part instance is the most important
+		if (partInstance._id === playlist.currentPartInfo?.partInstanceId) {
+			result[partId] = partInstance
+			continue
+		}
+
+		// Take the part with the highest takeCount
+		const existingEntry = result[partId]
+		if (!existingEntry || existingEntry.takeCount < partInstance.takeCount) {
+			result[partId] = partInstance
+		}
+	}
+
+	return result
+}
+
+function createIngestPartStatus(
+	playlist: PlaylistCompact | undefined,
+	partInstances: Record<string, PartInstanceCompact | undefined>,
+	parts: PartCompact[],
+	ingestPartExternalId: string
+): IngestPartStatus {
+	// Determine the playback status from the PartInstance
+	let playbackStatus = IngestPartPlaybackStatus.UNKNOWN
+
+	let isReady: boolean | null = null // Start off as null, the first value will make this true or false
+
+	const itemsReady: IngestPartNotifyItemReady[] = []
+
+	const updateStatusWithPart = (part: PartCompact) => {
+		// If the part affects the ready status, update it
+		if (typeof part.ingestNotifyPartReady === 'boolean') {
+			isReady = (isReady ?? true) && part.ingestNotifyPartReady
+		}
+
+		// Include the items
+		if (part.ingestNotifyItemsReady) {
+			itemsReady.push(...part.ingestNotifyItemsReady)
+		}
+	}
+
+	// Loop through the partInstances, starting off the state
+	if (playlist) {
+		for (const partInstance of Object.values(partInstances)) {
+			if (!partInstance) continue
+
+			if (partInstance.part.shouldNotifyCurrentPlayingPart) {
+				const isCurrentPartInstance = playlist.currentPartInfo?.partInstanceId === partInstance._id
+
+				if (isCurrentPartInstance) {
+					// If the current, it is playing
+					playbackStatus = IngestPartPlaybackStatus.PLAY
+				} else if (playbackStatus === IngestPartPlaybackStatus.UNKNOWN) {
+					// If not the current, but has been played, it is stopped
+					playbackStatus = IngestPartPlaybackStatus.STOP
+				}
+			}
+
+			updateStatusWithPart(partInstance.part)
+		}
+	}
+
+	for (const part of parts) {
+		// Check if the part has already been handled by a partInstance
+		if (partInstances[unprotectString(part._id)]) continue
+
+		updateStatusWithPart(part)
+	}
+
+	return {
+		externalId: ingestPartExternalId,
+
+		isReady: isReady,
+		itemsReady: itemsReady,
+
+		playbackStatus,
+	}
+}
diff --git a/meteor/server/publications/ingestStatus/publication.ts b/meteor/server/publications/ingestStatus/publication.ts
new file mode 100644
index 0000000000..a28dd3844f
--- /dev/null
+++ b/meteor/server/publications/ingestStatus/publication.ts
@@ -0,0 +1,217 @@
+import { PeripheralDeviceId, RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { ReadonlyDeep } from 'type-fest'
+import {
+	CustomPublish,
+	CustomPublishCollection,
+	meteorCustomPublish,
+	setUpCollectionOptimizedObserver,
+	SetupObserversResult,
+	TriggerUpdate,
+} from '../../lib/customPublication'
+import { logger } from '../../logging'
+import { ContentCache, createReactiveContentCache } from './reactiveContentCache'
+import { RundownsObserver } from '../lib/rundownsObserver'
+import { RundownContentObserver } from './rundownContentObserver'
+import {
+	PeripheralDevicePubSub,
+	PeripheralDevicePubSubCollectionsNames,
+} from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice'
+import { checkAccessAndGetPeripheralDevice } from '../../security/check'
+import { check } from '../../lib/check'
+import { IngestRundownStatus } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus'
+import { protectString } from '@sofie-automation/corelib/dist/protectedString'
+import { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
+import { createIngestRundownStatus } from './createIngestRundownStatus'
+import { assertConnectionHasOneOfPermissions } from '../../security/auth'
+import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub'
+
+interface IngestRundownStatusArgs {
+	readonly deviceId: PeripheralDeviceId
+}
+
+export interface IngestRundownStatusState {
+	contentCache: ReadonlyDeep<ContentCache>
+}
+
+interface IngestRundownStatusUpdateProps {
+	newCache: ContentCache
+
+	invalidateRundownIds: RundownId[]
+	invalidatePlaylistIds: RundownPlaylistId[]
+}
+
+async function setupIngestRundownStatusPublicationObservers(
+	args: ReadonlyDeep<IngestRundownStatusArgs>,
+	triggerUpdate: TriggerUpdate<IngestRundownStatusUpdateProps>
+): Promise<SetupObserversResult> {
+	const rundownsObserver = await RundownsObserver.createForPeripheralDevice(args.deviceId, async (rundownIds) => {
+		logger.silly(`Creating new RundownContentObserver`, rundownIds)
+
+		// TODO - can this be done cheaper?
+		const cache = createReactiveContentCache(rundownIds)
+
+		// Push update
+		triggerUpdate({ newCache: cache })
+
+		const contentObserver = await RundownContentObserver.create(rundownIds, cache)
+
+		const innerQueries = [
+			cache.Playlists.find({}).observeChanges(
+				{
+					added: (docId) => triggerUpdate({ invalidatePlaylistIds: [protectString(docId)] }),
+					changed: (docId) => triggerUpdate({ invalidatePlaylistIds: [protectString(docId)] }),
+					removed: (docId) => triggerUpdate({ invalidatePlaylistIds: [protectString(docId)] }),
+				},
+				{ nonMutatingCallbacks: true }
+			),
+			cache.Rundowns.find({}).observeChanges(
+				{
+					added: (docId) => {
+						triggerUpdate({ invalidateRundownIds: [protectString(docId)] })
+						contentObserver.checkPlaylistIds()
+					},
+					changed: (docId) => {
+						triggerUpdate({ invalidateRundownIds: [protectString(docId)] })
+						contentObserver.checkPlaylistIds()
+					},
+					removed: (docId) => {
+						triggerUpdate({ invalidateRundownIds: [protectString(docId)] })
+						contentObserver.checkPlaylistIds()
+					},
+				},
+				{ nonMutatingCallbacks: true }
+			),
+			cache.Parts.find({}).observe({
+				added: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }),
+				changed: (doc, oldDoc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId, oldDoc.rundownId] }),
+				removed: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }),
+			}),
+			cache.PartInstances.find({}).observe({
+				added: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }),
+				changed: (doc, oldDoc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId, oldDoc.rundownId] }),
+				removed: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }),
+			}),
+			cache.NrcsIngestData.find({}).observe({
+				added: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }),
+				changed: (doc, oldDoc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId, oldDoc.rundownId] }),
+				removed: (doc) => triggerUpdate({ invalidateRundownIds: [doc.rundownId] }),
+			}),
+		]
+
+		return () => {
+			contentObserver.dispose()
+
+			for (const query of innerQueries) {
+				query.stop()
+			}
+		}
+	})
+
+	// Set up observers:
+	return [rundownsObserver]
+}
+
+async function manipulateIngestRundownStatusPublicationData(
+	_args: IngestRundownStatusArgs,
+	state: Partial<IngestRundownStatusState>,
+	collection: CustomPublishCollection<IngestRundownStatus>,
+	updateProps: Partial<ReadonlyDeep<IngestRundownStatusUpdateProps>> | undefined
+): Promise<void> {
+	// Prepare data for publication:
+
+	if (updateProps?.newCache !== undefined) {
+		state.contentCache = updateProps.newCache ?? undefined
+	}
+
+	if (!state.contentCache) {
+		// Remove all the notes
+		collection.remove(null)
+
+		return
+	}
+
+	const updateAll = !updateProps || !!updateProps?.newCache
+	if (updateAll) {
+		// Remove all the notes
+		collection.remove(null)
+
+		const knownRundownIds = new Set(state.contentCache.RundownIds)
+
+		for (const rundownId of knownRundownIds) {
+			const newDoc = createIngestRundownStatus(state.contentCache, rundownId)
+			if (newDoc) collection.replace(newDoc)
+		}
+	} else {
+		const regenerateForRundownIds = new Set(updateProps.invalidateRundownIds)
+
+		// Include anything where the playlist has changed
+		if (updateProps.invalidatePlaylistIds && updateProps.invalidatePlaylistIds.length > 0) {
+			const rundownsToUpdate = state.contentCache.Rundowns.find(
+				{
+					playlistId: { $in: updateProps.invalidatePlaylistIds },
+				},
+				{
+					projection: {
+						_id: 1,
+					},
+				}
+			).fetch() as Pick<DBRundown, '_id'>[]
+
+			for (const rundown of rundownsToUpdate) {
+				regenerateForRundownIds.add(rundown._id)
+			}
+		}
+
+		for (const rundownId of regenerateForRundownIds) {
+			const newDoc = createIngestRundownStatus(state.contentCache, rundownId)
+			if (newDoc) {
+				collection.replace(newDoc)
+			} else {
+				collection.remove(rundownId)
+			}
+		}
+	}
+}
+
+async function startOrJoinIngestStatusPublication(
+	pub: CustomPublish<IngestRundownStatus>,
+	deviceId: PeripheralDeviceId
+) {
+	await setUpCollectionOptimizedObserver<
+		IngestRundownStatus,
+		IngestRundownStatusArgs,
+		IngestRundownStatusState,
+		IngestRundownStatusUpdateProps
+	>(
+		`pub_${PeripheralDevicePubSub.ingestDeviceRundownStatus}_${deviceId}`,
+		{ deviceId },
+		setupIngestRundownStatusPublicationObservers,
+		manipulateIngestRundownStatusPublicationData,
+		pub,
+		100
+	)
+}
+
+meteorCustomPublish(
+	PeripheralDevicePubSub.ingestDeviceRundownStatus,
+	PeripheralDevicePubSubCollectionsNames.ingestRundownStatus,
+	async function (pub, deviceId: PeripheralDeviceId, token: string | undefined) {
+		check(deviceId, String)
+
+		await checkAccessAndGetPeripheralDevice(deviceId, token, this)
+
+		await startOrJoinIngestStatusPublication(pub, deviceId)
+	}
+)
+
+meteorCustomPublish(
+	MeteorPubSub.ingestDeviceRundownStatusTestTool,
+	PeripheralDevicePubSubCollectionsNames.ingestRundownStatus,
+	async function (pub, deviceId: PeripheralDeviceId) {
+		check(deviceId, String)
+
+		assertConnectionHasOneOfPermissions(this.connection, 'testing')
+
+		await startOrJoinIngestStatusPublication(pub, deviceId)
+	}
+)
diff --git a/meteor/server/publications/ingestStatus/reactiveContentCache.ts b/meteor/server/publications/ingestStatus/reactiveContentCache.ts
new file mode 100644
index 0000000000..a755ee4f02
--- /dev/null
+++ b/meteor/server/publications/ingestStatus/reactiveContentCache.ts
@@ -0,0 +1,99 @@
+import type { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part'
+import { ReactiveCacheCollection } from '../lib/ReactiveCacheCollection'
+import { literal } from '@sofie-automation/corelib/dist/lib'
+import type { MongoFieldSpecifierOnesStrict } from '@sofie-automation/corelib/dist/mongo'
+import type { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/PartInstances'
+import type { NrcsIngestDataCacheObj } from '@sofie-automation/corelib/dist/dataModel/NrcsIngestDataCache'
+import type { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import type { DBRundown } from '@sofie-automation/corelib/dist/dataModel/Rundown'
+import type { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist'
+
+export type PlaylistCompact = Pick<
+	DBRundownPlaylist,
+	'_id' | 'activationId' | 'rehearsal' | 'currentPartInfo' | 'nextPartInfo'
+>
+export const playlistFieldSpecifier = literal<MongoFieldSpecifierOnesStrict<PlaylistCompact>>({
+	_id: 1,
+	activationId: 1,
+	rehearsal: 1,
+	currentPartInfo: 1,
+	nextPartInfo: 1,
+})
+
+export type RundownCompact = Pick<DBRundown, '_id' | 'externalId' | 'playlistId'>
+export const rundownFieldSpecifier = literal<MongoFieldSpecifierOnesStrict<RundownCompact>>({
+	_id: 1,
+	externalId: 1,
+	playlistId: 1,
+})
+
+export type PartCompact = Pick<
+	DBPart,
+	| '_id'
+	| 'rundownId'
+	| 'segmentId'
+	| 'externalId'
+	| 'shouldNotifyCurrentPlayingPart'
+	| 'ingestNotifyPartReady'
+	| 'ingestNotifyItemsReady'
+	| 'ingestNotifyPartExternalId'
+>
+export const partFieldSpecifier = literal<MongoFieldSpecifierOnesStrict<PartCompact>>({
+	_id: 1,
+	rundownId: 1,
+	segmentId: 1,
+	externalId: 1,
+	shouldNotifyCurrentPlayingPart: 1,
+	ingestNotifyPartReady: 1,
+	ingestNotifyItemsReady: 1,
+	ingestNotifyPartExternalId: 1,
+})
+
+export type PartInstanceCompact = Pick<PartInstance, '_id' | 'rundownId' | 'segmentId' | 'part' | 'takeCount'>
+export const partInstanceFieldSpecifier = literal<MongoFieldSpecifierOnesStrict<PartInstanceCompact>>({
+	_id: 1,
+	rundownId: 1,
+	segmentId: 1,
+	part: 1, // This could be more granular, but it should be pretty stable
+	takeCount: 1,
+})
+
+export type NrcsIngestDataCacheObjCompact = Pick<
+	NrcsIngestDataCacheObj,
+	'_id' | 'type' | 'rundownId' | 'segmentId' | 'partId'
+> & { data: { externalId: string } }
+export const nrcsIngestDataCacheObjSpecifier = literal<MongoFieldSpecifierOnesStrict<NrcsIngestDataCacheObjCompact>>({
+	_id: 1,
+	type: 1,
+	rundownId: 1,
+	segmentId: 1,
+	partId: 1,
+	data: {
+		// We need to be very selective here, as the payload portion could contain data not safe for minimongo
+		externalId: 1,
+	},
+})
+
+export interface ContentCache {
+	RundownIds: RundownId[]
+
+	Playlists: ReactiveCacheCollection<PlaylistCompact>
+	Rundowns: ReactiveCacheCollection<RundownCompact>
+	NrcsIngestData: ReactiveCacheCollection<NrcsIngestDataCacheObjCompact>
+	Parts: ReactiveCacheCollection<PartCompact>
+	PartInstances: ReactiveCacheCollection<PartInstanceCompact>
+}
+
+export function createReactiveContentCache(rundownIds: RundownId[]): ContentCache {
+	const cache: ContentCache = {
+		RundownIds: rundownIds,
+
+		Playlists: new ReactiveCacheCollection<PlaylistCompact>('playlists'),
+		Rundowns: new ReactiveCacheCollection<RundownCompact>('rundowns'),
+		NrcsIngestData: new ReactiveCacheCollection<NrcsIngestDataCacheObjCompact>('nrcsIngestData'),
+		Parts: new ReactiveCacheCollection<PartCompact>('parts'),
+		PartInstances: new ReactiveCacheCollection<PartInstanceCompact>('partInstances'),
+	}
+
+	return cache
+}
diff --git a/meteor/server/publications/ingestStatus/rundownContentObserver.ts b/meteor/server/publications/ingestStatus/rundownContentObserver.ts
new file mode 100644
index 0000000000..6894e97136
--- /dev/null
+++ b/meteor/server/publications/ingestStatus/rundownContentObserver.ts
@@ -0,0 +1,153 @@
+import { Meteor } from 'meteor/meteor'
+import { RundownId, RundownPlaylistId } from '@sofie-automation/corelib/dist/dataModel/Ids'
+import { logger } from '../../logging'
+import {
+	ContentCache,
+	nrcsIngestDataCacheObjSpecifier,
+	partFieldSpecifier,
+	partInstanceFieldSpecifier,
+	playlistFieldSpecifier,
+	rundownFieldSpecifier,
+	// segmentFieldSpecifier,
+} from './reactiveContentCache'
+import { NrcsIngestDataCache, PartInstances, Parts, RundownPlaylists, Rundowns } from '../../collections'
+import { waitForAllObserversReady } from '../lib/lib'
+import _ from 'underscore'
+import { ReactiveMongoObserverGroup, ReactiveMongoObserverGroupHandle } from '../lib/observerGroup'
+import { equivalentArrays } from '@sofie-automation/shared-lib/dist/lib/lib'
+
+const REACTIVITY_DEBOUNCE = 20
+
+export class RundownContentObserver {
+	#observers: Meteor.LiveQueryHandle[] = []
+	readonly #cache: ContentCache
+
+	#playlistIds: RundownPlaylistId[] = []
+	#playlistIdObserver!: ReactiveMongoObserverGroupHandle
+
+	#disposed
= false + + private constructor(cache: ContentCache) { + this.#cache = cache + } + + static async create(rundownIds: RundownId[], cache: ContentCache): Promise { + logger.silly(`Creating RundownContentObserver for rundowns "${rundownIds.join(',')}"`) + + const observer = new RundownContentObserver(cache) + + observer.#playlistIdObserver = await ReactiveMongoObserverGroup(async () => { + // Clear already cached data + cache.Playlists.remove({}) + + return [ + RundownPlaylists.observe( + { + // We can use the `this.#playlistIds` here, as this is restarted every time that property changes + _id: { $in: observer.#playlistIds }, + }, + { + added: (doc) => { + cache.Playlists.upsert(doc._id, doc) + }, + changed: (doc) => { + cache.Playlists.upsert(doc._id, doc) + }, + removed: (doc) => { + cache.Playlists.remove(doc._id) + }, + }, + { + projection: playlistFieldSpecifier, + } + ), + ] + }) + + observer.#observers = await waitForAllObserversReady([ + Rundowns.observeChanges( + { + _id: { + $in: rundownIds, + }, + }, + cache.Rundowns.link(), + { + projection: rundownFieldSpecifier, + }, + { + nonMutatingCallbacks: true, + } + ), + Parts.observeChanges( + { + rundownId: { + $in: rundownIds, + }, + }, + cache.Parts.link(), + { + projection: partFieldSpecifier, + }, + { + nonMutatingCallbacks: true, + } + ), + PartInstances.observeChanges( + { + rundownId: { $in: rundownIds }, + reset: { $ne: true }, + orphaned: { $exists: false }, + }, + cache.PartInstances.link(), + { projection: partInstanceFieldSpecifier }, + { + nonMutatingCallbacks: true, + } + ), + NrcsIngestDataCache.observeChanges( + { + rundownId: { + $in: rundownIds, + }, + }, + cache.NrcsIngestData.link(), + { + projection: nrcsIngestDataCacheObjSpecifier, + }, + { + nonMutatingCallbacks: true, + } + ), + + observer.#playlistIdObserver, + ]) + + return observer + } + + public checkPlaylistIds = _.debounce( + Meteor.bindEnvironment(() => { + if (this.#disposed) return + + const playlistIds = Array.from(new Set(this.#cache.Rundowns.find({}).map((rundown) => rundown.playlistId))) + + if (!equivalentArrays(playlistIds, this.#playlistIds)) { + this.#playlistIds = playlistIds + // trigger the playlist group to restart + this.#playlistIdObserver.restart() + } + }), + REACTIVITY_DEBOUNCE + ) + + public get cache(): ContentCache { + return this.#cache + } + + public dispose = (): void => { + this.#disposed = true + + this.#observers.forEach((observer) => observer.stop()) + } +} diff --git a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts index ffeb44577b..06760d6c94 100644 --- a/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts +++ b/meteor/server/publications/lib/__tests__/rundownsObserver.test.ts @@ -1,4 +1,9 @@ -import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { + PeripheralDeviceId, + RundownId, + RundownPlaylistId, + StudioId, +} from '@sofie-automation/corelib/dist/dataModel/Ids' import { protectString } from '@sofie-automation/corelib/dist/protectedString' import { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { Rundowns } from '../../../collections' @@ -25,7 +30,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, 
onChanged) try { // should now be an observer expect(RundownsMock.observers).toHaveLength(1) @@ -78,7 +83,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // ensure starts correct await waitUntil(async () => { @@ -132,7 +137,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct @@ -186,7 +191,7 @@ describe('RundownsObserver', () => { // should not be any observers yet expect(RundownsMock.observers).toHaveLength(0) - const observer = await RundownsObserver.create(studioId, playlistId, onChanged) + const observer = await RundownsObserver.createForPlaylist(studioId, playlistId, onChanged) try { // ensure starts correct // ensure starts correct @@ -263,4 +268,56 @@ describe('RundownsObserver', () => { observer.stop() } }) + + test('create and destroy observer - for peripheraldevice', async () => { + const deviceId = protectString('device0') + + const onChangedCleanup = jest.fn() + const onChanged = jest.fn(async () => onChangedCleanup) + + // should not be any observers yet + expect(RundownsMock.observers).toHaveLength(0) + + const observer = await RundownsObserver.createForPeripheralDevice(deviceId, onChanged) + try { + // should now be an observer + expect(RundownsMock.observers).toHaveLength(1) + + // Before debounce + expect(onChanged).toHaveBeenCalledTimes(0) + + // After debounce + await waitUntil(async () => { + // Run timers, so that promises in the observer has a chance to resolve: + await runAllTimers() + expect(onChanged).toHaveBeenCalledTimes(1) + expect(onChangedCleanup).toHaveBeenCalledTimes(0) + }, MAX_WAIT_TIME) + + // still got an observer + expect(RundownsMock.observers).toHaveLength(1) + + // get the mock observer, and ensure to looks sane + expect(RundownsMock.observers).toHaveLength(1) + const mockObserver = RundownsMock.observers[0] + expect(mockObserver).toBeTruthy() + expect(mockObserver.callbacksChanges).toBeFalsy() + expect(mockObserver.callbacksObserve).toBeTruthy() + expect(mockObserver.callbacksObserve?.added).toBeTruthy() + expect(mockObserver.callbacksObserve?.changed).toBeTruthy() + expect(mockObserver.callbacksObserve?.removed).toBeTruthy() + expect(mockObserver.query).toEqual({ + 'source.peripheralDeviceId': 'device0', + 'source.type': 'nrcs', + }) + } finally { + // Make sure to cleanup + observer.stop() + + // Check it stopped + expect(onChanged).toHaveBeenCalledTimes(1) + expect(onChangedCleanup).toHaveBeenCalledTimes(1) + expect(RundownsMock.observers).toHaveLength(0) + } + }) }) diff --git a/meteor/server/publications/lib/rundownsObserver.ts b/meteor/server/publications/lib/rundownsObserver.ts index 6aec996d81..421a6c3f46 100644 --- a/meteor/server/publications/lib/rundownsObserver.ts +++ b/meteor/server/publications/lib/rundownsObserver.ts @@ -1,7 +1,14 @@ import { Meteor } from 'meteor/meteor' -import { RundownId, RundownPlaylistId, StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import type { + PeripheralDeviceId, + RundownId, + RundownPlaylistId, + StudioId, +} from 
'@sofie-automation/corelib/dist/dataModel/Ids' import { Rundowns } from '../../collections' import { PromiseDebounce } from './PromiseDebounce' +import type { MongoQuery } from '@sofie-automation/corelib/dist/mongo' +import type { Rundown } from '@sofie-automation/corelib/dist/dataModel/Rundown' import { logger } from '../../logging' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' @@ -44,24 +51,39 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { this.#changed = onChanged } - static async create( + static async createForPlaylist( studioId: StudioId, playlistId: RundownPlaylistId, onChanged: ChangedHandler ): Promise { const observer = new RundownsObserver(onChanged) - await observer.init(studioId, playlistId) + await observer.init({ + playlistId, + studioId, + }) return observer } - private async init(studioId: StudioId, playlistId: RundownPlaylistId) { + static async createForPeripheralDevice( + // studioId: StudioId, // TODO - this? + deviceId: PeripheralDeviceId, + onChanged: ChangedHandler + ): Promise { + const observer = new RundownsObserver(onChanged) + + await observer.init({ + 'source.type': 'nrcs', + 'source.peripheralDeviceId': deviceId, + }) + + return observer + } + + private async init(query: MongoQuery) { this.#rundownsLiveQuery = await Rundowns.observe( - { - playlistId, - studioId, - }, + query, { added: (doc) => { this.#rundownIds.add(doc._id) @@ -97,5 +119,6 @@ export class RundownsObserver implements Meteor.LiveQueryHandle { this.#rundownsLiveQuery.stop() this.#changed = undefined this.#cleanup?.() + this.#cleanup = undefined } } diff --git a/meteor/server/publications/partInstancesUI/publication.ts b/meteor/server/publications/partInstancesUI/publication.ts index 15ef24d707..5c30cea8a2 100644 --- a/meteor/server/publications/partInstancesUI/publication.ts +++ b/meteor/server/publications/partInstancesUI/publication.ts @@ -66,60 +66,64 @@ async function setupUIPartInstancesPublicationObservers( )) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist with activationId="${args.playlistActivationId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - const cache = createReactiveContentCache() - - // Push update - triggerUpdate({ newCache: cache }) - - const obs1 = await RundownContentObserver.create( - playlist.studioId, - args.playlistActivationId, - rundownIds, - cache - ) + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + const cache = createReactiveContentCache() + + // Push update + triggerUpdate({ newCache: cache }) + + const obs1 = await RundownContentObserver.create( + playlist.studioId, + args.playlistActivationId, + rundownIds, + cache + ) - const innerQueries = [ - cache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - }), - cache.PartInstances.find({}).observe({ - added: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), - changed: (doc, oldDoc) => { - if (doc.part._rank !== oldDoc.part._rank) { - // with part rank change we need to invalidate the entire segment, - // as the order 
may affect which unchanged parts are/aren't in quickLoop - triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) - } else { - triggerUpdate({ invalidatePartInstanceIds: [doc._id] }) - } - }, - removed: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), - }), - cache.RundownPlaylists.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - cache.StudioSettings.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const innerQueries = [ + cache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + }), + cache.PartInstances.find({}).observe({ + added: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), + changed: (doc, oldDoc) => { + if (doc.part._rank !== oldDoc.part._rank) { + // with part rank change we need to invalidate the entire segment, + // as the order may affect which unchanged parts are/aren't in quickLoop + triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) + } else { + triggerUpdate({ invalidatePartInstanceIds: [doc._id] }) + } + }, + removed: (doc) => triggerUpdate({ invalidatePartInstanceIds: [doc._id] }), + }), + cache.RundownPlaylists.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + cache.StudioSettings.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [rundownsObserver] diff --git a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts index 66e1e0658e..7a35485814 100644 --- a/meteor/server/publications/partInstancesUI/reactiveContentCache.ts +++ b/meteor/server/publications/partInstancesUI/reactiveContentCache.ts @@ -5,6 +5,7 @@ import { MongoFieldSpecifierOnesStrict, MongoFieldSpecifierZeroes } from '@sofie import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBStudio, IStudioSettings } from '@sofie-automation/corelib/dist/dataModel/Studio' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' +import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' diff --git a/meteor/server/publications/partsUI/publication.ts b/meteor/server/publications/partsUI/publication.ts index d2f1a100e3..c52fe141db 100644 --- a/meteor/server/publications/partsUI/publication.ts +++ b/meteor/server/publications/partsUI/publication.ts @@ 
-57,55 +57,59 @@ async function setupUIPartsPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - const cache = createReactiveContentCache() - - // Push update - triggerUpdate({ newCache: cache }) - - const obs1 = await RundownContentObserver.create(playlist.studioId, playlist._id, rundownIds, cache) - - const innerQueries = [ - cache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - }), - cache.Parts.find({}).observe({ - added: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), - changed: (doc, oldDoc) => { - if (doc._rank !== oldDoc._rank) { - // with part rank change we need to invalidate the entire segment, - // as the order may affect which unchanged parts are/aren't in quickLoop - triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) - } else { - triggerUpdate({ invalidatePartIds: [doc._id] }) - } - }, - removed: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), - }), - cache.RundownPlaylists.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - cache.StudioSettings.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateQuickLoop: true }), - changed: () => triggerUpdate({ invalidateQuickLoop: true }), - removed: () => triggerUpdate({ invalidateQuickLoop: true }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + const cache = createReactiveContentCache() + + // Push update + triggerUpdate({ newCache: cache }) + + const obs1 = await RundownContentObserver.create(playlist.studioId, playlist._id, rundownIds, cache) + + const innerQueries = [ + cache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + }), + cache.Parts.find({}).observe({ + added: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), + changed: (doc, oldDoc) => { + if (doc._rank !== oldDoc._rank) { + // with part rank change we need to invalidate the entire segment, + // as the order may affect which unchanged parts are/aren't in quickLoop + triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }) + } else { + triggerUpdate({ invalidatePartIds: [doc._id] }) + } + }, + removed: (doc) => triggerUpdate({ invalidatePartIds: [doc._id] }), + }), + cache.RundownPlaylists.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () => triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + cache.StudioSettings.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateQuickLoop: true }), + changed: () 
=> triggerUpdate({ invalidateQuickLoop: true }), + removed: () => triggerUpdate({ invalidateQuickLoop: true }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [rundownsObserver] diff --git a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts index 3247c10a91..762dfabbf8 100644 --- a/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts +++ b/meteor/server/publications/pieceContentStatusUI/checkPieceContentStatus.ts @@ -240,7 +240,7 @@ export async function checkPieceContentStatusAndDependencies( blacks: [], scenes: [], - thumbnailUrl: undefined, + thumbnailUrl: '/dev/fakeThumbnail.png', previewUrl: '/dev/fakePreview.mp4', packageName: null, diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts index 0103802d91..6f78d87ed2 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/publication.ts @@ -125,86 +125,90 @@ async function setupUIPieceContentStatusesPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.rundownPlaylistId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - // TODO - can this be done cheaper? - const contentCache = createReactiveContentCache() - triggerUpdate({ newCache: contentCache }) - - const obs1 = await RundownContentObserver.create(rundownIds, contentCache) - - const innerQueries = [ - contentCache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), - }), - contentCache.Parts.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), - }), - contentCache.Pieces.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), - }), - contentCache.PartInstances.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), - }), - contentCache.PieceInstances.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), - }), - contentCache.AdLibPieces.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ 
updatedAdlibPieceIds: [protectString(id)] }), - }), - contentCache.AdLibActions.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), - }), - contentCache.BaselineAdLibPieces.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), - }), - contentCache.BaselineAdLibActions.find({}).observeChanges({ - added: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), - }), - contentCache.Rundowns.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateAll: true }), - changed: () => triggerUpdate({ invalidateAll: true }), - removed: () => triggerUpdate({ invalidateAll: true }), - }), - contentCache.Blueprints.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateAll: true }), - changed: () => triggerUpdate({ invalidateAll: true }), - removed: () => triggerUpdate({ invalidateAll: true }), - }), - contentCache.ShowStyleSourceLayers.find({}).observeChanges({ - added: () => triggerUpdate({ invalidateAll: true }), - changed: () => triggerUpdate({ invalidateAll: true }), - removed: () => triggerUpdate({ invalidateAll: true }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + // TODO - can this be done cheaper? 
+ const contentCache = createReactiveContentCache() + triggerUpdate({ newCache: contentCache }) + + const obs1 = await RundownContentObserver.create(rundownIds, contentCache) + + const innerQueries = [ + contentCache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedSegmentIds: [protectString(id)] }), + }), + contentCache.Parts.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPartIds: [protectString(id)] }), + }), + contentCache.Pieces.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPieceIds: [protectString(id)] }), + }), + contentCache.PartInstances.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPartInstanceIds: [protectString(id)] }), + }), + contentCache.PieceInstances.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedPieceInstanceIds: [protectString(id)] }), + }), + contentCache.AdLibPieces.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedAdlibPieceIds: [protectString(id)] }), + }), + contentCache.AdLibActions.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedAdlibActionIds: [protectString(id)] }), + }), + contentCache.BaselineAdLibPieces.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedBaselineAdlibPieceIds: [protectString(id)] }), + }), + contentCache.BaselineAdLibActions.find({}).observeChanges({ + added: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ updatedBaselineAdlibActionIds: [protectString(id)] }), + }), + contentCache.Rundowns.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateAll: true }), + changed: () => triggerUpdate({ invalidateAll: true }), + removed: () => triggerUpdate({ invalidateAll: true }), + }), + contentCache.Blueprints.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateAll: true }), + changed: () => triggerUpdate({ invalidateAll: true }), + removed: () => triggerUpdate({ invalidateAll: true }), + }), + contentCache.ShowStyleSourceLayers.find({}).observeChanges({ + added: () => triggerUpdate({ invalidateAll: true }), + changed: () => triggerUpdate({ 
invalidateAll: true }), + removed: () => triggerUpdate({ invalidateAll: true }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [ diff --git a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts index 078f91361b..6d1c613a29 100644 --- a/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts +++ b/meteor/server/publications/pieceContentStatusUI/rundown/regenerateItems.ts @@ -107,7 +107,7 @@ export async function regenerateForPieceIds( { _id: protectString(`piece_${pieceId}`), - partId: pieceDoc.startPartId, + partId: pieceDoc.startPartId ?? undefined, rundownId: pieceDoc.startRundownId, pieceId: pieceId, @@ -193,7 +193,7 @@ export async function regenerateForPieceInstanceIds( const res: UIPieceContentStatus = { _id: protectString(`piece_${pieceId}`), - partId: pieceDoc.piece.startPartId, + partId: pieceDoc.piece.startPartId ?? undefined, rundownId: pieceDoc.rundownId, pieceId: pieceId, diff --git a/meteor/server/publications/segmentPartNotesUI/publication.ts b/meteor/server/publications/segmentPartNotesUI/publication.ts index d01a55c66a..05d4d86a3e 100644 --- a/meteor/server/publications/segmentPartNotesUI/publication.ts +++ b/meteor/server/publications/segmentPartNotesUI/publication.ts @@ -64,48 +64,54 @@ async function setupUISegmentPartNotesPublicationObservers( })) as Pick | undefined if (!playlist) throw new Error(`RundownPlaylist "${args.playlistId}" not found!`) - const rundownsObserver = await RundownsObserver.create(playlist.studioId, playlist._id, async (rundownIds) => { - logger.silly(`Creating new RundownContentObserver`) - - // TODO - can this be done cheaper? - const cache = createReactiveContentCache() - - // Push update - triggerUpdate({ newCache: cache }) - - const obs1 = await RundownContentObserver.create(rundownIds, cache) - - const innerQueries = [ - cache.Segments.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), - }), - cache.Parts.find({}).observe({ - added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - changed: (doc, oldDoc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), - removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - }), - cache.DeletedPartInstances.find({}).observe({ - added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - changed: (doc, oldDoc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), - removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), - }), - cache.Rundowns.find({}).observeChanges({ - added: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), - changed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), - removed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), - }), - ] - - return () => { - obs1.dispose() - - for (const query of innerQueries) { - query.stop() + const rundownsObserver = await RundownsObserver.createForPlaylist( + playlist.studioId, + playlist._id, + async (rundownIds) => { + logger.silly(`Creating new RundownContentObserver`) + + // TODO - can this be done cheaper? 
+ const cache = createReactiveContentCache() + + // Push update + triggerUpdate({ newCache: cache }) + + const obs1 = await RundownContentObserver.create(rundownIds, cache) + + const innerQueries = [ + cache.Segments.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateSegmentIds: [protectString(id)] }), + }), + cache.Parts.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + changed: (doc, oldDoc) => + triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), + removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + }), + cache.DeletedPartInstances.find({}).observe({ + added: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + changed: (doc, oldDoc) => + triggerUpdate({ invalidateSegmentIds: [doc.segmentId, oldDoc.segmentId] }), + removed: (doc) => triggerUpdate({ invalidateSegmentIds: [doc.segmentId] }), + }), + cache.Rundowns.find({}).observeChanges({ + added: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), + changed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), + removed: (id) => triggerUpdate({ invalidateRundownIds: [protectString(id)] }), + }), + ] + + return () => { + obs1.dispose() + + for (const query of innerQueries) { + query.stop() + } } } - }) + ) // Set up observers: return [rundownsObserver] diff --git a/meteor/server/publications/system.ts b/meteor/server/publications/system.ts index 306014f446..77968b2d99 100644 --- a/meteor/server/publications/system.ts +++ b/meteor/server/publications/system.ts @@ -27,7 +27,6 @@ meteorPublish(MeteorPubSub.coreSystem, async function (_token: string | undefine }) meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: StudioId, rundownId: RundownId) { - // HACK: This should do real auth triggerWriteAccessBecauseNoCheckNecessary() check(studioId, String) @@ -43,7 +42,6 @@ meteorPublish(MeteorPubSub.notificationsForRundown, async function (studioId: St meteorPublish( MeteorPubSub.notificationsForRundownPlaylist, async function (studioId: StudioId, playlistId: RundownPlaylistId) { - // HACK: This should do real auth triggerWriteAccessBecauseNoCheckNecessary() check(studioId, String) diff --git a/packages/blueprints-integration/src/api/showStyle.ts b/packages/blueprints-integration/src/api/showStyle.ts index 51bee4fe10..2cd09b99e2 100644 --- a/packages/blueprints-integration/src/api/showStyle.ts +++ b/packages/blueprints-integration/src/api/showStyle.ts @@ -35,6 +35,8 @@ import type { IBlueprintSegment, IBlueprintPiece, IBlueprintPart, + IBlueprintRundownPiece, + IBlueprintRundownPieceDB, } from '../documents/index.js' import type { IBlueprintShowStyleVariant, IOutputLayer, ISourceLayer } from '../showStyle.js' import type { TSR, OnGenerateTimelineObj, TimelineObjectCoreExt } from '../timeline.js' @@ -271,6 +273,7 @@ export interface BlueprintResultRundown { rundown: IBlueprintRundown globalAdLibPieces: IBlueprintAdLibPiece[] globalActions: IBlueprintActionManifest[] + globalPieces: IBlueprintRundownPiece[] baseline: BlueprintResultBaseline } export interface BlueprintResultSegment { @@ -297,6 +300,11 @@ export interface BlueprintSyncIngestNewData { actions: IBlueprintActionManifest[] /** A list of adlibs that have pieceInstances in the partInstance in question */ referencedAdlibs: 
IBlueprintAdLibPieceDB[] + /** + * The list of pieces which belong to the Rundown and may be active + * Note: Some of these may have played and been stopped before the current PartInstance + */ + rundownPieces: IBlueprintRundownPieceDB[] } // TODO: add something like this later? diff --git a/packages/blueprints-integration/src/documents/index.ts b/packages/blueprints-integration/src/documents/index.ts index d635e43be4..895a42ebae 100644 --- a/packages/blueprints-integration/src/documents/index.ts +++ b/packages/blueprints-integration/src/documents/index.ts @@ -7,5 +7,6 @@ export * from './pieceInstance.js' export * from './pieceGeneric.js' export * from './playlistTiming.js' export * from './rundown.js' +export * from './rundownPiece.js' export * from './rundownPlaylist.js' export * from './segment.js' diff --git a/packages/blueprints-integration/src/documents/part.ts b/packages/blueprints-integration/src/documents/part.ts index a45a5f8310..09e88c44e6 100644 --- a/packages/blueprints-integration/src/documents/part.ts +++ b/packages/blueprints-integration/src/documents/part.ts @@ -1,6 +1,7 @@ import { UserEditingDefinition, UserEditingProperties } from '../userEditing.js' import type { NoteSeverity } from '../lib.js' import type { ITranslatableMessage } from '../translations.js' +import type { IngestPartNotifyItemReady } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' /** Timings for the inTransition, when supported and allowed */ export interface IBlueprintPartInTransition { @@ -58,9 +59,18 @@ export interface IBlueprintMutatablePart * it will trigger a user edit operation of type DefaultUserOperationEditProperties */ userEditProperties?: UserEditingProperties + /** * Whether to stop this piece before the 'keepalive' period of the part */ diff --git a/packages/blueprints-integration/src/documents/rundownPiece.ts b/packages/blueprints-integration/src/documents/rundownPiece.ts new file mode 100644 index 0000000000..f84a786f15 --- /dev/null +++ b/packages/blueprints-integration/src/documents/rundownPiece.ts @@ -0,0 +1,29 @@ +import { IBlueprintPieceGeneric } from './pieceGeneric.js' + +/** + * A variant of a Piece that is owned by the Rundown. + * This piece is not tied to a specific Part or Segment. + */ +export interface IBlueprintRundownPiece + extends Omit<IBlueprintPieceGeneric, 'lifespan'> { + /** When the piece should be active on the timeline. */ + enable: { + start: number + duration?: number + + // For now, these pieces are always absolute (using wall time) rather than relative to the rundown + isAbsolute: true + } + + /** Whether the piece is a real piece, or exists as a marker to stop an infinite piece.
If virtual, it does not add any contents to the timeline */ + virtual?: boolean + + /** Whether the piece affects the output of the Studio or is describing an invisible state within the Studio */ + notInVision?: boolean +} + +/** The Rundown piece sent from Core */ +export interface IBlueprintRundownPieceDB + extends IBlueprintRundownPiece { + _id: string +} diff --git a/packages/blueprints-integration/src/ingest.ts b/packages/blueprints-integration/src/ingest.ts index 5a96109936..dd97b3c361 100644 --- a/packages/blueprints-integration/src/ingest.ts +++ b/packages/blueprints-integration/src/ingest.ts @@ -130,6 +130,7 @@ export enum DefaultUserOperationsTypes { REVERT_RUNDOWN = '__sofie-revert-rundown', UPDATE_PROPS = '__sofie-update-props', IMPORT_MOS_ITEM = '__sofie-import-mos', + RETIME_PIECE = '__sofie-retime-piece', } export interface DefaultUserOperationRevertRundown { @@ -161,12 +162,24 @@ export type DefaultUserOperationImportMOSItem = { payload: any } +export type DefaultUserOperationRetimePiece = { + id: DefaultUserOperationsTypes.RETIME_PIECE + payload: { + segmentExternalId: string + partExternalId: string + + inPoint: number + // note - at some point this could also include an updated duration + } +} + export type DefaultUserOperations = | DefaultUserOperationRevertRundown | DefaultUserOperationRevertSegment | DefaultUserOperationRevertPart | DefaultUserOperationEditProperties | DefaultUserOperationImportMOSItem + | DefaultUserOperationRetimePiece export interface UserOperationChange { /** Indicate that this change is from user operations */ diff --git a/packages/blueprints-integration/src/previews.ts b/packages/blueprints-integration/src/previews.ts index 038461a359..fbfb31ac65 100644 --- a/packages/blueprints-integration/src/previews.ts +++ b/packages/blueprints-integration/src/previews.ts @@ -1,4 +1,4 @@ -import { SplitsContentBoxContent, SplitsContentBoxProperties } from './content.js' +import { SourceLayerType, SplitsContentBoxContent, SplitsContentBoxProperties } from './content.js' import { NoteSeverity } from './lib.js' import { ITranslatableMessage } from './translations.js' @@ -6,6 +6,10 @@ export interface PopupPreview
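To make the new user operation concrete, here is a rough sketch of a RETIME_PIECE payload as a UI might dispatch it. The external ids and the in-point value are invented, and the import assumes these types are re-exported from the package root like the other operation types:

import {
	DefaultUserOperationRetimePiece,
	DefaultUserOperationsTypes,
} from '@sofie-automation/blueprints-integration'

const retimePiece: DefaultUserOperationRetimePiece = {
	id: DefaultUserOperationsTypes.RETIME_PIECE,
	payload: {
		segmentExternalId: 'segment-0001', // example id from the NRCS
		partExternalId: 'part-0001-a', // example id from the NRCS
		inPoint: 90_000, // example new in-point for the piece
	},
}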

{ name?: string preview?: P warnings?: InvalidPreview[] + /** + * Add custom content preview content + */ + additionalPreviewContent?: Array } export type Previews = TablePreview | ScriptPreview | HTMLPreview | SplitPreview | VTPreview | BlueprintImagePreview @@ -19,6 +23,55 @@ export enum PreviewType { BlueprintImage = 'blueprintImage', } +// The PreviewContent types are a partly replica of the types in PreviewPopUpContext.tsx +export type PreviewContent = + | { + type: 'iframe' + href: string + postMessage?: any + dimensions?: { width: number; height: number } + } + | { + type: 'image' + src: string + } + | { + type: 'video' + src: string + } + | { + type: 'script' + script?: string + firstWords?: string + lastWords?: string + comment?: string + lastModified?: number + } + | { + type: 'title' + content: string + } + | { + type: 'inOutWords' + in?: string + out: string + } + | { + type: 'layerInfo' + layerType: SourceLayerType + text: Array + inTime?: number | string + outTime?: number | string + duration?: number | string + } + | { + type: 'separationLine' + } + | { + type: 'data' + content: { key: string; value: string }[] + } + interface PreviewBase { type: PreviewType } diff --git a/packages/blueprints-integration/src/triggers.ts b/packages/blueprints-integration/src/triggers.ts index 22691a309b..bcaf6279fb 100644 --- a/packages/blueprints-integration/src/triggers.ts +++ b/packages/blueprints-integration/src/triggers.ts @@ -272,6 +272,12 @@ export interface IShelfAction extends ITriggeredActionBase { filterChain: IGUIContextFilterLink[] } +export interface IEditModeAction extends ITriggeredActionBase { + action: ClientActions.editMode + state: true | false | 'toggle' + filterChain: IGUIContextFilterLink[] +} + export interface IGoToOnAirLineAction extends ITriggeredActionBase { action: ClientActions.goToOnAirLine filterChain: IGUIContextFilterLink[] @@ -325,6 +331,7 @@ export type SomeAction = | IRundownPlaylistResetAction | IRundownPlaylistResyncAction | IShelfAction + | IEditModeAction | IGoToOnAirLineAction | IRewindSegmentsAction | IShowEntireCurrentSegmentAction diff --git a/packages/corelib/src/dataModel/ExpectedPackages.ts b/packages/corelib/src/dataModel/ExpectedPackages.ts index 1597ed1d45..2e91000143 100644 --- a/packages/corelib/src/dataModel/ExpectedPackages.ts +++ b/packages/corelib/src/dataModel/ExpectedPackages.ts @@ -32,6 +32,7 @@ export type ExpectedPackageFromRundownBaseline = | ExpectedPackageDBFromBaselineAdLibAction | ExpectedPackageDBFromBaselineAdLibPiece | ExpectedPackageDBFromRundownBaselineObjects + | ExpectedPackageDBFromBaselinePiece export type ExpectedPackageDBFromBucket = ExpectedPackageDBFromBucketAdLib | ExpectedPackageDBFromBucketAdLibAction @@ -47,6 +48,7 @@ export enum ExpectedPackageDBType { ADLIB_ACTION = 'adlib_action', BASELINE_ADLIB_PIECE = 'baseline_adlib_piece', BASELINE_ADLIB_ACTION = 'baseline_adlib_action', + BASELINE_PIECE = 'baseline_piece', BUCKET_ADLIB = 'bucket_adlib', BUCKET_ADLIB_ACTION = 'bucket_adlib_action', RUNDOWN_BASELINE_OBJECTS = 'rundown_baseline_objects', @@ -79,6 +81,13 @@ export interface ExpectedPackageDBFromPiece extends ExpectedPackageDBBase { /** The rundown of the Piece this package belongs to */ rundownId: RundownId } +export interface ExpectedPackageDBFromBaselinePiece extends ExpectedPackageDBBase { + fromPieceType: ExpectedPackageDBType.BASELINE_PIECE + /** The Piece this package belongs to */ + pieceId: PieceId + /** The rundown of the Piece this package belongs to */ + rundownId: RundownId +} export interface 
ExpectedPackageDBFromBaselineAdLibPiece extends ExpectedPackageDBBase { fromPieceType: ExpectedPackageDBType.BASELINE_ADLIB_PIECE diff --git a/packages/corelib/src/dataModel/Piece.ts b/packages/corelib/src/dataModel/Piece.ts index 6afc05519f..def89318e9 100644 --- a/packages/corelib/src/dataModel/Piece.ts +++ b/packages/corelib/src/dataModel/Piece.ts @@ -53,6 +53,15 @@ export interface PieceGeneric extends Omit { export interface Piece extends PieceGeneric, Omit { + /** Timeline enabler. When the piece should be active on the timeline. */ + enable: { + start: number | 'now' // TODO - now will be removed from this eventually, but as it is not an acceptable value 99% of the time, that is not really breaking + duration?: number + + // Pieces owned by the Rundown should always be absolute + isAbsolute?: boolean + } + /** * This is the id of the rundown this piece starts playing in. * Currently this is the only rundown the piece could be playing in @@ -62,12 +71,12 @@ export interface Piece * This is the id of the segment this piece starts playing in. * It is the only segment the piece could be playing in, unless the piece has a lifespan which spans beyond the segment */ - startSegmentId: SegmentId + startSegmentId: SegmentId | null /** * This is the id of the part this piece starts playing in. * If the lifespan is WithinPart, it is the only part the piece could be playing in. */ - startPartId: PartId + startPartId: PartId | null /** Whether this piece is a special piece */ pieceType: IBlueprintPieceType diff --git a/packages/corelib/src/dataModel/PieceInstance.ts b/packages/corelib/src/dataModel/PieceInstance.ts index c21d3716ca..b7f2e3f67a 100644 --- a/packages/corelib/src/dataModel/PieceInstance.ts +++ b/packages/corelib/src/dataModel/PieceInstance.ts @@ -34,7 +34,7 @@ export interface PieceInstance { _id: PieceInstanceId /** The rundown this piece belongs to */ rundownId: RundownId - /** The part instace this piece belongs to */ + /** The part instance this piece belongs to. 
*/ partInstanceId: PartInstanceId /** Whether this PieceInstance is a temprorary wrapping of a Piece */ diff --git a/packages/corelib/src/dataModel/Rundown.ts b/packages/corelib/src/dataModel/Rundown.ts index 61b1159eb9..a3f7a3a38a 100644 --- a/packages/corelib/src/dataModel/Rundown.ts +++ b/packages/corelib/src/dataModel/Rundown.ts @@ -57,9 +57,6 @@ export interface Rundown { */ orphaned?: RundownOrphanedReason - /** Last sent storyStatus to ingestDevice (MOS) */ - notifiedCurrentPlayingPartExternalId?: string - /** Holds notes (warnings / errors) thrown by the blueprints during creation */ notes?: Array diff --git a/packages/corelib/src/dataModel/UserEditingDefinitions.ts b/packages/corelib/src/dataModel/UserEditingDefinitions.ts index c9ddfcf0dd..fd53660fb6 100644 --- a/packages/corelib/src/dataModel/UserEditingDefinitions.ts +++ b/packages/corelib/src/dataModel/UserEditingDefinitions.ts @@ -47,6 +47,12 @@ export interface CoreUserEditingDefinitionForm { translationNamespaces: string[] } +export interface CoreUserEditingDefinitionSofie { + type: UserEditingType.SOFIE + /** Id of this operation */ + id: DefaultUserOperationsTypes +} + export interface CoreUserEditingProperties { /** * These properties are dependent on the (primary) piece type, the user will get the option diff --git a/packages/corelib/src/playout/__tests__/processAndPrune.test.ts b/packages/corelib/src/playout/__tests__/processAndPrune.test.ts index 582223f75c..9ad5cdf4c0 100644 --- a/packages/corelib/src/playout/__tests__/processAndPrune.test.ts +++ b/packages/corelib/src/playout/__tests__/processAndPrune.test.ts @@ -5,6 +5,8 @@ import { PieceInstance, PieceInstancePiece, ResolvedPieceInstance } from '../../ import { literal } from '../../lib.js' import { protectString } from '../../protectedString.js' import { + createPartCurrentTimes, + PartCurrentTimes, PieceInstanceWithTimings, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, @@ -44,7 +46,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }) } - function runAndTidyResult(pieceInstances: PieceInstance[], nowInPart: number, includeVirtual?: boolean) { + function runAndTidyResult(pieceInstances: PieceInstance[], partTimes: PartCurrentTimes, includeVirtual?: boolean) { const resolvedInstances = processAndPrunePieceInstanceTimings( { one: { @@ -61,7 +63,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }, }, pieceInstances, - nowInPart, + partTimes, undefined, includeVirtual ) @@ -79,7 +81,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000 }, 'two', PieceLifespan.OutOnRundownEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -101,7 +103,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000, duration: 5000 }, 'one', PieceLifespan.OutOnRundownEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -127,7 +129,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('five', { start: 4000 }, 'one', PieceLifespan.OutOnShowStyleEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) 
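For readability of the test changes that follow: createPartCurrentTimes, added to processAndPrune.ts later in this diff, expands the new second argument as sketched below, so createPartCurrentTimes(500, 0) reproduces the old bare nowInPart = 500.

// Mirrors the helper added further down in processAndPrune.ts
const partTimes = {
	currentTime: 500,
	partStartTime: 0,
	nowInPart: 500 - 0, // currentTime - partStartTime when playback has started
}

// When the part has no plannedStartedPlayback, nowInPart falls back to 0:
const unplayedPartTimes = {
	currentTime: 500,
	partStartTime: null,
	nowInPart: 0,
}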
expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -177,7 +179,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('zero', { start: 6000 }, 'one', PieceLifespan.OutOnShowStyleEnd, true), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -209,7 +211,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('five', { start: 6000 }, 'one', PieceLifespan.OutOnShowStyleEnd, true), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500, true) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0), true) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -259,7 +261,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('five', { start: 6000 }, 'one', PieceLifespan.OutOnShowStyleEnd), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'zero', @@ -305,7 +307,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000 }, 'one', PieceLifespan.OutOnSegmentEnd, 5500), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -323,7 +325,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('four', { start: 1000 }, 'one', PieceLifespan.OutOnRundownChange, 4000), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'three', @@ -339,7 +341,7 @@ describe('processAndPrunePieceInstanceTimings', () => { createPieceInstance('two', { start: 1000 }, 'one', PieceLifespan.OutOnShowStyleEnd, 5500), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -366,7 +368,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'one', @@ -399,7 +401,7 @@ describe('processAndPrunePieceInstanceTimings', () => { }), ] - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { _id: 'two', @@ -427,7 +429,7 @@ describe('processAndPrunePieceInstanceTimings', () => { pieceInstances[1].piece.virtual = true - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) // don't expect virtual Pieces in the results, but 'one' should be pruned too expect(resolvedInstances).toEqual([]) @@ -457,7 +459,7 @@ describe('processAndPrunePieceInstanceTimings', () => { pieceInstances[0].piece.prerollDuration = 200 pieceInstances[1].piece.prerollDuration = 200 - const resolvedInstances = runAndTidyResult(pieceInstances, 500) + const 
resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(500, 0)) expect(resolvedInstances).toEqual([ { @@ -468,6 +470,100 @@ describe('processAndPrunePieceInstanceTimings', () => { }, ]) }) + + describe('absolute timed (rundown owned) pieces', () => { + test('simple collision', () => { + const now = 9000 + const partStart = 8000 + + const pieceInstances = [ + createPieceInstance('one', { start: 0 }, 'one', PieceLifespan.OutOnRundownChange), + createPieceInstance( + 'two', + { start: now + 2000, isAbsolute: true }, + 'one', + PieceLifespan.OutOnRundownChange + ), + createPieceInstance('three', { start: 6000 }, 'one', PieceLifespan.OutOnRundownChange), + ] + + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(now, partStart)) + expect(resolvedInstances).toEqual([ + { + _id: 'one', + priority: 5, + start: 0, + end: 3000, + }, + { + _id: 'two', + priority: 5, + start: partStart + 3000, + end: partStart + 6000, + }, + { + _id: 'three', + priority: 5, + start: 6000, + end: undefined, + }, + ]) + }) + + test('collision with same start time', () => { + const now = 9000 + const partStart = 8000 + + const pieceInstances = [ + createPieceInstance('one', { start: 0 }, 'one', PieceLifespan.OutOnRundownChange), + createPieceInstance( + 'two', + { start: partStart + 2000, isAbsolute: true }, + 'one', + PieceLifespan.OutOnRundownChange + ), + createPieceInstance('three', { start: 2000 }, 'one', PieceLifespan.OutOnRundownChange), + ] + + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(now, partStart)) + expect(resolvedInstances).toEqual([ + { + _id: 'one', + priority: 5, + start: 0, + end: 2000, + }, + { + _id: 'two', + priority: 5, + start: partStart + 2000, + end: undefined, + }, + ]) + + { + // check stability + pieceInstances[1].piece.enable = { start: 2000 } + pieceInstances[2].piece.enable = { start: partStart + 2000, isAbsolute: true } + + const resolvedInstances = runAndTidyResult(pieceInstances, createPartCurrentTimes(now, partStart)) + expect(resolvedInstances).toEqual([ + { + _id: 'one', + priority: 5, + start: 0, + end: 2000, + }, + { + _id: 'three', + priority: 5, + start: partStart + 2000, + end: undefined, + }, + ]) + } + }) + }) }) describe('resolvePrunedPieceInstances', () => { @@ -503,10 +599,10 @@ describe('resolvePrunedPieceInstances', () => { } test('numeric start, no duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 2000 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, resolvedStart: 2000, @@ -515,10 +611,10 @@ describe('resolvePrunedPieceInstances', () => { }) test('numeric start, with planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 2000, duration: 3400 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, resolvedStart: 2000, @@ -527,127 +623,127 @@ describe('resolvePrunedPieceInstances', () => { }) test('now start, no duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }) - 
expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: undefined, } satisfies ResolvedPieceInstance) }) test('now start, with planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3400 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 3400, } satisfies ResolvedPieceInstance) }) test('now start, with end cap', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }, 5000) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 5000 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 5000 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with end cap and longer planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 6000 }, 5000) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 5000 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 5000 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with end cap and shorter planned duration', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3000 }, 5000) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 3000, } satisfies ResolvedPieceInstance) }) test('now start, with userDuration.endRelativeToPart', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }, undefined, { endRelativeToPart: 4000, }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 4000 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 4000 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('numeric start, with userDuration.endRelativeToNow', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 500 }, undefined, { endRelativeToNow: 4000, }) - 
expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, resolvedStart: 500, - resolvedDuration: 4000 - 500 + nowInPart, + resolvedDuration: 4000 - 500 + partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with userDuration.endRelativeToNow', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now' }, undefined, { endRelativeToNow: 4000, }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 4000, } satisfies ResolvedPieceInstance) }) test('now start, with end cap, planned duration and userDuration.endRelativeToPart', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3000 }, 5000, { endRelativeToPart: 2800 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, - resolvedDuration: 2800 - nowInPart, + resolvedStart: partTimes.nowInPart, + resolvedDuration: 2800 - partTimes.nowInPart, } satisfies ResolvedPieceInstance) }) test('now start, with end cap, planned duration and userDuration.endRelativeToNow', async () => { - const nowInPart = 123 + const partTimes = createPartCurrentTimes(123, 0) const piece = createPieceInstance({ start: 'now', duration: 3000 }, 5000, { endRelativeToNow: 2800 }) - expect(resolvePrunedPieceInstance(nowInPart, clone(piece))).toStrictEqual({ + expect(resolvePrunedPieceInstance(partTimes, clone(piece))).toStrictEqual({ instance: clone(piece), timelinePriority: piece.priority, - resolvedStart: nowInPart, + resolvedStart: partTimes.nowInPart, resolvedDuration: 2800, } satisfies ResolvedPieceInstance) }) diff --git a/packages/corelib/src/playout/infinites.ts b/packages/corelib/src/playout/infinites.ts index ccda649691..9d136974d4 100644 --- a/packages/corelib/src/playout/infinites.ts +++ b/packages/corelib/src/playout/infinites.ts @@ -206,6 +206,7 @@ export function getPlayheadTrackingInfinitesForPart( case PieceLifespan.OutOnSegmentEnd: isValid = currentPartInstance.segmentId === intoPart.segmentId && + !!candidatePiece.piece.startPartId && partsToReceiveOnSegmentEndFromSet.has(candidatePiece.piece.startPartId) break case PieceLifespan.OutOnRundownEnd: @@ -243,13 +244,16 @@ export function getPlayheadTrackingInfinitesForPart( markPieceInstanceAsContinuation(p, instance) if (p.infinite) { - // This was copied from before, so we know we can force the time to 0 - instance.piece = { - ...instance.piece, - enable: { - start: 0, - }, + if (!instance.piece.enable.isAbsolute) { + // This was copied from before, so we know we can force the time to 0 + instance.piece = { + ...instance.piece, + enable: { + start: 0, + }, + } } + instance.infinite = { ...p.infinite, infiniteInstanceIndex: p.infinite.infiniteInstanceIndex + 1, @@ -299,11 +303,16 @@ export function isPiecePotentiallyActiveInPart( return false case PieceLifespan.OutOnSegmentEnd: return ( + !!pieceToCheck.startPartId && 
pieceToCheck.startSegmentId === part.segmentId && partsToReceiveOnSegmentEndFrom.has(pieceToCheck.startPartId) ) case PieceLifespan.OutOnRundownEnd: - if (pieceToCheck.startRundownId === part.rundownId) { + if ( + pieceToCheck.startRundownId === part.rundownId && + pieceToCheck.startPartId && + pieceToCheck.startSegmentId + ) { if (pieceToCheck.startSegmentId === part.segmentId) { return partsToReceiveOnSegmentEndFrom.has(pieceToCheck.startPartId) } else { @@ -320,6 +329,7 @@ export function isPiecePotentiallyActiveInPart( } else { // Predicting what will happen at arbitrary point in the future return ( + !!pieceToCheck.startPartId && pieceToCheck.startSegmentId === part.segmentId && partsToReceiveOnSegmentEndFrom.has(pieceToCheck.startPartId) ) @@ -332,6 +342,7 @@ export function isPiecePotentiallyActiveInPart( } else { // Predicting what will happen at arbitrary point in the future return ( + !!pieceToCheck.startSegmentId && pieceToCheck.startRundownId === part.rundownId && segmentsToReceiveOnRundownEndFrom.has(pieceToCheck.startSegmentId) ) @@ -395,8 +406,8 @@ export function getPieceInstancesForPart( if (pieceA.startPartId === pieceB.startPartId) { return pieceA.enable.start < pieceB.enable.start } - const pieceAIndex = orderedPartIds.indexOf(pieceA.startPartId) - const pieceBIndex = orderedPartIds.indexOf(pieceB.startPartId) + const pieceAIndex = pieceA.startPartId === null ? -2 : orderedPartIds.indexOf(pieceA.startPartId) + const pieceBIndex = pieceB.startPartId === null ? -2 : orderedPartIds.indexOf(pieceB.startPartId) if (pieceAIndex === -1) { return false @@ -535,6 +546,16 @@ export function isCandidateMoreImportant( best: ReadonlyDeep, candidate: ReadonlyDeep ): boolean | undefined { + // If one is absolute timed, prefer that + if (best.piece.enable.isAbsolute && !candidate.piece.enable.isAbsolute) { + // Prefer the absolute best + return false + } + if (!best.piece.enable.isAbsolute && candidate.piece.enable.isAbsolute) { + // Prefer the absolute candidate + return true + } + // Prioritise the one from this part over previous part if (best.infinite?.fromPreviousPart && !candidate.infinite?.fromPreviousPart) { // Prefer the candidate as it is not from previous diff --git a/packages/corelib/src/playout/processAndPrune.ts b/packages/corelib/src/playout/processAndPrune.ts index 5fac61e44b..239c8a96ae 100644 --- a/packages/corelib/src/playout/processAndPrune.ts +++ b/packages/corelib/src/playout/processAndPrune.ts @@ -10,12 +10,49 @@ import { ReadonlyDeep } from 'type-fest' /** * Get the `enable: { start: ?? }` for the new piece in terms that can be used as an `end` for another object */ -function getPieceStartTimeAsReference(newPieceStart: number | 'now'): number | RelativeResolvedEndCap { - return typeof newPieceStart === 'number' ? 
newPieceStart : { offsetFromNow: 0 } +function getPieceStartTimeAsReference( + newPieceStart: number | 'now', + partTimes: PartCurrentTimes, + pieceToAffect: ReadonlyDeep +): number | RelativeResolvedEndCap { + if (typeof newPieceStart !== 'number') return { offsetFromNow: 0 } + + if (pieceToAffect.piece.enable.isAbsolute) { + // If the piece is absolute timed, then the end needs to be adjusted to be absolute + if (pieceToAffect.piece.enable.start === 'now') { + return { offsetFromNow: newPieceStart } + } else { + // Translate to an absolute timestamp + return partTimes.currentTime - partTimes.nowInPart + newPieceStart + } + } + + return newPieceStart } -function getPieceStartTimeWithinPart(p: ReadonlyDeep): 'now' | number { - return p.piece.enable.start +function getPieceStartTimeWithinPart(p: ReadonlyDeep, partTimes: PartCurrentTimes): 'now' | number { + const pieceEnable = p.piece.enable + if (pieceEnable.isAbsolute) { + // Note: these can't be adlibbed, so we don't need to consider adding the preroll + + if (pieceEnable.start === 'now') { + // Should never happen, but just in case + return pieceEnable.start + } else { + // Translate this to the part + return pieceEnable.start - partTimes.currentTime + partTimes.nowInPart + } + } + + // If the piece is dynamically inserted, then its preroll should be factored into its start time, but not for any infinite continuations + const isStartOfAdlib = + !!p.dynamicallyInserted && !(p.infinite?.fromPreviousPart || p.infinite?.fromPreviousPlayhead) + + if (isStartOfAdlib && pieceEnable.start !== 'now') { + return pieceEnable.start + (p.piece.prerollDuration ?? 0) + } else { + return pieceEnable.start + } } function isClear(piece?: ReadonlyDeep): boolean { @@ -51,24 +88,43 @@ export interface PieceInstanceWithTimings extends ReadonlyDeep { * This is a maximum end point of the pieceInstance. * If the pieceInstance also has a enable.duration or userDuration set then the shortest one will need to be used * This can be: - * - 'now', if it was stopped by something that does not need a preroll (or is virtual) - * - '#something.start + 100', if it was stopped by something that needs a preroll - * - '100', if not relative to now at all + * - '100', if relative to the start of the part + * - { offsetFromNow: 100 }, if stopped by an absolute time */ resolvedEndCap?: number | RelativeResolvedEndCap priority: number } +export interface PartCurrentTimes { + /** The current time when this was sampled */ + readonly currentTime: number + /** The time the part started playback, if it has begun */ + readonly partStartTime: number | null + /** An approximate current time within the part */ + readonly nowInPart: number +} + +export function createPartCurrentTimes( + currentTime: number, + partStartTime: number | undefined | null +): PartCurrentTimes { + return { + currentTime, + partStartTime: partStartTime ?? null, + nowInPart: typeof partStartTime === 'number' ? currentTime - partStartTime : 0, + } +} + /** * Process the infinite pieces to determine the start time and a maximum end time for each. * Any pieces which have no chance of being shown (duplicate start times) are pruned * The stacking order of infinites is considered, to define the stop times - * Note: `nowInPart` is only needed to order the PieceInstances. The result of this can be cached until that order changes + * Note: `nowInPart` is only needed to order the PieceInstances. The result of this can be cached until that order changes. 
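A worked example of the absolute-to-part-relative translation performed by getPieceStartTimeWithinPart above, using the same numbers as the 'simple collision' test earlier in this diff:

const currentTime = 9000 // wall-clock 'now' when the timings were sampled
const partStartTime = 8000 // plannedStartedPlayback of the part
const nowInPart = currentTime - partStartTime // 1000, as createPartCurrentTimes computes it

// A rundown owned piece with enable { start: 11000, isAbsolute: true } (i.e. now + 2000):
const absoluteStart = 11000
const startWithinPart = absoluteStart - currentTime + nowInPart
// => 11000 - 9000 + 1000 = 3000, so the piece sorts as starting 3000ms into the part,
// which is why the part-relative piece starting at 0 receives an end cap of 3000 in that test.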
*/ export function processAndPrunePieceInstanceTimings( sourceLayers: SourceLayers, pieces: ReadonlyDeep, - nowInPart: number, + partTimes: PartCurrentTimes, keepDisabledPieces?: boolean, includeVirtual?: boolean ): PieceInstanceWithTimings[] { @@ -82,7 +138,7 @@ export function processAndPrunePieceInstanceTimings( } } - const groupedPieces = groupByToMapFunc( + const piecesGroupedByExclusiveGroupOrLayer = groupByToMapFunc( keepDisabledPieces ? pieces : pieces.filter((p) => !p.disabled), // At this stage, if a Piece is disabled, the `keepDisabledPieces` must be turned on. If that's the case // we split out the disabled Pieces onto the sourceLayerId they actually exist on, instead of putting them @@ -91,13 +147,16 @@ export function processAndPrunePieceInstanceTimings( (p) => p.disabled ? p.piece.sourceLayerId : exclusiveGroupMap.get(p.piece.sourceLayerId) || p.piece.sourceLayerId ) - for (const pieces of groupedPieces.values()) { - // Group and sort the pieces so that we can step through each point in time + for (const piecesInExclusiveGroupOrLayer of piecesGroupedByExclusiveGroupOrLayer.values()) { + // Group and sort the pieces so that we can step through each point in time in order + const piecesByStartMap = groupByToMapFunc(piecesInExclusiveGroupOrLayer, (p) => + getPieceStartTimeWithinPart(p, partTimes) + ) const piecesByStart: Array<[number | 'now', ReadonlyDeep]> = _.sortBy( - Array.from(groupByToMapFunc(pieces, (p) => getPieceStartTimeWithinPart(p)).entries()).map(([k, v]) => + Array.from(piecesByStartMap.entries()).map(([k, v]) => literal<[number | 'now', ReadonlyDeep]>([k === 'now' ? 'now' : Number(k), v]) ), - ([k]) => (k === 'now' ? nowInPart : k) + ([k]) => (k === 'now' ? partTimes.nowInPart : k) ) // Step through time @@ -107,10 +166,34 @@ export function processAndPrunePieceInstanceTimings( // Apply the updates // Note: order is important, the higher layers must be done first - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'other') - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'onSegmentEnd') - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'onRundownEnd') - updateWithNewPieces(results, activePieces, newPieces, newPiecesStart, includeVirtual, 'onShowStyleEnd') + updateWithNewPieces(results, partTimes, activePieces, newPieces, newPiecesStart, includeVirtual, 'other') + updateWithNewPieces( + results, + partTimes, + activePieces, + newPieces, + newPiecesStart, + includeVirtual, + 'onSegmentEnd' + ) + updateWithNewPieces( + results, + partTimes, + activePieces, + newPieces, + newPiecesStart, + includeVirtual, + 'onRundownEnd' + ) + updateWithNewPieces( + results, + partTimes, + activePieces, + newPieces, + newPiecesStart, + includeVirtual, + 'onShowStyleEnd' + ) } } @@ -119,6 +202,7 @@ export function processAndPrunePieceInstanceTimings( } function updateWithNewPieces( results: PieceInstanceWithTimings[], + partTimes: PartCurrentTimes, activePieces: PieceInstanceOnInfiniteLayers, newPieces: PieceInstanceOnInfiniteLayers, newPiecesStart: number | 'now', @@ -129,7 +213,7 @@ function updateWithNewPieces( if (newPiece) { const activePiece = activePieces[key] if (activePiece) { - activePiece.resolvedEndCap = getPieceStartTimeAsReference(newPiecesStart) + activePiece.resolvedEndCap = getPieceStartTimeAsReference(newPiecesStart, partTimes, activePiece) } // track the new piece activePieces[key] = newPiece @@ -154,7 +238,11 @@ function updateWithNewPieces( 
(newPiecesStart !== 0 || isCandidateBetterToBeContinued(activePieces.other, newPiece)) ) { // These modes should stop the 'other' when they start if not hidden behind a higher priority onEnd - activePieces.other.resolvedEndCap = getPieceStartTimeAsReference(newPiecesStart) + activePieces.other.resolvedEndCap = getPieceStartTimeAsReference( + newPiecesStart, + partTimes, + activePieces.other + ) activePieces.other = undefined } } @@ -221,21 +309,25 @@ function findPieceInstancesOnInfiniteLayers(pieces: ReadonlyDeep return` @@ -29,7 +22,6 @@ export interface NotifyCurrentlyPlayingPartProps { export type EventsJobFunc = { [EventsJobs.PartInstanceTimings]: (data: PartInstanceTimingsProps) => void [EventsJobs.RundownDataChanged]: (data: RundownDataChangedProps) => void - [EventsJobs.NotifyCurrentlyPlayingPart]: (data: NotifyCurrentlyPlayingPartProps) => void } export function getEventsQueueName(id: StudioId): string { diff --git a/packages/job-worker/src/__mocks__/context.ts b/packages/job-worker/src/__mocks__/context.ts index 0a96d7aa02..d11c3c5431 100644 --- a/packages/job-worker/src/__mocks__/context.ts +++ b/packages/job-worker/src/__mocks__/context.ts @@ -356,6 +356,7 @@ const MockShowStyleBlueprint: () => ShowStyleBlueprintManifest = () => ({ rundown, globalAdLibPieces: [], globalActions: [], + globalPieces: [], baseline: { timelineObjects: [] }, } }, diff --git a/packages/job-worker/src/blueprints/__tests__/context-events.test.ts b/packages/job-worker/src/blueprints/__tests__/context-events.test.ts index ba5e7ac60b..67324f3e72 100644 --- a/packages/job-worker/src/blueprints/__tests__/context-events.test.ts +++ b/packages/job-worker/src/blueprints/__tests__/context-events.test.ts @@ -273,6 +273,7 @@ describe('Test blueprint api context', () => { rundownId, })) as PieceInstance expect(pieceInstance).toBeTruthy() + expect(pieceInstance.partInstanceId).toBe(partInstance._id) // Check what was generated const context = await getContext(rundown, undefined, partInstance, undefined) diff --git a/packages/job-worker/src/blueprints/context/lib.ts b/packages/job-worker/src/blueprints/context/lib.ts index b1bd5dea2b..c56281b91e 100644 --- a/packages/job-worker/src/blueprints/context/lib.ts +++ b/packages/job-worker/src/blueprints/context/lib.ts @@ -2,7 +2,11 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibActio import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' -import { deserializePieceTimelineObjectsBlob, PieceGeneric } from '@sofie-automation/corelib/dist/dataModel/Piece' +import { + deserializePieceTimelineObjectsBlob, + Piece, + PieceGeneric, +} from '@sofie-automation/corelib/dist/dataModel/Piece' import { PieceInstance, PieceInstancePiece, @@ -39,6 +43,7 @@ import { IBlueprintPieceInstance, IBlueprintResolvedPieceInstance, IBlueprintRundownDB, + IBlueprintRundownPieceDB, IBlueprintRundownPlaylist, IBlueprintSegmentDB, IBlueprintSegmentRundown, @@ -64,6 +69,7 @@ import { } from '@sofie-automation/blueprints-integration/dist/userEditing' import type { PlayoutMutatablePart } from '../../playout/model/PlayoutPartInstanceModel.js' import { BlueprintQuickLookInfo } from '@sofie-automation/blueprints-integration/dist/context/quickLoopInfo' +import { IngestPartNotifyItemReady } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' /** * Convert an object to have all the values 
of all keys (including optionals) be 'true' @@ -119,6 +125,9 @@ export const PlayoutMutatablePartSampleKeys = allKeysOfObject return obj } +/** + * Convert a Rundown owned Piece into IBlueprintRundownPieceDB, for passing into the blueprints + * Note: This does not check whether the piece has the correct ownership + * @param piece the Piece to convert + * @returns a cloned complete and clean IBlueprintRundownPieceDB + */ +export function convertRundownPieceToBlueprints(piece: ReadonlyDeep<Piece>): IBlueprintRundownPieceDB { + const obj: Complete<IBlueprintRundownPieceDB> = { + ...convertPieceGenericToBlueprintsInner(piece), + _id: unprotectString(piece._id), + enable: { + ...piece.enable, + start: piece.enable.start === 'now' ? 0 : piece.enable.start, + isAbsolute: true, + }, + virtual: piece.virtual, + notInVision: piece.notInVision, + } + return obj +} + /** * Convert a DBPart into IBlueprintPartDB, for passing into the blueprints * @param part the Part to convert @@ -280,6 +310,9 @@ export function convertPartToBlueprints(part: ReadonlyDeep): IBlueprintP expectedDuration: part.expectedDuration, holdMode: part.holdMode, shouldNotifyCurrentPlayingPart: part.shouldNotifyCurrentPlayingPart, + ingestNotifyPartExternalId: part.ingestNotifyPartExternalId, + ingestNotifyPartReady: part.ingestNotifyPartReady, + ingestNotifyItemsReady: clone(part.ingestNotifyItemsReady), classes: clone(part.classes), classesForNext: clone(part.classesForNext), displayDurationGroup: part.displayDurationGroup, @@ -518,27 +551,27 @@ function translateUserEditsToBlueprint( userEdits.map((userEdit) => { switch (userEdit.type) { case UserEditingType.ACTION: - return { + return literal({ type: UserEditingType.ACTION, id: userEdit.id, label: omit(userEdit.label, 'namespaces'), icon: userEdit.icon, iconInactive: userEdit.iconInactive, isActive: userEdit.isActive, - } satisfies Complete + }) case UserEditingType.FORM: - return { + return literal({ type: UserEditingType.FORM, id: userEdit.id, label: omit(userEdit.label, 'namespaces'), schema: clone(userEdit.schema), currentValues: clone(userEdit.currentValues), - } satisfies Complete + }) case UserEditingType.SOFIE: - return { + return literal({ type: UserEditingType.SOFIE, id: userEdit.id, - } satisfies Complete + }) default: assertNever(userEdit) return undefined @@ -580,28 +613,28 @@ export function translateUserEditsFromBlueprint( userEdits.map((userEdit) => { switch (userEdit.type) { case UserEditingType.ACTION: - return { + return literal({ type: UserEditingType.ACTION, id: userEdit.id, label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), icon: userEdit.icon, iconInactive: userEdit.iconInactive, isActive: userEdit.isActive, - } satisfies Complete + }) case UserEditingType.FORM: - return { + return literal({ type: UserEditingType.FORM, id: userEdit.id, label: wrapTranslatableMessageFromBlueprints(userEdit.label, blueprintIds), schema: clone(userEdit.schema), currentValues: clone(userEdit.currentValues), translationNamespaces: unprotectStringArray(blueprintIds), - } satisfies Complete + }) case UserEditingType.SOFIE: - return { + return literal({ type: UserEditingType.SOFIE, id: userEdit.id, - } satisfies Complete + }) default: assertNever(userEdit) return undefined @@ -666,6 +699,7 @@ export function convertPartialBlueprintMutablePartToCore( return playoutUpdatePart } + export function createBlueprintQuickLoopInfo(playlist: ReadonlyDeep): BlueprintQuickLookInfo | null { const playlistLoopProps = playlist.quickLoop if (!playlistLoopProps) return null diff --git 
a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts index 2e183391ae..f18e2e3a0c 100644 --- a/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts +++ b/packages/job-worker/src/blueprints/context/services/PartAndPieceInstanceActionService.ts @@ -255,6 +255,9 @@ export class PartAndPieceInstanceActionService { }) if (!pieceDB) throw new Error(`Cannot find Piece ${piece._id}`) + if (!pieceDB.startPartId || !pieceDB.startSegmentId) + throw new Error(`Piece ${piece._id} does not belong to a part`) + const rundown = this._playoutModel.getRundown(pieceDB.startRundownId) const segment = rundown?.getSegment(pieceDB.startSegmentId) const part = segment?.getPart(pieceDB.startPartId) @@ -534,6 +537,7 @@ export async function applyActionSideEffects( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) diff --git a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts index ebee49813b..823082a7bb 100644 --- a/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts +++ b/packages/job-worker/src/blueprints/context/services/__tests__/PartAndPieceInstanceActionService.test.ts @@ -58,6 +58,7 @@ import { postProcessPieces, postProcessTimelineObjects } from '../../../postProc import { ActionPartChange, PartAndPieceInstanceActionService } from '../PartAndPieceInstanceActionService.js' import { mock } from 'jest-mock-extended' import { QuickLoopService } from '../../../../playout/model/services/QuickLoopService.js' +import { SelectedPartInstance } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' const { postProcessPieces: postProcessPiecesOrig, postProcessTimelineObjects: postProcessTimelineObjectsOrig } = jest.requireActual('../../../postProcess') @@ -238,7 +239,9 @@ describe('Test blueprint api context', () => { nextPartInstance: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | undefined | null, previousPartInstance?: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | null ) { - const convertInfo = (info: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | null) => { + const convertInfo = ( + info: PlayoutPartInstanceModel | DBPartInstance | PieceInstance | null + ): SelectedPartInstance | null => { if (!info) { return null } else if ('partInstanceId' in info) { diff --git a/packages/job-worker/src/blueprints/postProcess.ts b/packages/job-worker/src/blueprints/postProcess.ts index 5ec47e2f4f..5c17bb1a3c 100644 --- a/packages/job-worker/src/blueprints/postProcess.ts +++ b/packages/job-worker/src/blueprints/postProcess.ts @@ -13,6 +13,7 @@ import { PieceLifespan, IBlueprintPieceType, ITranslatableMessage, + IBlueprintRundownPiece, } from '@sofie-automation/blueprints-integration' import { BlueprintId, @@ -358,6 +359,85 @@ export function postProcessAdLibActions( }) } +/** + * Process and validate some IBlueprintRundownPiece into Piece + * @param context Context from the job queue + * @param pieces IBlueprintPiece to process + * @param blueprintId Id of the Blueprint the Pieces are from + * @param rundownId Id of the Rundown the Pieces belong to + * @param setInvalid If true all Pieces will be marked as 
`invalid`, this should be set to match the owning Part + */ +export function postProcessGlobalPieces( + context: JobContext, + pieces: Array, + blueprintId: BlueprintId, + rundownId: RundownId, + setInvalid?: boolean +): Piece[] { + const span = context.startSpan('blueprints.postProcess.postProcessPieces') + + const uniqueIds = new Map() + const timelineUniqueIds = new Set() + + const processedPieces = pieces.map((orgPiece: IBlueprintRundownPiece) => { + if (!orgPiece.externalId) + throw new Error( + `Error in blueprint "${blueprintId}" externalId not set for rundown piece ("${orgPiece.name}")` + ) + + const docId = getIdHash( + 'Piece', + uniqueIds, + `${rundownId}_${blueprintId}_rundown_piece_${orgPiece.sourceLayerId}_${orgPiece.externalId}` + ) + + const piece: Piece = { + ...orgPiece, + content: omit(orgPiece.content, 'timelineObjects'), + + pieceType: IBlueprintPieceType.Normal, + lifespan: PieceLifespan.OutOnRundownChange, + + _id: protectString(docId), + startRundownId: rundownId, + startSegmentId: null, + startPartId: null, + invalid: setInvalid ?? false, + timelineObjectsString: EmptyPieceTimelineObjectsBlob, + } + + if (piece.pieceType !== IBlueprintPieceType.Normal) { + // transition pieces must not be infinite, lets enforce that + piece.lifespan = PieceLifespan.WithinPart + } + if (piece.extendOnHold) { + // HOLD pieces must not be infinite, as they become that when being held + piece.lifespan = PieceLifespan.WithinPart + } + + if (piece.enable.start === 'now') + throw new Error( + `Error in blueprint "${blueprintId}" rundown piece cannot have a start of 'now'! ("${piece.name}")` + ) + + const timelineObjects = postProcessTimelineObjects( + piece._id, + blueprintId, + orgPiece.content.timelineObjects, + timelineUniqueIds + ) + piece.timelineObjectsString = serializePieceTimelineObjectsBlob(timelineObjects) + + // Fill in ids of unnamed expectedPackages + setDefaultIdOnExpectedPackages(piece.expectedPackages) + + return piece + }) + + span?.end() + return processedPieces +} + /** * Process and validate TSRTimelineObj for the StudioBaseline into TimelineObjRundown * @param blueprintId Id of the Blueprint the TSRTimelineObj are from diff --git a/packages/job-worker/src/events/handle.ts b/packages/job-worker/src/events/handle.ts index 7103a6e800..10476bfe61 100644 --- a/packages/job-worker/src/events/handle.ts +++ b/packages/job-worker/src/events/handle.ts @@ -1,8 +1,4 @@ -import { - NotifyCurrentlyPlayingPartProps, - PartInstanceTimingsProps, - RundownDataChangedProps, -} from '@sofie-automation/corelib/dist/worker/events' +import { PartInstanceTimingsProps, RundownDataChangedProps } from '@sofie-automation/corelib/dist/worker/events' import { getCurrentTime } from '../lib/index.js' import { JobContext } from '../jobs/index.js' import { logger } from '../logging.js' @@ -17,16 +13,7 @@ import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyE import { ExternalMessageQueueObj } from '@sofie-automation/corelib/dist/dataModel/ExternalMessageQueue' import { ICollection, MongoModifier } from '../db/index.js' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' -import { ExternalMessageQueueObjId, PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { runWithRundownLock } from '../ingest/lock.js' -import { - PeripheralDevice, - PeripheralDeviceCategory, - PeripheralDeviceType, -} from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' -import { MOS } from '@sofie-automation/corelib' 
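As a rough illustration of what postProcessGlobalPieces above consumes, a blueprint could return something like the following in BlueprintResultRundown.globalPieces. The layer ids, name, and times are invented and the field list is approximate (the full shape is IBlueprintRundownPiece); note that lifespan is intentionally absent, since core forces OutOnRundownChange for these pieces.

const exampleGlobalPiece = {
	externalId: 'rundown-clock', // required; postProcessGlobalPieces throws if it is missing
	name: 'Rundown clock',
	sourceLayerId: 'studio0_clock', // invented layer ids
	outputLayerId: 'pgm',
	enable: {
		start: 1_700_000_000_000, // absolute wall-clock start; a start of 'now' is rejected
		duration: 60_000,
		isAbsolute: true as const,
	},
	content: { timelineObjects: [] },
}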
-import { executePeripheralDeviceFunction } from '../peripheralDevice.js' -import { DEFAULT_MOS_TIMEOUT_TIME } from '@sofie-automation/shared-lib/dist/core/constants' +import { ExternalMessageQueueObjId } from '@sofie-automation/corelib/dist/dataModel/Ids' async function getBlueprintAndDependencies(context: JobContext, rundown: ReadonlyDeep) { const pShowStyle = context.getShowStyleCompound(rundown.showStyleVariantId, rundown.showStyleBaseId) @@ -226,120 +213,3 @@ export async function handleRundownDataHasChanged(context: JobContext, data: Run logger.error(`Error in showStyleBlueprint.onRundownDataChangedEvent: ${stringifyError(err)}`) } } - -export async function handleNotifyCurrentlyPlayingPart( - context: JobContext, - data: NotifyCurrentlyPlayingPartProps -): Promise { - const rundown = await context.directCollections.Rundowns.findOne(data.rundownId) - if (!rundown) { - logger.warn(`Rundown "${data.rundownId} is missing. Skipping notifyCurrentPlayingPart`) - return - } - - if (rundown.source.type !== 'nrcs') { - logger.warn(`Rundown "${rundown._id} has no peripheralDevice. Skipping notifyCurrentPlayingPart`) - return - } - - const device = await context.directCollections.PeripheralDevices.findOne({ - _id: rundown.source.peripheralDeviceId, - // Future: we really should be constraining this to the studio, but that is often only defined on the parent of this device - // studioId: context.studioId, - parentDeviceId: { $exists: true }, - }) - if (!device || !device.parentDeviceId) { - logger.warn( - `PeripheralDevice "${rundown.source.peripheralDeviceId}" for Rundown "${rundown._id} not found. Skipping notifyCurrentPlayingPart` - ) - return - } - const parentDevice = await context.directCollections.PeripheralDevices.findOne({ - _id: device.parentDeviceId, - 'studioAndConfigId.studioId': context.studioId, - parentDeviceId: { $exists: false }, - }) - if (!parentDevice) { - logger.warn( - `PeripheralDevice "${rundown.source.peripheralDeviceId}" for Rundown "${rundown._id} not found. Skipping notifyCurrentPlayingPart` - ) - return - } - - const previousPlayingPartExternalId: string | null = rundown.notifiedCurrentPlayingPartExternalId || null - const currentPlayingPartExternalId: string | null = data.isRehearsal ? 
null : data.partExternalId - - // Lock the rundown so that we are allowed to write to it - // This is technically a bit of a race condition, but is really low risk and low impact if it does - await runWithRundownLock(context, rundown._id, async (rundown0) => { - if (rundown0) { - if (currentPlayingPartExternalId) { - await context.directCollections.Rundowns.update(rundown._id, { - $set: { - notifiedCurrentPlayingPartExternalId: currentPlayingPartExternalId, - }, - }) - } else { - await context.directCollections.Rundowns.update(rundown._id, { - $unset: { - notifiedCurrentPlayingPartExternalId: 1, - }, - }) - } - } - }) - - // TODO: refactor this to be non-mos centric - if (device.category === PeripheralDeviceCategory.INGEST && device.type === PeripheralDeviceType.MOS) { - // Note: rundown may not be up to date anymore - await notifyCurrentPlayingPartMOS( - context, - device, - rundown.externalId, - previousPlayingPartExternalId, - currentPlayingPartExternalId - ) - } -} - -async function notifyCurrentPlayingPartMOS( - context: JobContext, - peripheralDevice: PeripheralDevice, - rundownExternalId: string, - oldPlayingPartExternalId: string | null, - newPlayingPartExternalId: string | null -): Promise { - if (oldPlayingPartExternalId !== newPlayingPartExternalId) { - // New implementation 2022 only sends PLAY, never stop, after getting advice from AP - // Reason 1: NRK ENPS "sendt tid" (elapsed time) stopped working in ENPS 8/9 when doing STOP prior to PLAY - // Reason 2: there's a delay between the STOP (yellow line disappears) and PLAY (yellow line re-appears), which annoys the users - if (newPlayingPartExternalId) { - try { - await setStoryStatusMOS( - context, - peripheralDevice._id, - rundownExternalId, - newPlayingPartExternalId, - MOS.IMOSObjectStatus.PLAY - ) - } catch (error) { - logger.error(`Error in setStoryStatus PLAY: ${stringifyError(error)}`) - } - } - } -} - -async function setStoryStatusMOS( - context: JobContext, - deviceId: PeripheralDeviceId, - rundownExternalId: string, - storyId: string, - status: MOS.IMOSObjectStatus -): Promise { - logger.debug('setStoryStatus', { deviceId, externalId: rundownExternalId, storyId, status }) - return executePeripheralDeviceFunction(context, deviceId, DEFAULT_MOS_TIMEOUT_TIME + 1000, 'setStoryStatus', [ - rundownExternalId, - storyId, - status, - ]) -} diff --git a/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts b/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts index 498802d0d0..f372c3830b 100644 --- a/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts +++ b/packages/job-worker/src/ingest/__tests__/syncChangesToPartInstance.test.ts @@ -104,6 +104,7 @@ describe('SyncChangesToPartInstancesWorker', () => { return mock( { findPart: jest.fn(() => undefined), + getGlobalPieces: jest.fn(() => []), }, mockOptions ) @@ -338,6 +339,7 @@ describe('SyncChangesToPartInstancesWorker', () => { return mock( { findPart: jest.fn(() => undefined), + getGlobalPieces: jest.fn(() => []), }, mockOptions ) diff --git a/packages/job-worker/src/ingest/expectedPackages.ts b/packages/job-worker/src/ingest/expectedPackages.ts index 164e602f61..b49d9e993e 100644 --- a/packages/job-worker/src/ingest/expectedPackages.ts +++ b/packages/job-worker/src/ingest/expectedPackages.ts @@ -145,6 +145,21 @@ export async function updateExpectedPackagesForRundownBaseline( preserveTypesDuringSave.add(ExpectedPackageDBType.RUNDOWN_BASELINE_OBJECTS) } + // Add expected packages for global pieces + for (const 
piece of ingestModel.getGlobalPieces()) { + if (piece.expectedPackages) { + const bases = generateExpectedPackageBases(context.studio, piece._id, piece.expectedPackages) + for (const base of bases) { + expectedPackages.push({ + ...base, + rundownId: ingestModel.rundownId, + pieceId: piece._id, + fromPieceType: ExpectedPackageDBType.BASELINE_PIECE, + }) + } + } + } + // Preserve anything existing for (const expectedPackage of ingestModel.expectedPackagesForRundownBaseline) { if (preserveTypesDuringSave.has(expectedPackage.fromPieceType)) { diff --git a/packages/job-worker/src/ingest/expectedPlayoutItems.ts b/packages/job-worker/src/ingest/expectedPlayoutItems.ts index 6451f00b65..2e436e5b81 100644 --- a/packages/job-worker/src/ingest/expectedPlayoutItems.ts +++ b/packages/job-worker/src/ingest/expectedPlayoutItems.ts @@ -62,6 +62,9 @@ export async function updateExpectedPlayoutItemsForRundownBaseline( for (const action of baselineAdlibActions) { baselineExpectedPlayoutItems.push(...extractExpectedPlayoutItems(studioId, rundownId, undefined, action)) } + for (const piece of ingestModel.getGlobalPieces()) { + baselineExpectedPlayoutItems.push(...extractExpectedPlayoutItems(studioId, rundownId, undefined, piece)) + } if (baseline) { for (const item of baseline.expectedPlayoutItems ?? []) { @@ -93,7 +96,7 @@ export function updateExpectedPlayoutItemsForPartModel(context: JobContext, part const expectedPlayoutItems: ExpectedPlayoutItemRundown[] = [] for (const piece of part.pieces) { expectedPlayoutItems.push( - ...extractExpectedPlayoutItems(studioId, part.part.rundownId, piece.startPartId, piece) + ...extractExpectedPlayoutItems(studioId, part.part.rundownId, piece.startPartId ?? undefined, piece) ) } for (const piece of part.adLibPieces) { diff --git a/packages/job-worker/src/ingest/generationRundown.ts b/packages/job-worker/src/ingest/generationRundown.ts index fc5641faf2..3f1bf465dc 100644 --- a/packages/job-worker/src/ingest/generationRundown.ts +++ b/packages/job-worker/src/ingest/generationRundown.ts @@ -11,6 +11,7 @@ import { WatchedPackagesHelper } from '../blueprints/context/watchedPackages.js' import { postProcessAdLibPieces, postProcessGlobalAdLibActions, + postProcessGlobalPieces, postProcessRundownBaselineItems, } from '../blueprints/postProcess.js' import { logger } from '../logging.js' @@ -160,8 +161,6 @@ export async function updateRundownFromIngestDataInner( return null } - // TODO - store notes from rundownNotesContext - let regenerateAllContents = true if (generateMode == GenerateRundownMode.MetadataChange) { regenerateAllContents = @@ -295,6 +294,7 @@ export async function regenerateRundownAndBaselineFromIngestData( logger.info(`... got ${rundownRes.baseline.timelineObjects.length} objects from baseline.`) logger.info(`... got ${rundownRes.globalAdLibPieces.length} adLib objects from baseline.`) logger.info(`... got ${(rundownRes.globalActions || []).length} adLib actions from baseline.`) + logger.info(`... 
got ${(rundownRes.globalPieces || []).length} global pieces from baseline.`) const timelineObjectsBlob = serializePieceTimelineObjectsBlob( postProcessRundownBaselineItems(showStyle.base.blueprintId, rundownRes.baseline.timelineObjects) @@ -312,8 +312,14 @@ export async function regenerateRundownAndBaselineFromIngestData( dbRundown._id, rundownRes.globalActions || [] ) + const globalPieces = postProcessGlobalPieces( + context, + rundownRes.globalPieces || [], + showStyle.base.blueprintId, + dbRundown._id + ) - await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions) + await ingestModel.setRundownBaseline(timelineObjectsBlob, adlibPieces, adlibActions, globalPieces) await updateExpectedPackagesForRundownBaseline(context, ingestModel, rundownRes.baseline) diff --git a/packages/job-worker/src/ingest/model/IngestModel.ts b/packages/job-worker/src/ingest/model/IngestModel.ts index 4919912cf6..942095330d 100644 --- a/packages/job-worker/src/ingest/model/IngestModel.ts +++ b/packages/job-worker/src/ingest/model/IngestModel.ts @@ -1,6 +1,7 @@ import { ExpectedPackageDBFromBaselineAdLibAction, ExpectedPackageDBFromBaselineAdLibPiece, + ExpectedPackageDBFromBaselinePiece, ExpectedPackageDBFromRundownBaselineObjects, ExpectedPackageFromRundown, } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' @@ -36,6 +37,7 @@ export type ExpectedPackageForIngestModelBaseline = | ExpectedPackageDBFromBaselineAdLibAction | ExpectedPackageDBFromBaselineAdLibPiece | ExpectedPackageDBFromRundownBaselineObjects + | ExpectedPackageDBFromBaselinePiece export type ExpectedPackageForIngestModel = ExpectedPackageFromRundown | ExpectedPackageForIngestModelBaseline export interface IngestModelReadonly { @@ -124,6 +126,11 @@ export interface IngestModelReadonly { */ getAllPieces(): ReadonlyDeep[] + /** + * Get the Pieces which belong to the Rundown, not a Part + */ + getGlobalPieces(): ReadonlyDeep[] + /** * Search for a Part through the whole Rundown * @param id Id of the Part @@ -233,11 +240,13 @@ export interface IngestModel extends IngestModelReadonly, BaseModel, INotificati * @param timelineObjectsBlob Rundown baseline timeline objects * @param adlibPieces Rundown adlib pieces * @param adlibActions Rundown adlib actions + * @param pieces Rundown owned pieces */ setRundownBaseline( timelineObjectsBlob: PieceTimelineObjectsBlob, adlibPieces: RundownBaselineAdLibItem[], - adlibActions: RundownBaselineAdLibAction[] + adlibActions: RundownBaselineAdLibAction[], + pieces: Piece[] ): Promise /** diff --git a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts index 59b3681802..5ed8687273 100644 --- a/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts +++ b/packages/job-worker/src/ingest/model/implementation/IngestModelImpl.ts @@ -114,6 +114,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { } protected readonly segmentsImpl: Map + readonly #piecesWithChanges = new Set() + #piecesImpl: ReadonlyArray readonly #rundownBaselineExpectedPackagesStore: ExpectedPackagesStore @@ -216,6 +218,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { }) } + this.#piecesImpl = groupedPieces.get(null) ?? 
[] + this.#rundownBaselineObjs = new LazyInitialise(async () => context.directCollections.RundownBaselineObjects.findFetch({ rundownId: this.rundownId, @@ -244,6 +248,7 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { ) this.segmentsImpl = new Map() + this.#piecesImpl = [] this.#rundownBaselineObjs = new LazyInitialise(async () => []) this.#rundownBaselineAdLibPieces = new LazyInitialise(async () => []) @@ -325,6 +330,10 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { return this.getAllOrderedParts().flatMap((part) => part.pieces) } + getGlobalPieces(): ReadonlyDeep[] { + return [...this.#piecesImpl] + } + findPart(partId: PartId): IngestPartModel | undefined { for (const segment of this.segmentsImpl.values()) { if (!segment || segment.deleted) continue @@ -450,7 +459,6 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { // owned by elsewhere airStatus: this.#rundownImpl?.airStatus, status: this.#rundownImpl?.status, - notifiedCurrentPlayingPartExternalId: this.#rundownImpl?.notifiedCurrentPlayingPartExternalId, }) deleteAllUndefinedProperties(newRundown) @@ -465,7 +473,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { async setRundownBaseline( timelineObjectsBlob: PieceTimelineObjectsBlob, adlibPieces: RundownBaselineAdLibItem[], - adlibActions: RundownBaselineAdLibAction[] + adlibActions: RundownBaselineAdLibAction[], + pieces: Piece[] ): Promise { const [loadedRundownBaselineObjs, loadedRundownBaselineAdLibPieces, loadedRundownBaselineAdLibActions] = await Promise.all([ @@ -487,11 +496,13 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { ) // Compare and update the adlibPieces - const newAdlibPieces = adlibPieces.map((piece) => ({ - ...clone(piece), - partId: undefined, - rundownId: this.rundownId, - })) + const newAdlibPieces = adlibPieces.map( + (piece): AdLibPiece => ({ + ...clone(piece), + partId: undefined, + rundownId: this.rundownId, + }) + ) this.#rundownBaselineAdLibPieces.setValue( diffAndReturnLatestObjects( this.#rundownBaselineAdLibPiecesWithChanges, @@ -501,11 +512,13 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { ) // Compare and update the adlibActions - const newAdlibActions = adlibActions.map((action) => ({ - ...clone(action), - partId: undefined, - rundownId: this.rundownId, - })) + const newAdlibActions = adlibActions.map( + (action): RundownBaselineAdLibAction => ({ + ...clone(action), + partId: undefined, + rundownId: this.rundownId, + }) + ) this.#rundownBaselineAdLibActions.setValue( diffAndReturnLatestObjects( this.#rundownBaselineAdLibActionsWithChanges, @@ -513,6 +526,17 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { newAdlibActions ) ) + + // Compare and update the rundown pieces + const newPieces = pieces.map( + (piece): Piece => ({ + ...clone(piece), + startRundownId: this.rundownId, + startPartId: null, + startSegmentId: null, + }) + ) + this.#piecesImpl = diffAndReturnLatestObjects(this.#piecesWithChanges, this.#piecesImpl, newPieces) } setRundownOrphaned(orphaned: RundownOrphanedReason | undefined): void { @@ -616,9 +640,26 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { for (const segment of this.segmentsImpl.values()) { if (segment.deleted) { logOrThrowError(new Error(`Failed no changes in model assertion, Segment has been changed`)) + break } else { const err = 
segment.segmentModel.checkNoChanges() - if (err) logOrThrowError(err) + if (err) { + logOrThrowError(err) + break + } + } + } + + if (this.#piecesWithChanges.size) { + logOrThrowError(new Error(`Failed no changes in model assertion, Rundown Pieces have been changed`)) + } else { + for (const piece of this.#piecesImpl.values()) { + if (!piece) { + logOrThrowError( + new Error(`Failed no changes in model assertion, Rundown Pieces have been changed`) + ) + break + } } } } finally { @@ -676,6 +717,8 @@ export class IngestModelImpl implements IngestModel, DatabasePersistedModel { saveHelper.addExpectedPackagesStore(this.#rundownBaselineExpectedPackagesStore) this.#rundownBaselineExpectedPackagesStore.clearChangedFlags() + saveHelper.addChangedPieces(this.#piecesImpl, this.#piecesWithChanges) + await Promise.all([ this.#rundownHasChanged && this.#rundownImpl ? this.context.directCollections.Rundowns.replace(this.#rundownImpl) diff --git a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts index b3d8a391cc..296b4b2b30 100644 --- a/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts +++ b/packages/job-worker/src/ingest/model/implementation/SaveIngestModel.ts @@ -2,7 +2,7 @@ import { AdLibAction } from '@sofie-automation/corelib/dist/dataModel/AdlibActio import { AdLibPiece } from '@sofie-automation/corelib/dist/dataModel/AdLibPiece' import { ExpectedPackageDB } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItem } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' -import { RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PieceId, RundownId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' @@ -53,6 +53,19 @@ export class SaveIngestModelHelper { } } + addChangedPieces(pieces: ReadonlyArray, changedPieceIds: Set): void { + for (const piece of pieces) { + this.#pieces.addDocument(piece, changedPieceIds.has(piece._id)) + } + + const currentPieceIds = new Set(pieces.map((p) => p._id)) + for (const changedPieceId of changedPieceIds) { + if (!currentPieceIds.has(changedPieceId)) { + this.#pieces.deleteDocument(changedPieceId) + } + } + } + commit(context: JobContext): Array> { // Log deleted ids: const deletedIds: { [key: string]: ProtectedString[] } = { diff --git a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts index 9e87733c1b..32324cd12d 100644 --- a/packages/job-worker/src/ingest/syncChangesToPartInstance.ts +++ b/packages/job-worker/src/ingest/syncChangesToPartInstance.ts @@ -24,6 +24,7 @@ import { convertPartInstanceToBlueprints, convertPartToBlueprints, convertPieceInstanceToBlueprints, + convertRundownPieceToBlueprints, } from '../blueprints/context/lib.js' import { validateAdlibTestingPartInstanceProperties } from '../playout/adlibTesting.js' import { ReadonlyDeep } from 'type-fest' @@ -50,7 +51,7 @@ export interface PartInstanceToSync { * This defers out to the Blueprints to do the syncing * @param context Context of the job being run * @param playoutModel Playout model containing containing the Rundown being ingested - * @param ingestModel Ingest model for the Rundown + * @param ingestModel Ingest model for 
the Rundown. This is being written to mongodb while this method runs */ export async function syncChangesToPartInstances( context: JobContext, @@ -181,6 +182,7 @@ export class SyncChangesToPartInstancesWorker { await syncPlayheadInfinitesForNextPartInstance( this.#context, this.#playoutModel, + this.#ingestModel, this.#playoutModel.currentPartInstance, this.#playoutModel.nextPartInstance ) @@ -210,6 +212,7 @@ export class SyncChangesToPartInstancesWorker { actions: instanceToSync.newPart && ingestPart ? ingestPart.adLibActions.map(convertAdLibActionToBlueprints) : [], referencedAdlibs: referencedAdlibs, + rundownPieces: this.#ingestModel.getGlobalPieces().map(convertRundownPieceToBlueprints), } } diff --git a/packages/job-worker/src/jobs/studio.ts b/packages/job-worker/src/jobs/studio.ts index 16368fec1a..5a257fc7d1 100644 --- a/packages/job-worker/src/jobs/studio.ts +++ b/packages/job-worker/src/jobs/studio.ts @@ -67,6 +67,6 @@ export function convertStudioToJobStudio(studio: DBStudio): JobStudio { routeSetExclusivityGroups: studio.routeSetExclusivityGroupsWithOverrides ? applyAndValidateOverrides(studio.routeSetExclusivityGroupsWithOverrides).obj : (studio as any).routeSetExclusivityGroups || {}, - // packageContainers: studio.packageContainersWithOverrides ? applyAndValidateOverrides(studio.packageContainersWithOverrides).obj : (studio as any).packageContainers || {}, + // packageContainers: applyAndValidateOverrides(studio.packageContainersWithOverrides).obj, } } diff --git a/packages/job-worker/src/playout/__tests__/playout.test.ts b/packages/job-worker/src/playout/__tests__/playout.test.ts index 369f445397..912b118cc2 100644 --- a/packages/job-worker/src/playout/__tests__/playout.test.ts +++ b/packages/job-worker/src/playout/__tests__/playout.test.ts @@ -608,7 +608,7 @@ describe('Playout API', () => { : now) + Math.random() * TIME_RANDOM, }, - } + } satisfies PlayoutChangedResult }), ], }) @@ -701,7 +701,7 @@ describe('Playout API', () => { : now) + Math.random() * TIME_RANDOM, }, - } + } satisfies PlayoutChangedResult }), ], }) diff --git a/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts b/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts index 42da0af9e9..76898419ed 100644 --- a/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts +++ b/packages/job-worker/src/playout/__tests__/resolvedPieces.test.ts @@ -13,6 +13,8 @@ import { } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { + createPartCurrentTimes, + PartCurrentTimes, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, } from '@sofie-automation/corelib/dist/playout/processAndPrune' @@ -93,8 +95,9 @@ describe('Resolved Pieces', () => { nowInPart: number | null, pieceInstances: PieceInstance[] ): ResolvedPieceInstance[] { - const preprocessedPieces = processAndPrunePieceInstanceTimings(sourceLayers, pieceInstances, nowInPart ?? 0) - return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(nowInPart ?? 
0, instance)) + const partTimes = createPartCurrentTimes(5000, nowInPart) + const preprocessedPieces = processAndPrunePieceInstanceTimings(sourceLayers, pieceInstances, partTimes) + return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(partTimes, instance)) } test('simple single piece', async () => { @@ -398,18 +401,18 @@ describe('Resolved Pieces', () => { } function createPartInstanceInfo( - partStarted: number, - nowInPart: number, + partTimes: PartCurrentTimes, + // partStarted: number, + // nowInPart: number, partInstance: DBPartInstance, currentPieces: PieceInstance[] ): SelectedPartInstanceTimelineInfo { - const pieceInstances = processAndPrunePieceInstanceTimings(sourceLayers, currentPieces, nowInPart) + const pieceInstances = processAndPrunePieceInstanceTimings(sourceLayers, currentPieces, partTimes) return { partInstance, pieceInstances, - nowInPart, - partStarted, + partTimes, // Approximate `calculatedTimings`, for the partInstances which already have it cached calculatedTimings: getPartTimingsOrDefaults(partInstance, pieceInstances), regenerateTimelineAt: undefined, @@ -421,9 +424,10 @@ describe('Resolved Pieces', () => { expect(sourceLayerId).toBeTruthy() const now = 990000 + const partTimes = createPartCurrentTimes(now, now) const piece001 = createPieceInstance(sourceLayerId, { start: 0 }) - const currentPartInfo = createPartInstanceInfo(now, 0, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const resolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -456,13 +460,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [ - piece001, - virtualPiece, - ]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001, virtualPiece]) // Check the result const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( @@ -473,8 +473,8 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted, - resolvedDuration: nowInPart, + resolvedStart: partTimes.partStartTime!, + resolvedDuration: partTimes.nowInPart, }, { // TODO - this object should not be present? @@ -501,13 +501,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [ - piece001, - virtualPiece, - ]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001, virtualPiece]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -517,13 +513,13 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted, + resolvedStart: partTimes.partStartTime!, resolvedDuration: 7000, }, { // TODO - this object should not be present? _id: virtualPiece._id, - resolvedStart: partStarted + 7000, + resolvedStart: partTimes.partStartTime! 
+ 7000, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -536,10 +532,9 @@ describe('Resolved Pieces', () => { const piece001 = createPieceInstance(sourceLayerId, { start: 0, duration: 0 }) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -549,7 +544,7 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted, + resolvedStart: partTimes.partStartTime!, resolvedDuration: 0, }, ] satisfies StrippedResult) @@ -577,10 +572,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [ + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [ piece001, infinite1, infinite2, @@ -594,17 +588,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: infinite1._id, - resolvedStart: partStarted + 1000, + resolvedStart: partTimes.partStartTime! + 1000, resolvedDuration: 4000, }, { _id: piece001._id, - resolvedStart: partStarted + 3000, + resolvedStart: partTimes.partStartTime! + 3000, resolvedDuration: 2000, }, { _id: infinite2._id, - resolvedStart: partStarted + 5000, + resolvedStart: partTimes.partStartTime! + 5000, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -626,10 +620,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 2000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -639,7 +632,7 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted + 3000, + resolvedStart: partTimes.partStartTime! + 3000, resolvedDuration: 1200, }, ] satisfies StrippedResult) @@ -661,10 +654,9 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 7000 - const partStarted = now - nowInPart + const partTimes = createPartCurrentTimes(now, now - 7000) - const currentPartInfo = createPartInstanceInfo(partStarted, nowInPart, createPartInstance(), [piece001]) + const currentPartInfo = createPartInstanceInfo(partTimes, createPartInstance(), [piece001]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -674,7 +666,7 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: partStarted + 4000, + resolvedStart: partTimes.partStartTime! 
+ 4000, resolvedDuration: -4000 + 7000 + 1300, }, ] satisfies StrippedResult) @@ -689,20 +681,12 @@ describe('Resolved Pieces', () => { const piece010 = createPieceInstance(sourceLayerId, { start: 0 }) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart - const previousPartStarted = currentPartStarted - 5000 - - const previousPartInfo = createPartInstanceInfo( - previousPartStarted, - nowInPart + 5000, - createPartInstance(), - [piece001] - ) + const currentPartTimes = createPartCurrentTimes(now, now - 2000) + const previousPartTimes = createPartCurrentTimes(now, now - 7000) - const currentPartInfo = createPartInstanceInfo(currentPartStarted, nowInPart, createPartInstance(), [ - piece010, - ]) + const previousPartInfo = createPartInstanceInfo(previousPartTimes, createPartInstance(), [piece001]) + + const currentPartInfo = createPartInstanceInfo(currentPartTimes, createPartInstance(), [piece010]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -715,12 +699,12 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: previousPartStarted, + resolvedStart: previousPartTimes.partStartTime!, resolvedDuration: 5000, }, { _id: piece010._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -743,21 +727,16 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart - const previousPartStarted = currentPartStarted - 5000 - - const previousPartInfo = createPartInstanceInfo( - previousPartStarted, - nowInPart + 5000, - createPartInstance(), - [piece001, cappedInfinitePiece] - ) + const currentPartTimes = createPartCurrentTimes(now, now - 2000) + const previousPartTimes = createPartCurrentTimes(now, now - 7000) - const currentPartInfo = createPartInstanceInfo(currentPartStarted, nowInPart, createPartInstance(), [ - piece010, + const previousPartInfo = createPartInstanceInfo(previousPartTimes, createPartInstance(), [ + piece001, + cappedInfinitePiece, ]) + const currentPartInfo = createPartInstanceInfo(currentPartTimes, createPartInstance(), [piece010]) + const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, { @@ -769,17 +748,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: previousPartStarted, + resolvedStart: previousPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: cappedInfinitePiece._id, - resolvedStart: previousPartStarted + 1000, + resolvedStart: previousPartTimes.partStartTime! 
+ 1000, resolvedDuration: 4000, }, { _id: piece010._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -820,18 +799,15 @@ describe('Resolved Pieces', () => { } const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart - const previousPartStarted = currentPartStarted - 5000 - - const previousPartInfo = createPartInstanceInfo( - previousPartStarted, - nowInPart + 5000, - createPartInstance(), - [piece001, startingInfinitePiece] - ) + const currentPartTimes = createPartCurrentTimes(now, now - 2000) + const previousPartTimes = createPartCurrentTimes(now, now - 7000) + + const previousPartInfo = createPartInstanceInfo(previousPartTimes, createPartInstance(), [ + piece001, + startingInfinitePiece, + ]) - const currentPartInfo = createPartInstanceInfo(currentPartStarted, nowInPart, createPartInstance(), [ + const currentPartInfo = createPartInstanceInfo(currentPartTimes, createPartInstance(), [ piece010, continuingInfinitePiece, ]) @@ -847,17 +823,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: previousPartStarted, + resolvedStart: previousPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: continuingInfinitePiece._id, - resolvedStart: previousPartStarted + 1000, + resolvedStart: previousPartTimes.partStartTime! + 1000, resolvedDuration: 9400, }, { _id: piece010._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -872,14 +848,12 @@ describe('Resolved Pieces', () => { const piece010 = createPieceInstance(sourceLayerId, { start: 0 }) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart + const currentPartTimes = createPartCurrentTimes(now, now - 2000) const currentPartLength = 13000 - const nextPartStart = currentPartStarted + currentPartLength + const nextPartTimes = createPartCurrentTimes(now, currentPartTimes.partStartTime! + currentPartLength) const currentPartInfo = createPartInstanceInfo( - currentPartStarted, - nowInPart, + currentPartTimes, createPartInstance({ autoNext: true, expectedDuration: currentPartLength, @@ -887,7 +861,7 @@ describe('Resolved Pieces', () => { [piece001] ) - const nextPartInfo = createPartInstanceInfo(nextPartStart, 0, createPartInstance(), [piece010]) + const nextPartInfo = createPartInstanceInfo(nextPartTimes, createPartInstance(), [piece010]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -900,12 +874,12 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: currentPartLength, }, { _id: piece010._id, - resolvedStart: nextPartStart, + resolvedStart: nextPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -928,14 +902,12 @@ describe('Resolved Pieces', () => { ) const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart + const currentPartTimes = createPartCurrentTimes(now, now - 2000) const currentPartLength = 13000 - const nextPartStart = currentPartStarted + currentPartLength + const nextPartTimes = createPartCurrentTimes(now, currentPartTimes.partStartTime! 
+ currentPartLength) const currentPartInfo = createPartInstanceInfo( - currentPartStarted, - nowInPart, + currentPartTimes, createPartInstance({ autoNext: true, expectedDuration: currentPartLength, @@ -943,7 +915,7 @@ describe('Resolved Pieces', () => { [piece001, cappedInfinitePiece] ) - const nextPartInfo = createPartInstanceInfo(nextPartStart, 0, createPartInstance(), [piece010]) + const nextPartInfo = createPartInstanceInfo(nextPartTimes, createPartInstance(), [piece010]) const simpleResolvedPieces = getResolvedPiecesForPartInstancesOnTimeline( context, @@ -957,17 +929,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: cappedInfinitePiece._id, - resolvedStart: currentPartStarted + 1000, + resolvedStart: currentPartTimes.partStartTime! + 1000, resolvedDuration: currentPartLength - 1000, }, { _id: piece010._id, - resolvedStart: nextPartStart, + resolvedStart: nextPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) @@ -1008,14 +980,12 @@ describe('Resolved Pieces', () => { } const now = 990000 - const nowInPart = 2000 - const currentPartStarted = now - nowInPart + const currentPartTimes = createPartCurrentTimes(now, now - 2000) const currentPartLength = 13000 - const nextPartStart = currentPartStarted + currentPartLength + const nextPartTimes = createPartCurrentTimes(now, currentPartTimes.partStartTime! + currentPartLength) const currentPartInfo = createPartInstanceInfo( - currentPartStarted, - nowInPart, + currentPartTimes, createPartInstance({ autoNext: true, expectedDuration: currentPartLength, @@ -1023,7 +993,7 @@ describe('Resolved Pieces', () => { [piece001, startingInfinitePiece] ) - const nextPartInfo = createPartInstanceInfo(nextPartStart, 0, createPartInstance(), [ + const nextPartInfo = createPartInstanceInfo(nextPartTimes, createPartInstance(), [ piece010, continuingInfinitePiece, ]) @@ -1040,17 +1010,17 @@ describe('Resolved Pieces', () => { expect(stripResult(simpleResolvedPieces)).toEqual([ { _id: piece001._id, - resolvedStart: currentPartStarted, + resolvedStart: currentPartTimes.partStartTime!, resolvedDuration: 1000, }, { _id: startingInfinitePiece._id, - resolvedStart: currentPartStarted + 1000, + resolvedStart: currentPartTimes.partStartTime! 
+ 1000, resolvedDuration: currentPartLength - 1000 + 3400, }, { _id: piece010._id, - resolvedStart: nextPartStart, + resolvedStart: nextPartTimes.partStartTime!, resolvedDuration: undefined, }, ] satisfies StrippedResult) diff --git a/packages/job-worker/src/playout/activePlaylistActions.ts b/packages/job-worker/src/playout/activePlaylistActions.ts index 89f049a150..62db41ad04 100644 --- a/packages/job-worker/src/playout/activePlaylistActions.ts +++ b/packages/job-worker/src/playout/activePlaylistActions.ts @@ -168,8 +168,6 @@ export async function deactivateRundownPlaylistInner( let rundown: ReadonlyDeep | undefined if (currentPartInstance) { rundown = playoutModel.getRundown(currentPartInstance.partInstance.rundownId)?.rundown - - playoutModel.queueNotifyCurrentlyPlayingPartEvent(currentPartInstance.partInstance.rundownId, null) } else if (nextPartInstance) { rundown = playoutModel.getRundown(nextPartInstance.partInstance.rundownId)?.rundown } diff --git a/packages/job-worker/src/playout/adlibJobs.ts b/packages/job-worker/src/playout/adlibJobs.ts index 836ea26c31..0d0bf2e307 100644 --- a/packages/job-worker/src/playout/adlibJobs.ts +++ b/packages/job-worker/src/playout/adlibJobs.ts @@ -194,6 +194,7 @@ async function pieceTakeNowAsAdlib( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) @@ -373,6 +374,7 @@ export async function handleStopPiecesOnSourceLayers( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) diff --git a/packages/job-worker/src/playout/adlibUtils.ts b/packages/job-worker/src/playout/adlibUtils.ts index 428d60fa75..adde0ab27e 100644 --- a/packages/job-worker/src/playout/adlibUtils.ts +++ b/packages/job-worker/src/playout/adlibUtils.ts @@ -68,6 +68,7 @@ export async function innerStartOrQueueAdLibPiece( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, currentPartInstance, playoutModel.nextPartInstance ) @@ -310,13 +311,15 @@ export function innerStopPieces( const pieceInstanceModel = playoutModel.findPieceInstance(pieceInstance._id) if (pieceInstanceModel) { - const newDuration: Required['userDuration'] = playoutModel.isMultiGatewayMode - ? { - endRelativeToNow: offsetRelativeToNow, - } - : { - endRelativeToPart: relativeStopAt, - } + const newDuration: Required['userDuration'] = + playoutModel.isMultiGatewayMode || + pieceInstanceModel.pieceInstance.pieceInstance.piece.enable.isAbsolute + ? 
{ + endRelativeToNow: offsetRelativeToNow, + } + : { + endRelativeToPart: relativeStopAt, + } pieceInstanceModel.pieceInstance.setDuration(newDuration) diff --git a/packages/job-worker/src/playout/debug.ts b/packages/job-worker/src/playout/debug.ts index 2028b0b23c..cd6988b72d 100644 --- a/packages/job-worker/src/playout/debug.ts +++ b/packages/job-worker/src/playout/debug.ts @@ -25,6 +25,7 @@ export async function handleDebugSyncPlayheadInfinitesForNextPartInstance( await syncPlayheadInfinitesForNextPartInstance( context, playoutModel, + undefined, playoutModel.currentPartInstance, playoutModel.nextPartInstance ) diff --git a/packages/job-worker/src/playout/infinites.ts b/packages/job-worker/src/playout/infinites.ts index 4523e8cc57..0f149931ba 100644 --- a/packages/job-worker/src/playout/infinites.ts +++ b/packages/job-worker/src/playout/infinites.ts @@ -2,21 +2,24 @@ import { PartInstanceId, RundownId, ShowStyleBaseId } from '@sofie-automation/co import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { DBPartInstance } from '@sofie-automation/corelib/dist/dataModel/PartInstance' import { Piece } from '@sofie-automation/corelib/dist/dataModel/Piece' -import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { PieceInstance, wrapPieceToInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { getPieceInstancesForPart as libgetPieceInstancesForPart, getPlayheadTrackingInfinitesForPart as libgetPlayheadTrackingInfinitesForPart, buildPiecesStartingInThisPartQuery, buildPastInfinitePiecesForThisPartQuery, } from '@sofie-automation/corelib/dist/playout/infinites' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { JobContext } from '../jobs/index.js' import { ReadonlyDeep } from 'type-fest' import { PlayoutModel } from './model/PlayoutModel.js' import { PlayoutPartInstanceModel } from './model/PlayoutPartInstanceModel.js' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel.js' import { getCurrentTime } from '../lib/index.js' -import { flatten } from '@sofie-automation/corelib/dist/lib' +import { clone, flatten, getRandomId } from '@sofie-automation/corelib/dist/lib' import _ from 'underscore' import { IngestModelReadonly } from '../ingest/model/IngestModel.js' import { SegmentOrphanedReason } from '@sofie-automation/corelib/dist/dataModel/Segment' @@ -217,6 +220,7 @@ export async function fetchPiecesThatMayBeActiveForPart( export async function syncPlayheadInfinitesForNextPartInstance( context: JobContext, playoutModel: PlayoutModel, + unsavedIngestModel: Pick | undefined, fromPartInstance: PlayoutPartInstanceModel | null, toPartInstance: PlayoutPartInstanceModel | null ): Promise { @@ -253,11 +257,14 @@ export async function syncPlayheadInfinitesForNextPartInstance( toPartInstance.partInstance.part ) - const nowInPart = getCurrentTime() - (fromPartInstance.partInstance.timings?.plannedStartedPlayback ?? 
0) + const partTimes = createPartCurrentTimes( + getCurrentTime(), + fromPartInstance.partInstance.timings?.plannedStartedPlayback + ) const prunedPieceInstances = processAndPrunePieceInstanceTimings( showStyleBase.sourceLayers, fromPartInstance.pieceInstances.map((p) => p.pieceInstance), - nowInPart, + partTimes, undefined, true ) @@ -283,6 +290,17 @@ export async function syncPlayheadInfinitesForNextPartInstance( ) toPartInstance.replaceInfinitesFromPreviousPlayhead(infinites) + } else if (toPartInstance && !fromPartInstance) { + // This is the first take of the rundown, ensure the baseline infinites are loaded + const baselineInfinites = await getBaselineInfinitesForPart( + context, + playoutModel, + unsavedIngestModel, + toPartInstance.partInstance.part, + toPartInstance.partInstance._id + ) + + toPartInstance.replaceInfinitesFromPreviousPlayhead(baselineInfinites) } if (span) span.end() } @@ -385,3 +403,38 @@ export function getPieceInstancesForPart( if (span) span.end() return res } + +export async function getBaselineInfinitesForPart( + context: JobContext, + playoutModel: PlayoutModel, + unsavedIngestModel: Pick | undefined, + part: ReadonlyDeep, + partInstanceId: PartInstanceId +): Promise { + // Find the pieces. If an ingest model is provided, use that instead of the database + const pieces = + unsavedIngestModel && unsavedIngestModel.rundownId === part.rundownId + ? unsavedIngestModel.getAllPieces().filter((p) => p.startPartId === null) + : await context.directCollections.Pieces.findFetch({ + startRundownId: part.rundownId, + startPartId: null, + }) + + const playlistActivationId = playoutModel.playlist.activationId + if (!playlistActivationId) throw new Error(`RundownPlaylist "${playoutModel.playlistId}" is not active`) + + return pieces.map((piece) => { + const instance = wrapPieceToInstance(clone(piece), playlistActivationId, partInstanceId, false) + + // All these pieces are expected to be outOnRundownChange infinites, as that is how they are ingested + + instance.infinite = { + infiniteInstanceId: getRandomId(), + infiniteInstanceIndex: 0, + infinitePieceId: instance.piece._id, + fromPreviousPart: true, + } + + return instance + }) +} diff --git a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts index 5aeb024a98..c421f1b5c7 100644 --- a/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts +++ b/packages/job-worker/src/playout/lookahead/__tests__/lookahead.test.ts @@ -23,6 +23,7 @@ type TgetOrderedPartsAfterPlayhead = jest.MockedFunction { @@ -272,8 +273,7 @@ describe('Lookahead', () => { const partInstancesInfo: SelectedPartInstancesTimelineInfo = {} partInstancesInfo.previous = { partInstance: { _id: 'abc2', part: { _id: 'abc' } } as any, - nowInPart: 987, - partStarted: getCurrentTime() + 546, + partTimes: createPartCurrentTimes(getCurrentTime(), getCurrentTime() + 546), pieceInstances: ['1', '2'] as any, calculatedTimings: { inTransitionStart: null } as any, regenerateTimelineAt: undefined, @@ -282,7 +282,7 @@ describe('Lookahead', () => { const expectedPrevious = { part: partInstancesInfo.previous.partInstance, onTimeline: true, - nowInPart: partInstancesInfo.previous.nowInPart, + nowInPart: partInstancesInfo.previous.partTimes.nowInPart, allPieces: partInstancesInfo.previous.pieceInstances, calculatedTimings: partInstancesInfo.previous.calculatedTimings, } @@ -296,8 +296,7 @@ describe('Lookahead', () => { // Add a current partInstancesInfo.current = { 
partInstance: { _id: 'curr', part: {} } as any, - nowInPart: 56, - partStarted: getCurrentTime() + 865, + partTimes: createPartCurrentTimes(getCurrentTime(), getCurrentTime() + 865), pieceInstances: ['3', '4'] as any, calculatedTimings: { inTransitionStart: null } as any, regenerateTimelineAt: undefined, @@ -305,7 +304,7 @@ describe('Lookahead', () => { const expectedCurrent = { part: partInstancesInfo.current.partInstance, onTimeline: true, - nowInPart: partInstancesInfo.current.nowInPart, + nowInPart: partInstancesInfo.current.partTimes.nowInPart, allPieces: partInstancesInfo.current.pieceInstances, calculatedTimings: partInstancesInfo.current.calculatedTimings, } @@ -317,8 +316,7 @@ describe('Lookahead', () => { // Add a next partInstancesInfo.next = { partInstance: { _id: 'nxt2', part: { _id: 'nxt' } } as any, - nowInPart: -85, - partStarted: getCurrentTime() + 142, + partTimes: createPartCurrentTimes(getCurrentTime(), getCurrentTime() + 142), pieceInstances: ['5'] as any, calculatedTimings: { inTransitionStart: null } as any, regenerateTimelineAt: undefined, @@ -326,7 +324,7 @@ describe('Lookahead', () => { const expectedNext = { part: partInstancesInfo.next.partInstance, onTimeline: false, - nowInPart: partInstancesInfo.next.nowInPart, + nowInPart: partInstancesInfo.next.partTimes.nowInPart, allPieces: partInstancesInfo.next.pieceInstances, calculatedTimings: partInstancesInfo.next.calculatedTimings, } diff --git a/packages/job-worker/src/playout/lookahead/findObjects.ts b/packages/job-worker/src/playout/lookahead/findObjects.ts index 8e228a5a01..d96035a74f 100644 --- a/packages/job-worker/src/playout/lookahead/findObjects.ts +++ b/packages/job-worker/src/playout/lookahead/findObjects.ts @@ -19,7 +19,7 @@ function getBestPieceInstanceId(piece: ReadonlyDeep): string { return unprotectString(piece._id) } // Something is needed, and it must be distant future here, so accuracy is not important - return unprotectString(piece.piece.startPartId) + return unprotectString(piece.piece.startPartId ?? 
piece.rundownId) } function tryActivateKeyframesForObject( diff --git a/packages/job-worker/src/playout/lookahead/index.ts b/packages/job-worker/src/playout/lookahead/index.ts index 83fe9ea839..64ac5a2337 100644 --- a/packages/job-worker/src/playout/lookahead/index.ts +++ b/packages/job-worker/src/playout/lookahead/index.ts @@ -46,15 +46,29 @@ function getPrunedEndedPieceInstances(info: SelectedPartInstanceTimelineInfo) { if (!info.partInstance.timings?.plannedStartedPlayback) { return info.pieceInstances } else { - return info.pieceInstances.filter((p) => !hasPieceInstanceDefinitelyEnded(p, info.nowInPart)) + return info.pieceInstances.filter((p) => !hasPieceInstanceDefinitelyEnded(p, info.partTimes.nowInPart)) } } -function removeInfiniteContinuations(info: PartInstanceAndPieceInstances): PartInstanceAndPieceInstances { +function removeInfiniteContinuations( + info: PartInstanceAndPieceInstances, + isCurrentPart: boolean +): PartInstanceAndPieceInstances { const partId = info.part.part._id return { ...info, // Ignore PieceInstances that continue from the previous part, as they will not need lookahead - allPieces: info.allPieces.filter((inst) => !inst.infinite || inst.piece.startPartId === partId), + allPieces: info.allPieces.filter((inst) => { + // Always include non infinite pieces + if (!inst.infinite) return true + + // Only include rundown owned pieces in the current part + if (!inst.piece.startPartId) { + return isCurrentPart + } + + // Include infinite pieces in the part where they start + return inst.piece.startPartId === partId + }), } } @@ -92,35 +106,44 @@ export async function getLookeaheadObjects( const partInstancesInfo: PartInstanceAndPieceInstances[] = _.compact([ partInstancesInfo0.current - ? removeInfiniteContinuations({ - part: partInstancesInfo0.current.partInstance, - onTimeline: true, - nowInPart: partInstancesInfo0.current.nowInPart, - allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.current), - calculatedTimings: partInstancesInfo0.current.calculatedTimings, - }) + ? removeInfiniteContinuations( + { + part: partInstancesInfo0.current.partInstance, + onTimeline: true, + nowInPart: partInstancesInfo0.current.partTimes.nowInPart, + allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.current), + calculatedTimings: partInstancesInfo0.current.calculatedTimings, + }, + true + ) : undefined, partInstancesInfo0.next - ? removeInfiniteContinuations({ - part: partInstancesInfo0.next.partInstance, - onTimeline: !!partInstancesInfo0.current?.partInstance?.part?.autoNext, //TODO -QL - nowInPart: partInstancesInfo0.next.nowInPart, - allPieces: partInstancesInfo0.next.pieceInstances, - calculatedTimings: partInstancesInfo0.next.calculatedTimings, - }) + ? 
removeInfiniteContinuations( + { + part: partInstancesInfo0.next.partInstance, + onTimeline: !!partInstancesInfo0.current?.partInstance?.part?.autoNext, //TODO -QL + nowInPart: partInstancesInfo0.next.partTimes.nowInPart, + allPieces: partInstancesInfo0.next.pieceInstances, + calculatedTimings: partInstancesInfo0.next.calculatedTimings, + }, + false + ) : undefined, ]) // Track the previous info for checking how the timeline will be built let previousPartInfo: PartInstanceAndPieceInstances | undefined if (partInstancesInfo0.previous) { - previousPartInfo = removeInfiniteContinuations({ - part: partInstancesInfo0.previous.partInstance, - onTimeline: true, - nowInPart: partInstancesInfo0.previous.nowInPart, - allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.previous), - calculatedTimings: partInstancesInfo0.previous.calculatedTimings, - }) + previousPartInfo = removeInfiniteContinuations( + { + part: partInstancesInfo0.previous.partInstance, + onTimeline: true, + nowInPart: partInstancesInfo0.previous.partTimes.nowInPart, + allPieces: getPrunedEndedPieceInstances(partInstancesInfo0.previous), + calculatedTimings: partInstancesInfo0.previous.calculatedTimings, + }, + false + ) } // TODO: Do we need to use processAndPrunePieceInstanceTimings on these pieces? In theory yes, but that gets messy and expensive. @@ -129,6 +152,9 @@ export async function getLookeaheadObjects( const piecesByPart = new Map>() for (const piece of piecesToSearch) { + // Don't lookahead any rundown owned pieces, that should only happen once they become PieceInstances + if (!piece.startPartId) continue + const pieceInstance = wrapPieceToInstance(piece, protectString(''), protectString(''), true) const existing = piecesByPart.get(piece.startPartId) if (existing) { diff --git a/packages/job-worker/src/playout/model/PlayoutModel.ts b/packages/job-worker/src/playout/model/PlayoutModel.ts index e3c710ba17..0c47b429d9 100644 --- a/packages/job-worker/src/playout/model/PlayoutModel.ts +++ b/packages/job-worker/src/playout/model/PlayoutModel.ts @@ -288,13 +288,6 @@ export interface PlayoutModel extends PlayoutModelReadonly, StudioPlayoutModelBa */ queuePartInstanceTimingEvent(partInstanceId: PartInstanceId): void - /** - * Queue a `NotifyCurrentlyPlayingPart` operation to be performed upon completion of this Playout operation - * @param rundownId The Rundown to report the notification to - * @param partInstance The PartInstance the event is in relation to - */ - queueNotifyCurrentlyPlayingPartEvent(rundownId: RundownId, partInstance: PlayoutPartInstanceModel | null): void - /** * Remove all loaded PartInstances marked as `rehearsal` from this RundownPlaylist */ diff --git a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts index 771bcfd752..5c1cfd8c87 100644 --- a/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts +++ b/packages/job-worker/src/playout/model/implementation/PlayoutModelImpl.ts @@ -56,11 +56,9 @@ import { DatabasePersistedModel } from '../../../modelBase.js' import { ExpectedPackageDBFromStudioBaselineObjects } from '@sofie-automation/corelib/dist/dataModel/ExpectedPackages' import { ExpectedPlayoutItemStudio } from '@sofie-automation/corelib/dist/dataModel/ExpectedPlayoutItem' import { StudioBaselineHelper } from '../../../studio/model/StudioBaselineHelper.js' -import { EventsJobs } from '@sofie-automation/corelib/dist/worker/events' import { QuickLoopService } from 
'../services/QuickLoopService.js' import { calculatePartTimings, PartCalculatedTimings } from '@sofie-automation/corelib/dist/playout/timings' import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' -import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { NotificationsModelHelper } from '../../../notifications/NotificationsModelHelper.js' export class PlayoutModelReadonlyImpl implements PlayoutModelReadonly { @@ -283,7 +281,6 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou #timelineHasChanged = false #pendingPartInstanceTimingEvents = new Set() - #pendingNotifyCurrentlyPlayingPartEvent = new Map() get hackDeletedPartInstanceIds(): PartInstanceId[] { const result: PartInstanceId[] = [] @@ -528,14 +525,6 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou this.#pendingPartInstanceTimingEvents.add(partInstanceId) } - queueNotifyCurrentlyPlayingPartEvent(rundownId: RundownId, partInstance: PlayoutPartInstanceModel | null): void { - if (partInstance && partInstance.partInstance.part.shouldNotifyCurrentPlayingPart) { - this.#pendingNotifyCurrentlyPlayingPartEvent.set(rundownId, partInstance.partInstance.part.externalId) - } else if (!partInstance) { - this.#pendingNotifyCurrentlyPlayingPartEvent.set(rundownId, null) - } - } - removeAllRehearsalPartInstances(): void { const partInstancesToRemove: PartInstanceId[] = [] @@ -703,21 +692,6 @@ export class PlayoutModelImpl extends PlayoutModelReadonlyImpl implements Playou } this.#pendingPartInstanceTimingEvents.clear() - for (const [rundownId, partExternalId] of this.#pendingNotifyCurrentlyPlayingPartEvent) { - // This is low-prio, defer so that it's executed well after publications has been updated, - // so that the playout gateway has had the chance to learn about the timeline changes - this.context - .queueEventJob(EventsJobs.NotifyCurrentlyPlayingPart, { - rundownId: rundownId, - isRehearsal: !!this.playlist.rehearsal, - partExternalId: partExternalId, - }) - .catch((e) => { - logger.warn(`Failed to queue NotifyCurrentlyPlayingPart job: ${stringifyError(e)}`) - }) - } - this.#pendingNotifyCurrentlyPlayingPartEvent.clear() - if (span) span.end() } diff --git a/packages/job-worker/src/playout/resolvedPieces.ts b/packages/job-worker/src/playout/resolvedPieces.ts index f28b928506..4bbc96a8c6 100644 --- a/packages/job-worker/src/playout/resolvedPieces.ts +++ b/packages/job-worker/src/playout/resolvedPieces.ts @@ -4,6 +4,7 @@ import { SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyle import { JobContext } from '../jobs/index.js' import { getCurrentTime } from '../lib/index.js' import { + createPartCurrentTimes, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, } from '@sofie-automation/corelib/dist/playout/processAndPrune' @@ -26,15 +27,14 @@ export function getResolvedPiecesForCurrentPartInstance( ): ResolvedPieceInstance[] { if (now === undefined) now = getCurrentTime() - const partStarted = partInstance.partInstance.timings?.plannedStartedPlayback - const nowInPart = partStarted ? 
now - partStarted : 0 + const partTimes = createPartCurrentTimes(now, partInstance.partInstance.timings?.plannedStartedPlayback) const preprocessedPieces = processAndPrunePieceInstanceTimings( sourceLayers, partInstance.pieceInstances.map((p) => p.pieceInstance), - nowInPart + partTimes ) - return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(nowInPart, instance)) + return preprocessedPieces.map((instance) => resolvePrunedPieceInstance(partTimes, instance)) } export function getResolvedPiecesForPartInstancesOnTimeline( @@ -45,7 +45,7 @@ export function getResolvedPiecesForPartInstancesOnTimeline( // With no current part, there are no timings to consider if (!partInstancesInfo.current) return [] - const currentPartStarted = partInstancesInfo.current.partStarted ?? now + const currentPartStarted = partInstancesInfo.current.partTimes.partStartTime ?? now const nextPartStarted = partInstancesInfo.current.partInstance.part.autoNext && @@ -57,9 +57,9 @@ export function getResolvedPiecesForPartInstancesOnTimeline( // Calculate the next part if needed let nextResolvedPieces: ResolvedPieceInstance[] = [] if (partInstancesInfo.next && nextPartStarted != null) { - const nowInPart = partInstancesInfo.next.nowInPart + const partTimes = partInstancesInfo.next.partTimes nextResolvedPieces = partInstancesInfo.next.pieceInstances.map((instance) => - resolvePrunedPieceInstance(nowInPart, instance) + resolvePrunedPieceInstance(partTimes, instance) ) // Translate start to absolute times @@ -67,9 +67,9 @@ export function getResolvedPiecesForPartInstancesOnTimeline( } // Calculate the current part - const nowInCurrentPart = partInstancesInfo.current.nowInPart + const currentPartTimes = partInstancesInfo.current.partTimes const currentResolvedPieces = partInstancesInfo.current.pieceInstances.map((instance) => - resolvePrunedPieceInstance(nowInCurrentPart, instance) + resolvePrunedPieceInstance(currentPartTimes, instance) ) // Translate start to absolute times @@ -77,16 +77,16 @@ export function getResolvedPiecesForPartInstancesOnTimeline( // Calculate the previous part let previousResolvedPieces: ResolvedPieceInstance[] = [] - if (partInstancesInfo.previous?.partStarted) { - const nowInPart = partInstancesInfo.previous.nowInPart + if (partInstancesInfo.previous?.partTimes.partStartTime) { + const partTimes = partInstancesInfo.previous.partTimes previousResolvedPieces = partInstancesInfo.previous.pieceInstances.map((instance) => - resolvePrunedPieceInstance(nowInPart, instance) + resolvePrunedPieceInstance(partTimes, instance) ) // Translate start to absolute times offsetResolvedStartAndCapDuration( previousResolvedPieces, - partInstancesInfo.previous.partStarted, + partInstancesInfo.previous.partTimes.partStartTime, currentPartStarted ) } diff --git a/packages/job-worker/src/playout/setNext.ts b/packages/job-worker/src/playout/setNext.ts index 7134917332..8739e289a3 100644 --- a/packages/job-worker/src/playout/setNext.ts +++ b/packages/job-worker/src/playout/setNext.ts @@ -8,6 +8,7 @@ import { PlayoutPartInstanceModel } from './model/PlayoutPartInstanceModel.js' import { PlayoutSegmentModel } from './model/PlayoutSegmentModel.js' import { fetchPiecesThatMayBeActiveForPart, + getBaselineInfinitesForPart, getPieceInstancesForPart, syncPlayheadInfinitesForNextPartInstance, } from './infinites.js' @@ -283,7 +284,13 @@ async function prepareExistingPartInstanceForBeingNexted( playoutModel: PlayoutModel, instance: PlayoutPartInstanceModel ): Promise { - await 
syncPlayheadInfinitesForNextPartInstance(context, playoutModel, playoutModel.currentPartInstance, instance) + await syncPlayheadInfinitesForNextPartInstance( + context, + playoutModel, + undefined, // Any ingest model must have been fully written before we get here + playoutModel.currentPartInstance, + instance + ) return instance } @@ -297,6 +304,8 @@ async function preparePartInstanceForPartBeingNexted( const rundown = playoutModel.getRundown(nextPart.rundownId) if (!rundown) throw new Error(`Could not find rundown ${nextPart.rundownId}`) + const partInstanceId = protectString('') // Replaced inside playoutModel.createInstanceForPart + const possiblePieces = await fetchPiecesThatMayBeActiveForPart(context, playoutModel, undefined, nextPart) const newPieceInstances = getPieceInstancesForPart( context, @@ -305,9 +314,21 @@ async function preparePartInstanceForPartBeingNexted( rundown, nextPart, possiblePieces, - protectString('') // Replaced inside playoutModel.createInstanceForPart + partInstanceId ) + if (currentPartInstance === null) { + // This is the first take of the rundown, ensure the baseline infinites are loaded + const baselineInfinites = await getBaselineInfinitesForPart( + context, + playoutModel, + undefined, // Any ingest model must have been fully written before we get here + nextPart, + partInstanceId + ) + newPieceInstances.push(...baselineInfinites) + } + return playoutModel.createInstanceForPart(nextPart, newPieceInstances) } diff --git a/packages/job-worker/src/playout/snapshot.ts b/packages/job-worker/src/playout/snapshot.ts index 97a5f8230e..c173f2cb29 100644 --- a/packages/job-worker/src/playout/snapshot.ts +++ b/packages/job-worker/src/playout/snapshot.ts @@ -193,7 +193,6 @@ export async function handleRestorePlaylistSnapshot( rundownId: rd._id, } rd.studioId = snapshot.playlist.studioId - rd.notifiedCurrentPlayingPartExternalId = undefined } // TODO: This is too naive. Ideally we should unset it if it isnt valid, as anything other than a match is likely to have issues. 
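// --- Editorial aside (not part of the patch) ---------------------------------
// The setNext.ts hunk above only pulls in rundown-global ("baseline") pieces on
// the very first take, i.e. when there is no current PartInstance; afterwards
// syncPlayheadInfinitesForNextPartInstance carries them forward. The sketch
// below is a minimal, hypothetical illustration of that decision, assuming the
// behaviour shown in getBaselineInfinitesForPart earlier in this diff. All type
// and function names here (PieceLike, wrapGlobalPieceAsInfinite, etc.) are
// stand-ins, not the real Sofie/corelib APIs.

interface PieceLike {
	_id: string
	startPartId: string | null // null => rundown-global ("baseline") piece
}

interface PieceInstanceLike {
	_id: string
	piece: PieceLike
	infinite?: { infinitePieceId: string; fromPreviousPart: boolean }
}

// Wrap a rundown-global piece so it behaves like an infinite that was already
// playing before this part, mirroring what the new helper in infinites.ts does.
function wrapGlobalPieceAsInfinite(piece: PieceLike, partInstanceId: string): PieceInstanceLike {
	return {
		_id: `${partInstanceId}_${piece._id}`,
		piece,
		infinite: { infinitePieceId: piece._id, fromPreviousPart: true },
	}
}

// Only the first take of a rundown needs the extra wrapping step; later takes
// inherit these PieceInstances from the previous playhead position.
function collectPieceInstancesForNextedPart(
	hasCurrentPartInstance: boolean,
	normalPieceInstances: PieceInstanceLike[],
	globalPieces: PieceLike[],
	partInstanceId: string
): PieceInstanceLike[] {
	const result = [...normalPieceInstances]
	if (!hasCurrentPartInstance) {
		result.push(...globalPieces.filter((p) => p.startPartId === null).map((p) => wrapGlobalPieceAsInfinite(p, partInstanceId)))
	}
	return result
}
// --- End editorial aside ------------------------------------------------------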
@@ -236,9 +235,10 @@ export async function handleRestorePlaylistSnapshot( delete pieceOld.rundownId } if (pieceOld.partId) { - piece.startPartId = pieceOld.partId + const partId = pieceOld.partId + piece.startPartId = partId delete pieceOld.partId - piece.startSegmentId = partSegmentIds[unprotectString(piece.startPartId)] + piece.startSegmentId = partSegmentIds[unprotectString(partId)] } } @@ -285,14 +285,18 @@ export async function handleRestorePlaylistSnapshot( for (const piece of snapshot.pieces) { const oldId = piece._id piece.startRundownId = getNewRundownId(piece.startRundownId) - piece.startPartId = partIdMap.getOrGenerateAndWarn( - piece.startPartId, - `piece.startPartId=${piece.startPartId} of piece=${piece._id}` - ) - piece.startSegmentId = segmentIdMap.getOrGenerateAndWarn( - piece.startSegmentId, - `piece.startSegmentId=${piece.startSegmentId} of piece=${piece._id}` - ) + if (piece.startPartId) { + piece.startPartId = partIdMap.getOrGenerateAndWarn( + piece.startPartId, + `piece.startPartId=${piece.startPartId} of piece=${piece._id}` + ) + } + if (piece.startSegmentId) { + piece.startSegmentId = segmentIdMap.getOrGenerateAndWarn( + piece.startSegmentId, + `piece.startSegmentId=${piece.startSegmentId} of piece=${piece._id}` + ) + } piece._id = getRandomId() pieceIdMap.set(oldId, piece._id) } @@ -344,7 +348,8 @@ export async function handleRestorePlaylistSnapshot( case ExpectedPackageDBType.ADLIB_PIECE: case ExpectedPackageDBType.ADLIB_ACTION: case ExpectedPackageDBType.BASELINE_ADLIB_PIECE: - case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: { + case ExpectedPackageDBType.BASELINE_ADLIB_ACTION: + case ExpectedPackageDBType.BASELINE_PIECE: { expectedPackage.pieceId = pieceIdMap.getOrGenerateAndWarn( expectedPackage.pieceId, `expectedPackage.pieceId=${expectedPackage.pieceId}` diff --git a/packages/job-worker/src/playout/take.ts b/packages/job-worker/src/playout/take.ts index 67b8f51742..036b3bb053 100644 --- a/packages/job-worker/src/playout/take.ts +++ b/packages/job-worker/src/playout/take.ts @@ -22,7 +22,10 @@ import { WrappedShowStyleBlueprint } from '../blueprints/cache.js' import { innerStopPieces } from './adlibUtils.js' import { reportPartInstanceHasStarted, reportPartInstanceHasStopped } from './timings/partPlayback.js' import { convertPartInstanceToBlueprints, convertResolvedPieceInstanceToBlueprints } from '../blueprints/context/lib.js' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { TakeNextPartProps } from '@sofie-automation/corelib/dist/worker/studio' import { runJobWithPlayoutModel } from './lock.js' import _ from 'underscore' @@ -541,10 +544,11 @@ export function updatePartInstanceOnTake( } // calculate and cache playout timing properties, so that we don't depend on the previousPartInstance: + const partTimes = createPartCurrentTimes(getCurrentTime(), null) const tmpTakePieces = processAndPrunePieceInstanceTimings( showStyle.sourceLayers, takePartInstance.pieceInstances.map((p) => p.pieceInstance), - 0 + partTimes ) const partPlayoutTimings = playoutModel.calculatePartTimings(currentPartInstance, takePartInstance, tmpTakePieces) @@ -554,7 +558,7 @@ export function updatePartInstanceOnTake( export async function afterTake( context: JobContext, playoutModel: PlayoutModel, - takePartInstance: PlayoutPartInstanceModel + _takePartInstance: 
PlayoutPartInstanceModel ): Promise { const span = context.startSpan('afterTake') // This function should be called at the end of a "take" event (when the Parts have been updated) @@ -562,8 +566,6 @@ export async function afterTake( await updateTimeline(context, playoutModel) - playoutModel.queueNotifyCurrentlyPlayingPartEvent(takePartInstance.partInstance.rundownId, takePartInstance) - if (span) span.end() } diff --git a/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts b/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts index 18322e9567..b45ef32750 100644 --- a/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts +++ b/packages/job-worker/src/playout/timeline/__tests__/rundown.test.ts @@ -10,7 +10,10 @@ import { DBPart } from '@sofie-automation/corelib/dist/dataModel/Part' import { transformTimeline } from '@sofie-automation/corelib/dist/playout/timeline' import { deleteAllUndefinedProperties, getRandomId } from '@sofie-automation/corelib/dist/lib' import { PieceInstance, PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' -import { PieceInstanceWithTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + PieceInstanceWithTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { EmptyPieceTimelineObjectsBlob } from '@sofie-automation/corelib/dist/dataModel/Piece' import { IBlueprintPieceType, PieceLifespan } from '@sofie-automation/blueprints-integration' import { getPartGroupId } from '@sofie-automation/corelib/dist/playout/ids' @@ -70,6 +73,8 @@ function transformTimelineIntoSimplifiedForm(res: RundownTimelineResult) { * inside of this will have their own tests to stress difference scenarios. 
*/ describe('buildTimelineObjsForRundown', () => { + const currentTime = 5678 + function createMockPlaylist(selectedPartInfos: SelectedPartInstancesTimelineInfo): DBRundownPlaylist { function convertSelectedPartInstance( info: SelectedPartInstanceTimelineInfo | undefined @@ -196,8 +201,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -217,8 +221,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -243,8 +246,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', {}, @@ -277,16 +279,14 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance('part1'), pieceInstances: [createMockPieceInstance('piece1')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -312,16 +312,14 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0', { autoNext: true, expectedDuration: 5000 }), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance('part1'), pieceInstances: [createMockPieceInstance('piece1')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -347,8 +345,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -363,8 +360,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -391,8 +387,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 9999, - partStarted: 1234, + partTimes: 
createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -407,8 +402,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: { @@ -441,8 +435,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -462,8 +455,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: { @@ -496,16 +488,14 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0', { autoNext: true, expectedDuration: 5000 }), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance('part1'), pieceInstances: [createMockPieceInstance('piece1')], calculatedTimings: { @@ -540,8 +530,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -561,8 +550,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, @@ -601,8 +589,7 @@ describe('buildTimelineObjsForRundown', () => { describe('infinite pieces', () => { const PREVIOUS_PART_INSTANCE: SelectedPartInstanceTimelineInfo = { - nowInPart: 9999, - partStarted: 1234, + partTimes: createPartCurrentTimes(currentTime, 1234), partInstance: createMockPartInstance( 'part9', { autoNext: true, expectedDuration: 5000 }, @@ -623,8 +610,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { previous: PREVIOUS_PART_INSTANCE, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [ createMockPieceInstance('piece0'), @@ -655,8 +641,7 @@ describe('buildTimelineObjsForRundown', () => { ], }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -684,8 +669,7 @@ describe('buildTimelineObjsForRundown', () => { ], }, current: { - 
nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0')], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -712,8 +696,7 @@ describe('buildTimelineObjsForRundown', () => { pieceInstances: [...PREVIOUS_PART_INSTANCE.pieceInstances, infinitePiece], }, current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance('part0'), pieceInstances: [createMockPieceInstance('piece0'), continueInfinitePiece(infinitePiece)], calculatedTimings: DEFAULT_PART_TIMINGS, @@ -736,8 +719,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -752,8 +734,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, @@ -782,8 +763,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -798,8 +778,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, @@ -831,8 +810,7 @@ describe('buildTimelineObjsForRundown', () => { const selectedPartInfos: SelectedPartInstancesTimelineInfo = { current: { - nowInPart: 1234, - partStarted: 5678, + partTimes: createPartCurrentTimes(currentTime, 5678), partInstance: createMockPartInstance( 'part0', { autoNext: true, expectedDuration: 5000 }, @@ -850,8 +828,7 @@ describe('buildTimelineObjsForRundown', () => { regenerateTimelineAt: undefined, }, next: { - nowInPart: 0, - partStarted: undefined, + partTimes: createPartCurrentTimes(currentTime, undefined), partInstance: createMockPartInstance( 'part1', {}, diff --git a/packages/job-worker/src/playout/timeline/generate.ts b/packages/job-worker/src/playout/timeline/generate.ts index 7661c773fe..c4c85fcdb3 100644 --- a/packages/job-worker/src/playout/timeline/generate.ts +++ b/packages/job-worker/src/playout/timeline/generate.ts @@ -22,6 +22,8 @@ import { getResolvedPiecesForPartInstancesOnTimeline } from '../resolvedPieces.j import { processAndPrunePieceInstanceTimings, PieceInstanceWithTimings, + createPartCurrentTimes, + PartCurrentTimes, } from '@sofie-automation/corelib/dist/playout/processAndPrune' import { StudioPlayoutModel, StudioPlayoutModelBase } from '../../studio/model/StudioPlayoutModel.js' import { getLookeaheadObjects } from '../lookahead/index.js' @@ -41,8 +43,11 @@ import { getPartTimingsOrDefaults, PartCalculatedTimings } from '@sofie-automati import { applyAbPlaybackForTimeline } from '../abPlayback/index.js' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' import { PlayoutPartInstanceModel } from '../model/PlayoutPartInstanceModel.js' -import { PersistentPlayoutStateStore } from 
'../../blueprints/context/services/PersistantStateStore.js' import { PlayoutChangedType } from '@sofie-automation/shared-lib/dist/peripheralDevice/peripheralDeviceAPI' +import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' +import { PersistentPlayoutStateStore } from '../../blueprints/context/services/PersistantStateStore.js' + +const DEFAULT_ABSOLUTE_PIECE_PREPARE_TIME = 30000 function isModelForStudio(model: StudioPlayoutModelBase): model is StudioPlayoutModel { const tmp = model as StudioPlayoutModel @@ -246,8 +251,7 @@ export interface SelectedPartInstancesTimelineInfo { next?: SelectedPartInstanceTimelineInfo } export interface SelectedPartInstanceTimelineInfo { - nowInPart: number - partStarted: number | undefined + partTimes: PartCurrentTimes partInstance: ReadonlyDeep pieceInstances: PieceInstanceWithTimings[] calculatedTimings: PartCalculatedTimings @@ -255,29 +259,44 @@ export interface SelectedPartInstanceTimelineInfo { } function getPartInstanceTimelineInfo( + absolutePiecePrepareTime: number, currentTime: Time, sourceLayers: SourceLayers, partInstance: PlayoutPartInstanceModel | null ): SelectedPartInstanceTimelineInfo | undefined { if (!partInstance) return undefined - const partStarted = partInstance.partInstance.timings?.plannedStartedPlayback - const nowInPart = partStarted === undefined ? 0 : currentTime - partStarted - const pieceInstances = processAndPrunePieceInstanceTimings( - sourceLayers, - partInstance.pieceInstances.map((p) => p.pieceInstance), - nowInPart - ) + const partTimes = createPartCurrentTimes(currentTime, partInstance.partInstance.timings?.plannedStartedPlayback) + + let regenerateTimelineAt: Time | undefined = undefined + + const rawPieceInstances: ReadonlyDeep[] = [] + for (const { pieceInstance } of partInstance.pieceInstances) { + if ( + pieceInstance.piece.enable.isAbsolute && + typeof pieceInstance.piece.enable.start === 'number' && + pieceInstance.piece.enable.start > currentTime + absolutePiecePrepareTime + ) { + // This absolute timed piece is starting too far in the future, ignore it + regenerateTimelineAt = Math.min( + regenerateTimelineAt ?? 
Number.POSITIVE_INFINITY, + pieceInstance.piece.enable.start - absolutePiecePrepareTime + ) + + continue + } + + rawPieceInstances.push(pieceInstance) + } const partInstanceWithOverrides = partInstance.getPartInstanceWithQuickLoopOverrides() return { partInstance: partInstanceWithOverrides, - pieceInstances, - nowInPart, - partStarted, + pieceInstances: processAndPrunePieceInstanceTimings(sourceLayers, rawPieceInstances, partTimes), + partTimes, // Approximate `calculatedTimings`, for the partInstances which already have it cached - calculatedTimings: getPartTimingsOrDefaults(partInstanceWithOverrides, pieceInstances), - regenerateTimelineAt: undefined, // Future use + calculatedTimings: getPartTimingsOrDefaults(partInstanceWithOverrides, rawPieceInstances), + regenerateTimelineAt, } } @@ -318,10 +337,27 @@ async function getTimelineRundown( } const currentTime = getCurrentTime() + const absolutePiecePrepareTime = + context.studio.settings.rundownGlobalPiecesPrepareTime || DEFAULT_ABSOLUTE_PIECE_PREPARE_TIME const partInstancesInfo: SelectedPartInstancesTimelineInfo = { - current: getPartInstanceTimelineInfo(currentTime, showStyle.sourceLayers, currentPartInstance), - next: getPartInstanceTimelineInfo(currentTime, showStyle.sourceLayers, nextPartInstance), - previous: getPartInstanceTimelineInfo(currentTime, showStyle.sourceLayers, previousPartInstance), + current: getPartInstanceTimelineInfo( + absolutePiecePrepareTime, + currentTime, + showStyle.sourceLayers, + currentPartInstance + ), + next: getPartInstanceTimelineInfo( + absolutePiecePrepareTime, + currentTime, + showStyle.sourceLayers, + nextPartInstance + ), + previous: getPartInstanceTimelineInfo( + absolutePiecePrepareTime, + currentTime, + showStyle.sourceLayers, + previousPartInstance + ), } if (partInstancesInfo.next && nextPartInstance) { diff --git a/packages/job-worker/src/playout/timeline/multi-gateway.ts b/packages/job-worker/src/playout/timeline/multi-gateway.ts index dbc29f9327..9db7d43267 100644 --- a/packages/job-worker/src/playout/timeline/multi-gateway.ts +++ b/packages/job-worker/src/playout/timeline/multi-gateway.ts @@ -343,18 +343,24 @@ function setPlannedTimingsOnPieceInstance( } if (typeof pieceInstance.pieceInstance.piece.enable.start === 'number') { - const plannedStart = partPlannedStart + pieceInstance.pieceInstance.piece.enable.start + const plannedStart = + (pieceInstance.pieceInstance.piece.enable.isAbsolute ? 0 : partPlannedStart) + + pieceInstance.pieceInstance.piece.enable.start pieceInstance.setPlannedStartedPlayback(plannedStart) const userDurationEnd = pieceInstance.pieceInstance.userDuration && 'endRelativeToPart' in pieceInstance.pieceInstance.userDuration ? pieceInstance.pieceInstance.userDuration.endRelativeToPart : null - const plannedEnd = - userDurationEnd ?? - (pieceInstance.pieceInstance.piece.enable.duration - ? plannedStart + pieceInstance.pieceInstance.piece.enable.duration - : partPlannedEnd) + + let plannedEnd: number | undefined = userDurationEnd ?? 
undefined + if (plannedEnd === undefined) { + if (pieceInstance.pieceInstance.piece.enable.duration !== undefined) { + plannedEnd = plannedStart + pieceInstance.pieceInstance.piece.enable.duration + } else if (!pieceInstance.pieceInstance.piece.enable.isAbsolute) { + plannedEnd = partPlannedEnd + } + } pieceInstance.setPlannedStoppedPlayback(plannedEnd) } diff --git a/packages/job-worker/src/playout/timeline/part.ts b/packages/job-worker/src/playout/timeline/part.ts index fc2d546100..b697f7da27 100644 --- a/packages/job-worker/src/playout/timeline/part.ts +++ b/packages/job-worker/src/playout/timeline/part.ts @@ -32,7 +32,7 @@ export function transformPartIntoTimeline( ): Array { const span = context.startSpan('transformPartIntoTimeline') - const nowInParentGroup = partInfo.nowInPart + const nowInParentGroup = partInfo.partTimes.nowInPart const partTimings = partInfo.calculatedTimings const outTransition = partInfo.partInstance.part.outTransition ?? null diff --git a/packages/job-worker/src/playout/timeline/pieceGroup.ts b/packages/job-worker/src/playout/timeline/pieceGroup.ts index cd6a5318e8..aa7f379dfb 100644 --- a/packages/job-worker/src/playout/timeline/pieceGroup.ts +++ b/packages/job-worker/src/playout/timeline/pieceGroup.ts @@ -136,8 +136,8 @@ export function createPieceGroupAndCap( let resolvedEndCap: number | string | undefined // If the start has been adjusted, the end needs to be updated to compensate if (typeof pieceInstance.resolvedEndCap === 'number') { - resolvedEndCap = pieceInstance.resolvedEndCap + (pieceStartOffset ?? 0) - } else if (pieceInstance.resolvedEndCap) { + resolvedEndCap = pieceInstance.resolvedEndCap - (pieceStartOffset ?? 0) + } else if (pieceInstance.resolvedEndCap || controlObj.enable.end === 'now') { // TODO - there could already be a piece with a cap of 'now' that we could use as our end time // As the cap is for 'now', rather than try to get tsr to understand `end: 'now'`, we can create a 'now' object to tranlate it const nowObj = literal>({ @@ -157,7 +157,13 @@ export function createPieceGroupAndCap( priority: 0, }) capObjs.push(nowObj) - resolvedEndCap = `#${nowObj.id}.start + ${pieceInstance.resolvedEndCap.offsetFromNow}` + + resolvedEndCap = `#${nowObj.id}.start + ${pieceInstance.resolvedEndCap?.offsetFromNow ?? 
0}` + + // If the object has an end of now, we can remove it as it will be replaced by the `resolvedEndCap` + if (controlObj.enable.end === 'now') { + delete controlObj.enable.end + } } if (controlObj.enable.duration !== undefined || controlObj.enable.end !== undefined) { diff --git a/packages/job-worker/src/playout/timeline/rundown.ts b/packages/job-worker/src/playout/timeline/rundown.ts index ba92dd2e21..a473ed066f 100644 --- a/packages/job-worker/src/playout/timeline/rundown.ts +++ b/packages/job-worker/src/playout/timeline/rundown.ts @@ -275,9 +275,25 @@ function generateCurrentInfinitePieceObjects( return [] } - const infiniteGroup = createPartGroup(currentPartInfo.partInstance, { - start: `#${timingContext.currentPartGroup.id}.start`, // This gets overriden with a concrete time if the original piece is known to have already started - }) + const { infiniteGroupEnable, pieceEnable, nowInParent } = calculateInfinitePieceEnable( + currentPartInfo, + timingContext, + pieceInstance, + currentTime, + currentPartInstanceTimings + ) + + const { pieceInstanceWithUpdatedEndCap, cappedInfiniteGroupEnable } = applyInfinitePieceGroupEndCap( + currentPartInfo, + timingContext, + pieceInstance, + infiniteGroupEnable, + currentPartInstanceTimings, + nextPartInstanceTimings, + nextPartInfinites.get(pieceInstance.infinite.infiniteInstanceId) + ) + + const infiniteGroup = createPartGroup(currentPartInfo.partInstance, cappedInfiniteGroupEnable) infiniteGroup.id = getInfinitePartGroupId(pieceInstance._id) // This doesnt want to belong to a part, so force the ids infiniteGroup.priority = 1 @@ -287,6 +303,34 @@ groupClasses.push('continues_infinite') } + // Still show objects flagged as 'HoldMode.EXCEPT' if this is an infinite continuation as they belong to the previous too + const isOriginOfInfinite = pieceInstance.piece.startPartId !== currentPartInfo.partInstance.part._id + const isInHold = activePlaylist.holdState === RundownHoldState.ACTIVE + + return [ + infiniteGroup, + ...transformPieceGroupAndObjects( + activePlaylist._id, + infiniteGroup, + nowInParent, + pieceInstanceWithUpdatedEndCap, + pieceEnable, + 0, + groupClasses, + isInHold, + isOriginOfInfinite + ), + ] +} + +function calculateInfinitePieceEnable( + currentPartInfo: SelectedPartInstanceTimelineInfo, + timingContext: RundownTimelineTimingContext, + pieceInstance: ReadonlyDeep, + // infiniteGroup: TimelineObjGroupPart & OnGenerateTimelineObjExt, + currentTime: number, + currentPartInstanceTimings: PartCalculatedTimings +) { const pieceEnable = getPieceEnableInsidePart( pieceInstance, currentPartInstanceTimings, @@ -295,8 +339,28 @@ timingContext.currentPartGroup.enable.duration !== undefined ) - let nowInParent = currentPartInfo.nowInPart // Where is 'now' inside of the infiniteGroup? - if (pieceInstance.plannedStartedPlayback !== undefined) { + let infiniteGroupEnable: PartEnable = { + start: `#${timingContext.currentPartGroup.id}.start`, // This gets overridden with a concrete time if the original piece is known to have already started + } + + let nowInParent = currentPartInfo.partTimes.nowInPart // Where is 'now' inside of the infiniteGroup? + if (pieceInstance.piece.enable.isAbsolute) { + // Piece is absolute, so we should use the absolute time. This is a special case for pieces belonging to the rundown directly. + + const infiniteGroupStart = pieceInstance.plannedStartedPlayback ?? 
pieceInstance.piece.enable.start + + if (typeof infiniteGroupStart === 'number') { + nowInParent = currentTime - infiniteGroupStart + } else { + // We should never hit this, but in case start is "now" + nowInParent = 0 + } + + infiniteGroupEnable = { start: infiniteGroupStart } + pieceEnable.start = 0 + + // Future: should this consider the prerollDuration? + } else if (pieceInstance.plannedStartedPlayback !== undefined) { // We have a absolute start time, so we should use that. let infiniteGroupStart = pieceInstance.plannedStartedPlayback nowInParent = currentTime - pieceInstance.plannedStartedPlayback @@ -313,30 +377,47 @@ function generateCurrentInfinitePieceObjects( pieceEnable.start = 0 } - infiniteGroup.enable = { start: infiniteGroupStart } + infiniteGroupEnable = { start: infiniteGroupStart } // If an end time has been set by a hotkey, then update the duration to be correct if (pieceInstance.userDuration && pieceInstance.piece.enable.start !== 'now') { if ('endRelativeToPart' in pieceInstance.userDuration) { - infiniteGroup.enable.duration = + infiniteGroupEnable.duration = pieceInstance.userDuration.endRelativeToPart - pieceInstance.piece.enable.start } else { - infiniteGroup.enable.end = 'now' + infiniteGroupEnable.end = 'now' } } } + return { + pieceEnable, + infiniteGroupEnable, + nowInParent, + } +} + +function applyInfinitePieceGroupEndCap( + currentPartInfo: SelectedPartInstanceTimelineInfo, + timingContext: RundownTimelineTimingContext, + pieceInstance: ReadonlyDeep, + infiniteGroupEnable: Readonly, + currentPartInstanceTimings: PartCalculatedTimings, + nextPartInstanceTimings: PartCalculatedTimings | null, + infiniteInNextPart: PieceInstanceWithTimings | undefined +) { + const cappedInfiniteGroupEnable: PartEnable = { ...infiniteGroupEnable } + // If this infinite piece continues to the next part, and has a duration then we should respect that in case it is really close to the take const hasDurationOrEnd = (enable: TSR.Timeline.TimelineEnable) => enable.duration !== undefined || enable.end !== undefined - const infiniteInNextPart = nextPartInfinites.get(pieceInstance.infinite.infiniteInstanceId) if ( infiniteInNextPart && - !hasDurationOrEnd(infiniteGroup.enable) && + !hasDurationOrEnd(cappedInfiniteGroupEnable) && hasDurationOrEnd(infiniteInNextPart.piece.enable) ) { // infiniteGroup.enable.end = infiniteInNextPart.piece.enable.end - infiniteGroup.enable.duration = infiniteInNextPart.piece.enable.duration + cappedInfiniteGroupEnable.duration = infiniteInNextPart.piece.enable.duration } const pieceInstanceWithUpdatedEndCap: PieceInstanceWithTimings = { ...pieceInstance } @@ -344,16 +425,16 @@ function generateCurrentInfinitePieceObjects( if (pieceInstance.resolvedEndCap) { // If the cap is a number, it is relative to the part, not the parent group so needs to be handled here if (typeof pieceInstance.resolvedEndCap === 'number') { - infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.start + ${pieceInstance.resolvedEndCap}` - delete infiniteGroup.enable.duration + cappedInfiniteGroupEnable.end = `#${timingContext.currentPartGroup.id}.start + ${pieceInstance.resolvedEndCap}` + delete cappedInfiniteGroupEnable.duration delete pieceInstanceWithUpdatedEndCap.resolvedEndCap } } else if ( // If this piece does not continue in the next part, then set it to end with the part it belongs to !infiniteInNextPart && currentPartInfo.partInstance.part.autoNext && - infiniteGroup.enable.duration === undefined && - infiniteGroup.enable.end === undefined + 
cappedInfiniteGroupEnable.duration === undefined && + cappedInfiniteGroupEnable.end === undefined ) { let endOffset = 0 @@ -365,27 +446,10 @@ function generateCurrentInfinitePieceObjects( endOffset -= nextPartInstanceTimings.fromPartKeepalive // cap relative to the currentPartGroup - infiniteGroup.enable.end = `#${timingContext.currentPartGroup.id}.end + ${endOffset}` + cappedInfiniteGroupEnable.end = `#${timingContext.currentPartGroup.id}.end + ${endOffset}` } - // Still show objects flagged as 'HoldMode.EXCEPT' if this is a infinite continuation as they belong to the previous too - const isOriginOfInfinite = pieceInstance.piece.startPartId !== currentPartInfo.partInstance.part._id - const isInHold = activePlaylist.holdState === RundownHoldState.ACTIVE - - return [ - infiniteGroup, - ...transformPieceGroupAndObjects( - activePlaylist._id, - infiniteGroup, - nowInParent, - pieceInstanceWithUpdatedEndCap, - pieceEnable, - 0, - groupClasses, - isInHold, - isOriginOfInfinite - ), - ] + return { pieceInstanceWithUpdatedEndCap, cappedInfiniteGroupEnable } } function generatePreviousPartInstanceObjects( diff --git a/packages/job-worker/src/playout/timings/piecePlayback.ts b/packages/job-worker/src/playout/timings/piecePlayback.ts index 66e466403f..fc2bd2bd1c 100644 --- a/packages/job-worker/src/playout/timings/piecePlayback.ts +++ b/packages/job-worker/src/playout/timings/piecePlayback.ts @@ -23,24 +23,19 @@ export function onPiecePlaybackStarted( ): void { const playlist = playoutModel.playlist + if (!playlist.activationId) { + logger.warn(`onPiecePlaybackStarted: Received for inactive RundownPlaylist "${playlist._id}"`) + return + } + const partInstance = playoutModel.getPartInstance(data.partInstanceId) if (!partInstance) { - if (!playlist.activationId) { - logger.warn(`onPiecePlaybackStarted: Received for inactive RundownPlaylist "${playlist._id}"`) - } else { - throw new Error(`PartInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) - } - return + throw new Error(`PartInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) } const pieceInstance = partInstance.getPieceInstance(data.pieceInstanceId) if (!pieceInstance) { - if (!playlist.activationId) { - logger.warn(`onPiecePlaybackStarted: Received for inactive RundownPlaylist "${playlist._id}"`) - } else { - throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) - } - return + throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) } const isPlaying = !!( @@ -75,6 +70,11 @@ export function onPiecePlaybackStopped( ): void { const playlist = playoutModel.playlist + if (!playlist.activationId) { + logger.warn(`onPiecePlaybackStopped: Received for inactive RundownPlaylist "${playlist._id}"`) + return + } + const partInstance = playoutModel.getPartInstance(data.partInstanceId) if (!partInstance) { // PartInstance not found, so we can rely on the onPartPlaybackStopped callback erroring @@ -83,12 +83,7 @@ export function onPiecePlaybackStopped( const pieceInstance = partInstance.getPieceInstance(data.pieceInstanceId) if (!pieceInstance) { - if (!playlist.activationId) { - logger.warn(`onPiecePlaybackStopped: Received for inactive RundownPlaylist "${playlist._id}"`) - } else { - throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" not found!`) - } - return + throw new Error(`PieceInstance "${data.partInstanceId}" in RundownPlaylist "${playlist._id}" 
not found!`) } const isPlaying = !!( @@ -171,6 +166,8 @@ function reportPieceHasStopped( pieceInstance.setPlannedStoppedPlayback(timestamp) } - playoutModel.queuePartInstanceTimingEvent(pieceInstance.pieceInstance.partInstanceId) + if (pieceInstance.pieceInstance.partInstanceId) { + playoutModel.queuePartInstanceTimingEvent(pieceInstance.pieceInstance.partInstanceId) + } } } diff --git a/packages/job-worker/src/workers/events/jobs.ts b/packages/job-worker/src/workers/events/jobs.ts index 1cd2abed7d..272e682633 100644 --- a/packages/job-worker/src/workers/events/jobs.ts +++ b/packages/job-worker/src/workers/events/jobs.ts @@ -1,10 +1,6 @@ import { JobContext } from '../../jobs/index.js' import { EventsJobFunc, EventsJobs } from '@sofie-automation/corelib/dist/worker/events' -import { - handleNotifyCurrentlyPlayingPart, - handlePartInstanceTimings, - handleRundownDataHasChanged, -} from '../../events/handle.js' +import { handlePartInstanceTimings, handleRundownDataHasChanged } from '../../events/handle.js' type ExecutableFunction = ( context: JobContext, @@ -18,5 +14,4 @@ export type EventsJobHandlers = { export const eventJobHandlers: EventsJobHandlers = { [EventsJobs.PartInstanceTimings]: handlePartInstanceTimings, [EventsJobs.RundownDataChanged]: handleRundownDataHasChanged, - [EventsJobs.NotifyCurrentlyPlayingPart]: handleNotifyCurrentlyPlayingPart, } diff --git a/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts b/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts index 8861d109d9..ae58442274 100644 --- a/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts +++ b/packages/live-status-gateway/src/collections/pieceInstancesHandler.ts @@ -9,6 +9,7 @@ import _ from 'underscore' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { PartInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { + createPartCurrentTimes, PieceInstanceWithTimings, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, @@ -88,25 +89,24 @@ export class PieceInstancesHandler extends PublicationCollection< filterActive: boolean ): PieceInstanceWithTimings[] { // Approximate when 'now' is in the PartInstance, so that any adlibbed Pieces will be timed roughly correctly - const partStarted = partInstance?.timings?.plannedStartedPlayback - const nowInPart = partStarted === undefined ? 
0 : Date.now() - partStarted + const partTimes = createPartCurrentTimes(Date.now(), partInstance?.timings?.plannedStartedPlayback) const prunedPieceInstances = processAndPrunePieceInstanceTimings( this._sourceLayers, pieceInstances, - nowInPart, - false, + partTimes, false ) if (!filterActive) return prunedPieceInstances return prunedPieceInstances.filter((pieceInstance) => { - const resolvedPieceInstance = resolvePrunedPieceInstance(nowInPart, pieceInstance) + const resolvedPieceInstance = resolvePrunedPieceInstance(partTimes, pieceInstance) return ( - resolvedPieceInstance.resolvedStart <= nowInPart && + resolvedPieceInstance.resolvedStart <= partTimes.nowInPart && (resolvedPieceInstance.resolvedDuration == null || - resolvedPieceInstance.resolvedStart + resolvedPieceInstance.resolvedDuration > nowInPart) && + resolvedPieceInstance.resolvedStart + resolvedPieceInstance.resolvedDuration > + partTimes.nowInPart) && pieceInstance.piece.virtual !== true && pieceInstance.disabled !== true ) diff --git a/packages/meteor-lib/src/api/pubsub.ts b/packages/meteor-lib/src/api/pubsub.ts index 75da6e0765..591de220be 100644 --- a/packages/meteor-lib/src/api/pubsub.ts +++ b/packages/meteor-lib/src/api/pubsub.ts @@ -2,6 +2,7 @@ import { BucketId, OrganizationId, PartId, + PeripheralDeviceId, RundownId, RundownPlaylistActivationId, RundownPlaylistId, @@ -106,6 +107,11 @@ export enum MeteorPubSub { */ timelineForStudio = 'timelineForStudio', + /** + * Ingest status of rundowns for a PeripheralDevice + */ + ingestDeviceRundownStatusTestTool = 'ingestDeviceRundownStatusTestTool', + /** * Fetch the simplified playout UI view of the specified ShowStyleBase */ @@ -202,6 +208,11 @@ export interface MeteorPubSubTypes { studioId: StudioId, token?: string ) => PeripheralDevicePubSubCollectionsNames.studioTimeline + + [MeteorPubSub.ingestDeviceRundownStatusTestTool]: ( + peripheralDeviceId: PeripheralDeviceId + ) => PeripheralDevicePubSubCollectionsNames.ingestRundownStatus + [MeteorPubSub.uiShowStyleBase]: (showStyleBaseId: ShowStyleBaseId) => CustomCollectionName.UIShowStyleBase /** Subscribe to one or all studios */ [MeteorPubSub.uiStudio]: (studioId: StudioId | null) => CustomCollectionName.UIStudio diff --git a/packages/meteor-lib/src/triggers/RundownViewEventBus.ts b/packages/meteor-lib/src/triggers/RundownViewEventBus.ts index 60b575aeba..5cb11ce75c 100644 --- a/packages/meteor-lib/src/triggers/RundownViewEventBus.ts +++ b/packages/meteor-lib/src/triggers/RundownViewEventBus.ts @@ -29,6 +29,7 @@ export enum RundownViewEvents { REVEAL_IN_SHELF = 'revealInShelf', SWITCH_SHELF_TAB = 'switchShelfTab', SHELF_STATE = 'shelfState', + EDIT_MODE = 'editMode', MINI_SHELF_QUEUE_ADLIB = 'miniShelfQueueAdLib', GO_TO_PART = 'goToPart', GO_TO_PART_INSTANCE = 'goToPartInstance', @@ -74,6 +75,10 @@ export interface ShelfStateEvent extends IEventContext { state: boolean | 'toggle' } +export interface EditModeEvent extends IEventContext { + state: boolean | 'toggle' +} + export interface MiniShelfQueueAdLibEvent extends IEventContext { forward: boolean } @@ -139,6 +144,7 @@ export interface RundownViewEventBusEvents { [RundownViewEvents.SEGMENT_ZOOM_ON]: [] [RundownViewEvents.SEGMENT_ZOOM_OFF]: [] [RundownViewEvents.SHELF_STATE]: [e: ShelfStateEvent] + [RundownViewEvents.EDIT_MODE]: [e: EditModeEvent] [RundownViewEvents.REVEAL_IN_SHELF]: [e: RevealInShelfEvent] [RundownViewEvents.SWITCH_SHELF_TAB]: [e: SwitchToShelfTabEvent] [RundownViewEvents.MINI_SHELF_QUEUE_ADLIB]: [e: MiniShelfQueueAdLibEvent] diff --git 
a/packages/meteor-lib/src/triggers/actionFactory.ts b/packages/meteor-lib/src/triggers/actionFactory.ts index e7410ce173..2f5edfeefb 100644 --- a/packages/meteor-lib/src/triggers/actionFactory.ts +++ b/packages/meteor-lib/src/triggers/actionFactory.ts @@ -286,6 +286,17 @@ function createShelfAction(_filterChain: IGUIContextFilterLink[], state: boolean } } +function createEditModeAction(_filterChain: IGUIContextFilterLink[], state: boolean | 'toggle'): ExecutableAction { + return { + action: ClientActions.editMode, + execute: () => { + RundownViewEventBus.emit(RundownViewEvents.EDIT_MODE, { + state, + }) + }, + } +} + function createMiniShelfQueueAdLibAction(_filterChain: IGUIContextFilterLink[], forward: boolean): ExecutableAction { return { action: ClientActions.miniShelfQueueAdLib, @@ -442,6 +453,8 @@ export function createAction( switch (action.action) { case ClientActions.shelf: return createShelfAction(action.filterChain, action.state) + case ClientActions.editMode: + return createEditModeAction(action.filterChain, action.state) case ClientActions.goToOnAirLine: return createGoToOnAirLineAction(action.filterChain) case ClientActions.rewindSegments: diff --git a/packages/mos-gateway/src/$schemas/devices.json b/packages/mos-gateway/src/$schemas/devices.json index 78a895f86f..08f3c143c3 100644 --- a/packages/mos-gateway/src/$schemas/devices.json +++ b/packages/mos-gateway/src/$schemas/devices.json @@ -140,8 +140,36 @@ }, "required": ["id", "host"], "additionalProperties": false + }, + "statuses": { + "type": "object", + "ui:title": "Statuses", + "title": "MosDeviceStatusesConfig", + "properties": { + "enabled": { + "type": "boolean", + "ui:title": "Write Statuses to NRCS", + "ui:description": "", + "ui:summaryTitle": "Statuses", + "default": true + }, + "sendInRehearsal": { + "type": "boolean", + "ui:title": "Send when in Rehearsal mode", + "ui:description": "", + "default": false + }, + "onlySendPlay": { + "type": "boolean", + "ui:title": "Only send PLAY statuses", + "ui:description": "", + "default": false + } + }, + "required": ["enabled"], + "additionalProperties": false } }, - "required": ["primary"], + "required": ["primary", "statuses"], "additionalProperties": false } diff --git a/packages/mos-gateway/src/CoreMosDeviceHandler.ts b/packages/mos-gateway/src/CoreMosDeviceHandler.ts index 242cc84055..faff9a50a7 100644 --- a/packages/mos-gateway/src/CoreMosDeviceHandler.ts +++ b/packages/mos-gateway/src/CoreMosDeviceHandler.ts @@ -4,6 +4,7 @@ import { protectString, Observer, PeripheralDevicePubSub, + stringifyError, } from '@sofie-automation/server-core-integration' import { IMOSConnectionStatus, @@ -21,7 +22,6 @@ import { IMOSItem, IMOSROReadyToAir, IMOSROFullStory, - IMOSObjectStatus, IMOSROAck, getMosTypes, MosTypes, @@ -112,9 +112,7 @@ export class CoreMosDeviceHandler { deviceName: this._mosDevice.idPrimary, }) this.core.on('error', (err) => { - this._coreParentHandler.logger.error( - 'Core Error: ' + (typeof err === 'string' ? 
err : err.message || err.toString()) - ) + this._coreParentHandler.logger.error(`Core Error: ${stringifyError(err)}`) }) this.setupSubscriptionsAndObservers() @@ -138,7 +136,7 @@ export class CoreMosDeviceHandler { Promise.all([ this.core.autoSubscribe(PeripheralDevicePubSub.peripheralDeviceCommands, this.core.deviceId), ]).catch((e) => { - this._coreParentHandler.logger.error(e) + this._coreParentHandler.logger.error(stringifyError(e)) }) this._coreParentHandler.logger.info('CoreMos: Setting up observers..') @@ -349,42 +347,6 @@ export class CoreMosDeviceHandler { // console.log('GOT REPLY', results) return this.fixMosData(ro) } - async setROStatus(roId: string, status: IMOSObjectStatus): Promise { - // console.log('setStoryStatus') - const result = await this._mosDevice.sendRunningOrderStatus({ - ID: this.mosTypes.mosString128.create(roId), - Status: status, - Time: this.mosTypes.mosTime.create(new Date()), - }) - - // console.log('got result', result) - return this.fixMosData(result) - } - async setStoryStatus(roId: string, storyId: string, status: IMOSObjectStatus): Promise { - // console.log('setStoryStatus') - const result = await this._mosDevice.sendStoryStatus({ - RunningOrderId: this.mosTypes.mosString128.create(roId), - ID: this.mosTypes.mosString128.create(storyId), - Status: status, - Time: this.mosTypes.mosTime.create(new Date()), - }) - - // console.log('got result', result) - return this.fixMosData(result) - } - async setItemStatus(roId: string, storyId: string, itemId: string, status: IMOSObjectStatus): Promise { - // console.log('setStoryStatus') - const result = await this._mosDevice.sendItemStatus({ - RunningOrderId: this.mosTypes.mosString128.create(roId), - StoryId: this.mosTypes.mosString128.create(storyId), - ID: this.mosTypes.mosString128.create(itemId), - Status: status, - Time: this.mosTypes.mosTime.create(new Date()), - }) - - // console.log('got result', result) - return this.fixMosData(result) - } async replaceStoryItem( roID: string, storyID: string, diff --git a/packages/mos-gateway/src/connector.ts b/packages/mos-gateway/src/connector.ts index a6ea0de258..fe4275bcbd 100644 --- a/packages/mos-gateway/src/connector.ts +++ b/packages/mos-gateway/src/connector.ts @@ -5,6 +5,7 @@ import { PeripheralDeviceId, loadCertificatesFromDisk, CertificatesConfig, + stringifyError, } from '@sofie-automation/server-core-integration' export interface Config { @@ -36,18 +37,23 @@ export class Connector { this._logger.info('Process initialized') this._logger.info('Initializing Core...') - await this.initCore(certificates) + this.coreHandler = await CoreHandler.create( + this._logger, + this._config.core, + certificates, + this._config.device + ) this._logger.info('Initializing Mos...') - await this.initMos() + this.mosHandler = await MosHandler.create(this._logger, this._config.mos, this.coreHandler) this._logger.info('Initialization done') } catch (e: any) { - this._logger.error('Error during initialization:', e, e.stack) + this._logger.error(`Error during initialization: ${stringifyError(e)}`, e.stack) this._logger.info('Shutting down in 10 seconds!') - this.dispose().catch((e2) => this._logger.error(e2)) + this.dispose().catch((e2) => this._logger.error(stringifyError(e2))) setTimeout(() => { // eslint-disable-next-line n/no-process-exit @@ -55,32 +61,7 @@ export class Connector { }, 10 * 1000) } } - async initCore(certificates: Buffer[]): Promise { - if (!this._config) { - throw Error('_config is undefined!') - } - - this.coreHandler = new CoreHandler(this._logger, 
this._config.device) - - if (!this.coreHandler) { - throw Error('coreHandler is undefined!') - } - - return this.coreHandler.init(this._config.core, certificates) - } - async initMos(): Promise { - this.mosHandler = new MosHandler(this._logger) - if (!this._config) { - throw Error('_config is undefined!') - } - - if (!this.coreHandler) { - throw Error('coreHandler is undefined!') - } - - return this.mosHandler.init(this._config.mos, this.coreHandler) - } async dispose(): Promise { if (this.mosHandler) await this.mosHandler.dispose() diff --git a/packages/mos-gateway/src/coreHandler.ts b/packages/mos-gateway/src/coreHandler.ts index e6a38c5849..b2a9d8c1fb 100644 --- a/packages/mos-gateway/src/coreHandler.ts +++ b/packages/mos-gateway/src/coreHandler.ts @@ -41,12 +41,23 @@ export class CoreHandler { private _coreConfig?: CoreConfig private _certificates?: Buffer[] - constructor(logger: Winston.Logger, deviceOptions: DeviceConfig) { + public static async create( + logger: Winston.Logger, + config: CoreConfig, + certificates: Buffer[], + deviceOptions: DeviceConfig + ): Promise { + const handler = new CoreHandler(logger, deviceOptions) + await handler.init(config, certificates) + return handler + } + + private constructor(logger: Winston.Logger, deviceOptions: DeviceConfig) { this.logger = logger this._deviceOptions = deviceOptions } - async init(config: CoreConfig, certificates: Buffer[]): Promise { + private async init(config: CoreConfig, certificates: Buffer[]): Promise { // this.logger.info('========') this._coreConfig = config this._certificates = certificates @@ -224,7 +235,7 @@ export class CoreHandler { // console.log('cb done') }) .catch((e) => { - this.logger.error(e) + this.logger.error(stringifyError(e)) }) } // eslint-disable-next-line @typescript-eslint/ban-ts-comment diff --git a/packages/mos-gateway/src/mosHandler.ts b/packages/mos-gateway/src/mosHandler.ts index 6f0ffd241f..d0101d8944 100644 --- a/packages/mos-gateway/src/mosHandler.ts +++ b/packages/mos-gateway/src/mosHandler.ts @@ -1,6 +1,5 @@ import { MosConnection, - IMOSDevice, IMOSConnectionStatus, IMOSRunningOrder, IMOSROAck, @@ -16,7 +15,6 @@ import { IMOSROReadyToAir, IMOSROFullStory, IConnectionConfig, - IMOSDeviceConnectionOptions, MosDevice, IMOSListMachInfo, IMOSString128, @@ -27,7 +25,11 @@ import { import * as Winston from 'winston' import { CoreHandler } from './coreHandler.js' import { CoreMosDeviceHandler } from './CoreMosDeviceHandler.js' -import { Observer, PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/server-core-integration' +import { + Observer, + PeripheralDevicePubSubCollectionsNames, + stringifyError, +} from '@sofie-automation/server-core-integration' import { DEFAULT_MOS_TIMEOUT_TIME, DEFAULT_MOS_HEARTBEAT_INTERVAL, @@ -35,12 +37,15 @@ import { import { MosGatewayConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayOptionsTypes' import { MosDeviceConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' import { PeripheralDeviceForDevice } from '@sofie-automation/server-core-integration' +import _ from 'underscore' +import { MosStatusHandler } from './mosStatus/handler.js' +import { isPromise } from 'util/types' export interface MosConfig { self: IConnectionConfig // devices: Array } -export type MosSubDeviceSettings = Record< +type MosSubDeviceSettings = Record< string, { type: '' @@ -48,29 +53,53 @@ export type MosSubDeviceSettings = Record< } > +/** + * Represents a connection in mos-connection, paired with some additional data + */ 
+interface MosDeviceHandle { + readonly deviceId: string + readonly mosDevice: MosDevice + readonly deviceOptions: Readonly + + // Once connected, a core handler is setup + coreMosHandler?: CoreMosDeviceHandler | Promise + + // If writing back story/item status is enabled, the setup handler + statusHandler?: MosStatusHandler +} + export class MosHandler { public mos: MosConnection | undefined public mosOptions: MosConfig | undefined public debugLogging = false - private allMosDevices: { [id: string]: { mosDevice: IMOSDevice; coreMosHandler?: CoreMosDeviceHandler } } = {} - private _ownMosDevices: { [deviceId: string]: MosDevice } = {} + /** Map of mos devices that have been created */ + private readonly _allMosDevices = new Map() + private _logger: Winston.Logger private _disposed = false private _settings?: MosGatewayConfig - private _openMediaHotStandby: Record private _coreHandler: CoreHandler | undefined private _observers: Array> = [] private _triggerupdateDevicesTimeout: any = null private mosTypes: MosTypes - constructor(logger: Winston.Logger) { + public static async create( + logger: Winston.Logger, + config: MosConfig, + coreHandler: CoreHandler + ): Promise { + const handler = new MosHandler(logger) + await handler.init(config, coreHandler) + return handler + } + + private constructor(logger: Winston.Logger) { this._logger = logger - this._openMediaHotStandby = {} this.mosTypes = getMosTypes(this.strict) // temporary, another will be set upon init() } - async init(config: MosConfig, coreHandler: CoreHandler): Promise { + private async init(config: MosConfig, coreHandler: CoreHandler): Promise { this.mosOptions = config this._coreHandler = coreHandler /*{ @@ -121,11 +150,9 @@ export class MosHandler { return Promise.resolve() } } - setupObservers(): void { + private setupObservers(): void { if (this._observers.length) { - this._observers.forEach((obs) => { - obs.stop() - }) + this._observers.forEach((obs) => obs.stop()) this._observers = [] } this._logger.info('Renewing observers') @@ -141,15 +168,9 @@ export class MosHandler { const deviceObserver = this._coreHandler.core.observe( PeripheralDevicePubSubCollectionsNames.peripheralDeviceForDevice ) - deviceObserver.added = () => { - this._deviceOptionsChanged() - } - deviceObserver.changed = () => { - this._deviceOptionsChanged() - } - deviceObserver.removed = () => { - this._deviceOptionsChanged() - } + deviceObserver.added = () => this._deviceOptionsChanged() + deviceObserver.changed = () => this._deviceOptionsChanged() + deviceObserver.removed = () => this._deviceOptionsChanged() this._observers.push(deviceObserver) this._deviceOptionsChanged() @@ -193,7 +214,7 @@ export class MosHandler { } this._triggerupdateDevicesTimeout = setTimeout(() => { this._updateDevices().catch((e) => { - this._logger.error(e) + this._logger.error(stringifyError(e)) }) }, 20) } @@ -224,171 +245,203 @@ export class MosHandler { } this.debugLog('rawMessage', source, type, message) }) - this.mos.on('info', (message: any) => { - this._logger.info(message) + this.mos.on('info', (message, data) => { + this._logger.info(message, data) }) - this.mos.on('error', (error: any) => { - this._logger.error(error) + this.mos.on('error', (error) => { + this._logger.error(stringifyError(error)) }) - this.mos.on('warning', (warning: any) => { - this._logger.error(warning) + this.mos.on('warning', (warning) => { + this._logger.error(stringifyError(warning)) }) - // eslint-disable-next-line @typescript-eslint/no-misused-promises - this.mos.onConnection(async (mosDevice: 
IMOSDevice): Promise => { - // a new connection to a device has been made - this._logger.info('new mosConnection established: ' + mosDevice.idPrimary + ', ' + mosDevice.idSecondary) - try { - this.allMosDevices[mosDevice.idPrimary] = { mosDevice: mosDevice } + this.mos.onConnection((mosDevice: MosDevice): void => { + this.setupMosDevice(mosDevice).catch((e) => { + this._logger.error(stringifyError(e)) + }) + }) - if (!this._coreHandler) throw Error('_coreHandler is undefined!') + // Open mos-server for connections: + await this.mos.init() + } + private async setupMosDevice(mosDevice: MosDevice): Promise { + // a new connection to a device has been made + this._logger.info('new mosConnection established: ' + mosDevice.idPrimary + ', ' + mosDevice.idSecondary) + try { + const deviceEntry = Array.from(this._allMosDevices.values()).find( + (d) => + d.mosDevice.idPrimary === mosDevice.idPrimary && d.mosDevice.idSecondary === mosDevice.idSecondary + ) - const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this, { - openMediaHotStandby: mosDevice.idSecondary - ? this._openMediaHotStandby[mosDevice.idSecondary] - : false, - }) - // this._logger.info('mosDevice registered -------------') - // Setup message flow between the devices: - - this.allMosDevices[mosDevice.idPrimary].coreMosHandler = coreMosHandler - - // Initial Status check: - const connectionStatus = mosDevice.getConnectionStatus() - coreMosHandler.onMosConnectionChanged(connectionStatus) // initial check - // Profile 0: ------------------------------------------------- - mosDevice.onConnectionChange((newStatus: IMOSConnectionStatus) => { - // MOSDevice >>>> Core - coreMosHandler.onMosConnectionChanged(newStatus) - }) - coreMosHandler.onMosConnectionChanged(mosDevice.getConnectionStatus()) - mosDevice.onRequestMachineInfo(async () => { - // MOSDevice >>>> Core - return coreMosHandler.getMachineInfo() - }) + if (!deviceEntry) { + // We got a connection for a connection which shouldn't exist.. + this._logger.error(`Got connection for mosDevice "${mosDevice.idPrimary}" which doesn't exist!`) + return + } - // Profile 1: ------------------------------------------------- - /* + if (deviceEntry.mosDevice !== mosDevice) { + // Our state doesn't match, don't try to use the connection it could be from a previous connection attempt + this._logger.error( + `Got connection for mosDevice "${mosDevice.idPrimary}" which differs to the one setup!` + ) + return + } + + // This is either a promise, if a handler is currently being setup, or the handler itself + if (deviceEntry.coreMosHandler) { + this._logger.error(`Got connection for mosDevice "${mosDevice.idPrimary}" which is already setup!`) + return + } + + if (!this._coreHandler) throw Error('_coreHandler is undefined!') + + const openMediaHotStandby = deviceEntry.deviceOptions.secondary?.openMediaHotStandby || false + + const coreMosHandler = await this._coreHandler.registerMosDevice(mosDevice, this, { + openMediaHotStandby: mosDevice.idSecondary ? 
openMediaHotStandby : false, + }) + // this._logger.info('mosDevice registered -------------') + // Setup message flow between the devices: + + deviceEntry.coreMosHandler = coreMosHandler + + // Initial Status check: + // Profile 0: ------------------------------------------------- + mosDevice.onConnectionChange((newStatus: IMOSConnectionStatus) => { + // MOSDevice >>>> Core + coreMosHandler.onMosConnectionChanged(newStatus) + + // Setup the status handler upon first connection to the NRCS + const isConnected = newStatus.PrimaryConnected || newStatus.SecondaryConnected + if (deviceEntry.deviceOptions.statuses?.enabled && !deviceEntry.statusHandler && isConnected) { + // Creating the handler at this point avoids sending status messages before the connection is established, + // allowing for a sync at startup without needing manual queueing + deviceEntry.statusHandler = new MosStatusHandler( + this._logger, + mosDevice, + coreMosHandler, + deviceEntry.deviceOptions.statuses, + this.strict + ) + } + }) + coreMosHandler.onMosConnectionChanged(mosDevice.getConnectionStatus()) + mosDevice.onRequestMachineInfo(async () => { + // MOSDevice >>>> Core + return coreMosHandler.getMachineInfo() + }) + + // Profile 1: ------------------------------------------------- + /* mosDevice.onRequestMOSObject((objId: string) => { // coreMosHandler.fetchMosObject(objId) // return Promise }) */ - // onRequestMOSObject: (cb: (objId: string) => Promise) => void - // onRequestAllMOSObjects: (cb: () => Promise>) => void - // getMOSObject: (objId: string) => Promise - // getAllMOSObjects: () => Promise> - // Profile 2: ------------------------------------------------- - mosDevice.onCreateRunningOrder(async (ro: IMOSRunningOrder) => { - // MOSDevice >>>> Core - return this._getROAck(ro.ID, coreMosHandler.mosRoCreate(ro)) - }) - mosDevice.onReplaceRunningOrder(async (ro: IMOSRunningOrder) => { - // MOSDevice >>>> Core - return this._getROAck(ro.ID, coreMosHandler.mosRoReplace(ro)) - }) - mosDevice.onDeleteRunningOrder(async (runningOrderId: IMOSString128) => { - // MOSDevice >>>> Core - return this._getROAck(runningOrderId, coreMosHandler.mosRoDelete(runningOrderId)) - }) - mosDevice.onMetadataReplace(async (ro: IMOSRunningOrderBase) => { - // MOSDevice >>>> Core - return this._getROAck(ro.ID, coreMosHandler.mosRoMetadata(ro)) - }) - mosDevice.onRunningOrderStatus(async (status: IMOSRunningOrderStatus) => { - // MOSDevice >>>> Core - return this._getROAck(status.ID, coreMosHandler.mosRoStatus(status)) - }) - mosDevice.onStoryStatus(async (status: IMOSStoryStatus) => { - // MOSDevice >>>> Core - return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoStoryStatus(status)) - }) - mosDevice.onItemStatus(async (status: IMOSItemStatus) => { - // MOSDevice >>>> Core - return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoItemStatus(status)) - }) - mosDevice.onROInsertStories(async (Action: IMOSStoryAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryInsert(Action, Stories)) - }) - mosDevice.onROInsertItems(async (Action: IMOSItemAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemInsert(Action, Items)) - }) - mosDevice.onROReplaceStories(async (Action: IMOSStoryAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryReplace(Action, Stories)) - }) - mosDevice.onROReplaceItems(async (Action: 
IMOSItemAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemReplace(Action, Items)) - }) - mosDevice.onROMoveStories(async (Action: IMOSStoryAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryMove(Action, Stories)) - }) - mosDevice.onROMoveItems(async (Action: IMOSItemAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemMove(Action, Items)) - }) - mosDevice.onRODeleteStories(async (Action: IMOSROAction, Stories: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryDelete(Action, Stories)) - }) - mosDevice.onRODeleteItems(async (Action: IMOSStoryAction, Items: Array) => { - // MOSDevice >>>> Core - return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemDelete(Action, Items)) - }) - mosDevice.onROSwapStories( - async (Action: IMOSROAction, StoryID0: IMOSString128, StoryID1: IMOSString128) => { - // MOSDevice >>>> Core - return this._getROAck( - Action.RunningOrderID, - coreMosHandler.mosRoStorySwap(Action, StoryID0, StoryID1) - ) - } - ) - mosDevice.onROSwapItems( - async (Action: IMOSStoryAction, ItemID0: IMOSString128, ItemID1: IMOSString128) => { - // MOSDevice >>>> Core - return this._getROAck( - Action.RunningOrderID, - coreMosHandler.mosRoItemSwap(Action, ItemID0, ItemID1) - ) - } - ) - mosDevice.onReadyToAir(async (Action: IMOSROReadyToAir) => { + // onRequestMOSObject: (cb: (objId: string) => Promise) => void + // onRequestAllMOSObjects: (cb: () => Promise>) => void + // getMOSObject: (objId: string) => Promise + // getAllMOSObjects: () => Promise> + // Profile 2: ------------------------------------------------- + mosDevice.onCreateRunningOrder(async (ro: IMOSRunningOrder) => { + // MOSDevice >>>> Core + return this._getROAck(ro.ID, coreMosHandler.mosRoCreate(ro)) + }) + mosDevice.onReplaceRunningOrder(async (ro: IMOSRunningOrder) => { + // MOSDevice >>>> Core + return this._getROAck(ro.ID, coreMosHandler.mosRoReplace(ro)) + }) + mosDevice.onDeleteRunningOrder(async (runningOrderId: IMOSString128) => { + // MOSDevice >>>> Core + return this._getROAck(runningOrderId, coreMosHandler.mosRoDelete(runningOrderId)) + }) + mosDevice.onMetadataReplace(async (ro: IMOSRunningOrderBase) => { + // MOSDevice >>>> Core + return this._getROAck(ro.ID, coreMosHandler.mosRoMetadata(ro)) + }) + mosDevice.onRunningOrderStatus(async (status: IMOSRunningOrderStatus) => { + // MOSDevice >>>> Core + return this._getROAck(status.ID, coreMosHandler.mosRoStatus(status)) + }) + mosDevice.onStoryStatus(async (status: IMOSStoryStatus) => { + // MOSDevice >>>> Core + return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoStoryStatus(status)) + }) + mosDevice.onItemStatus(async (status: IMOSItemStatus) => { + // MOSDevice >>>> Core + return this._getROAck(status.RunningOrderId, coreMosHandler.mosRoItemStatus(status)) + }) + mosDevice.onROInsertStories(async (Action: IMOSStoryAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryInsert(Action, Stories)) + }) + mosDevice.onROInsertItems(async (Action: IMOSItemAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemInsert(Action, Items)) + }) + mosDevice.onROReplaceStories(async (Action: IMOSStoryAction, Stories: Array) => { + // MOSDevice 
>>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryReplace(Action, Stories)) + }) + mosDevice.onROReplaceItems(async (Action: IMOSItemAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemReplace(Action, Items)) + }) + mosDevice.onROMoveStories(async (Action: IMOSStoryAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryMove(Action, Stories)) + }) + mosDevice.onROMoveItems(async (Action: IMOSItemAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemMove(Action, Items)) + }) + mosDevice.onRODeleteStories(async (Action: IMOSROAction, Stories: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoStoryDelete(Action, Stories)) + }) + mosDevice.onRODeleteItems(async (Action: IMOSStoryAction, Items: Array) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemDelete(Action, Items)) + }) + mosDevice.onROSwapStories( + async (Action: IMOSROAction, StoryID0: IMOSString128, StoryID1: IMOSString128) => { // MOSDevice >>>> Core - return this._getROAck(Action.ID, coreMosHandler.mosRoReadyToAir(Action)) - }) - // ---------------------------------------------------------------- - // Init actions - /* + return this._getROAck( + Action.RunningOrderID, + coreMosHandler.mosRoStorySwap(Action, StoryID0, StoryID1) + ) + } + ) + mosDevice.onROSwapItems(async (Action: IMOSStoryAction, ItemID0: IMOSString128, ItemID1: IMOSString128) => { + // MOSDevice >>>> Core + return this._getROAck(Action.RunningOrderID, coreMosHandler.mosRoItemSwap(Action, ItemID0, ItemID1)) + }) + mosDevice.onReadyToAir(async (Action: IMOSROReadyToAir) => { + // MOSDevice >>>> Core + return this._getROAck(Action.ID, coreMosHandler.mosRoReadyToAir(Action)) + }) + // ---------------------------------------------------------------- + // Init actions + /* mosDevice.getMachineInfo() .then((machineInfo: IMOSListMachInfo) => { }) */ - // Profile 3: ------------------------------------------------- - // Profile 4: ------------------------------------------------- - // onStory: (cb: (story: IMOSROFullStory) => Promise) => void - mosDevice.onRunningOrderStory(async (story: IMOSROFullStory) => { - // MOSDevice >>>> Core - return this._getROAck(story.RunningOrderId, coreMosHandler.mosRoFullStory(story)) - }) - } catch (e) { - this._logger.error('Error:', e) - } - }) - - // Open mos-server for connections: - await this.mos.init() + // Profile 3: ------------------------------------------------- + // Profile 4: ------------------------------------------------- + // onStory: (cb: (story: IMOSROFullStory) => Promise) => void + mosDevice.onRunningOrderStory(async (story: IMOSROFullStory) => { + // MOSDevice >>>> Core + return this._getROAck(story.RunningOrderId, coreMosHandler.mosRoFullStory(story)) + }) + } catch (e) { + this._logger.error(stringifyError(e)) + } } private sendStatusOfAllMosDevices() { // Send an update to Core of the status of all mos devices - for (const handler of Object.values<{ mosDevice: IMOSDevice; coreMosHandler?: CoreMosDeviceHandler }>( - this.allMosDevices - )) { - if (handler.coreMosHandler) { + for (const handler of this._allMosDevices.values()) { + if (handler.coreMosHandler && !isPromise(handler.coreMosHandler)) { 
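+                // Note: coreMosHandler can transiently hold a Promise while registration with Core is still
+                // in progress; only fully registered handlers can forward the current connection status.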
handler.coreMosHandler.onMosConnectionChanged(handler.mosDevice.getConnectionStatus()) } } @@ -424,26 +477,28 @@ export class MosHandler { for (const [deviceId, device] of Object.entries<{ options: MosDeviceConfig }>(devices)) { if (device) { if (device.options.secondary) { - const fullSecondaryId = this._settings?.mosId + '_' + device.options.secondary.id - this._openMediaHotStandby[fullSecondaryId] = - device.options.secondary?.openMediaHotStandby || false // If the host isn't set, don't use secondary: if (!device.options.secondary.host || !device.options.secondary.id) delete device.options.secondary } - const oldDevice: MosDevice | null = this._getDevice(deviceId) + const oldDevice = this._allMosDevices.get(deviceId) if (!oldDevice) { this._logger.info('Initializing new device: ' + deviceId) devicesToAdd[deviceId] = device } else { - if ( - (oldDevice.primaryId || '') !== device.options.primary?.id || - (oldDevice.primaryHost || '') !== device.options.primary?.host || - (oldDevice.secondaryId || '') !== (device.options.secondary?.id || '') || - (oldDevice.secondaryHost || '') !== (device.options.secondary?.host || '') - ) { + // elsewhere the oldDevice.deviceOptions has been modified with defaults + const newOptionsWithDefaults = { + ...device.options, + primary: { + ...device.options.primary, + heartbeatInterval: + device.options.primary.heartbeatInterval || DEFAULT_MOS_HEARTBEAT_INTERVAL, + timeout: device.options.primary.timeout || DEFAULT_MOS_TIMEOUT_TIME, + }, + } + if (!_.isEqual(oldDevice.deviceOptions, newOptionsWithDefaults)) { this._logger.info('Re-initializing device: ' + deviceId) devicesToRemove[deviceId] = true devicesToAdd[deviceId] = device @@ -452,7 +507,7 @@ export class MosHandler { } } - for (const [deviceId, oldDevice] of Object.entries(this._ownMosDevices)) { + for (const [deviceId, oldDevice] of this._allMosDevices.entries()) { if (oldDevice && !devices[deviceId]) { this._logger.info('Un-initializing device: ' + deviceId) devicesToRemove[deviceId] = true @@ -472,29 +527,26 @@ export class MosHandler { ) } } - private async _addDevice(deviceId: string, deviceOptions: IMOSDeviceConnectionOptions): Promise { - if (this._getDevice(deviceId)) { + private async _addDevice(deviceId: string, deviceOptions0: MosDeviceConfig): Promise { + if (this._allMosDevices.has(deviceId)) { // the device is already there throw new Error('Unable to add device "' + deviceId + '", because it already exists!') } - if (!this.mos) { - throw Error('mos is undefined, call _initMosConnection first!') - } - - deviceOptions = JSON.parse(JSON.stringify(deviceOptions)) // deep clone - - deviceOptions.primary.timeout = deviceOptions.primary.timeout || DEFAULT_MOS_TIMEOUT_TIME - - deviceOptions.primary.heartbeatInterval = - deviceOptions.primary.heartbeatInterval || DEFAULT_MOS_HEARTBEAT_INTERVAL + if (!this.mos) throw Error('mos is undefined, call _initMosConnection first!') - if (deviceOptions.secondary?.id && this._openMediaHotStandby[deviceOptions.secondary.id]) { - deviceOptions.secondary.openMediaHotStandby = true - } + const deviceOptions: MosDeviceConfig = JSON.parse(JSON.stringify(deviceOptions0)) // deep clone + deviceOptions.primary.timeout ||= DEFAULT_MOS_TIMEOUT_TIME + deviceOptions.primary.heartbeatInterval ||= DEFAULT_MOS_HEARTBEAT_INTERVAL const mosDevice: MosDevice = await this.mos.connect(deviceOptions) - this._ownMosDevices[deviceId] = mosDevice + this._allMosDevices.set(deviceId, { + deviceId: deviceId, + mosDevice: mosDevice, + deviceOptions, + }) + + await 
this.setupMosDevice(mosDevice) try { const getMachineInfoUntilConnected = async (): Promise => @@ -535,23 +587,29 @@ export class MosHandler { return mosDevice } catch (e) { // something went wrong during init: - if (!this.mos) { - throw Error('mos is undefined!') - } + if (!this.mos) throw Error('mos is undefined!') this.mos.disposeMosDevice(mosDevice).catch((e2) => { - this._logger.error(e2) + this._logger.error(stringifyError(e2)) }) throw e } } private async _removeDevice(deviceId: string): Promise { - const mosDevice = this._getDevice(deviceId) as MosDevice + const deviceEntry = this._allMosDevices.get(deviceId) + this._allMosDevices.delete(deviceId) - delete this._ownMosDevices[deviceId] - if (mosDevice) { - if (!this._coreHandler) throw Error('_coreHandler is undefined!') - await this._coreHandler.unRegisterMosDevice(mosDevice) + if (deviceEntry) { + const mosDevice = deviceEntry.mosDevice + + // Cleanup the coreMosHandler from the device + if (this._coreHandler) await this._coreHandler.unRegisterMosDevice(mosDevice) + + // Stop the status handler, if enabled + if (deviceEntry.statusHandler) { + deviceEntry.statusHandler.dispose() + delete deviceEntry.statusHandler + } if (!this.mos) { throw Error('mos is undefined!') @@ -571,9 +629,6 @@ export class MosHandler { } return Promise.resolve() } - private _getDevice(deviceId: string): MosDevice | null { - return this._ownMosDevices[deviceId] || null - } private async _getROAck(roId: IMOSString128, p: Promise): Promise { return p .then(() => { @@ -585,7 +640,7 @@ export class MosHandler { return roAck }) .catch((err) => { - this._logger.error('ROAck error:', err) + this._logger.error(`ROAck error: ${stringifyError(err)}`) const roAck: IMOSROAck = { ID: roId, Status: this.mosTypes.mosString128.create('Error: ' + err.toString()), diff --git a/packages/mos-gateway/src/mosStatus/__tests__/diff.spec.ts b/packages/mos-gateway/src/mosStatus/__tests__/diff.spec.ts new file mode 100644 index 0000000000..7cbf3138e2 --- /dev/null +++ b/packages/mos-gateway/src/mosStatus/__tests__/diff.spec.ts @@ -0,0 +1,317 @@ +import { protectString } from '@sofie-automation/server-core-integration' +import { + IngestPartPlaybackStatus, + IngestRundownActiveStatus, + IngestRundownStatus, +} from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import { diffStatuses, ItemStatusEntry, MOS_STATUS_UNKNOWN, StoryStatusEntry } from '../diff.js' +import type { MosDeviceStatusesConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' +import { IMOSObjectStatus } from '@mos-connection/connector' + +describe('diffStatuses', () => { + const defaultConfig: MosDeviceStatusesConfig = { + enabled: true, + sendInRehearsal: true, + onlySendPlay: false, + } + const singlePartRundown: IngestRundownStatus = { + _id: protectString('rundown0'), + externalId: 'external0', + active: IngestRundownActiveStatus.ACTIVE, + segments: [ + { + externalId: 'segment0', + parts: [ + { + externalId: 'part0', + isReady: true, + itemsReady: [], + playbackStatus: IngestPartPlaybackStatus.UNKNOWN, + }, + ], + }, + ], + } + + test('diff no changes', () => { + const diff = diffStatuses(defaultConfig, singlePartRundown, singlePartRundown) + expect(diff).toHaveLength(0) + }) + + test('part playback changes', () => { + const partPlayingState = structuredClone(singlePartRundown) + partPlayingState.segments[0].parts[0].playbackStatus = IngestPartPlaybackStatus.PLAY + + { + // change to play + const diff = diffStatuses(defaultConfig, singlePartRundown, partPlayingState) + 
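+            // The UNKNOWN -> PLAY transition is expected to produce exactly one story-level status entry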
expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.PLAY, + } satisfies StoryStatusEntry) + } + + { + const partStoppedState = structuredClone(partPlayingState) + partStoppedState.segments[0].parts[0].playbackStatus = IngestPartPlaybackStatus.STOP + + // change to stop + const diff = diffStatuses(defaultConfig, partPlayingState, partStoppedState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.STOP, + } satisfies StoryStatusEntry) + } + + { + const partClearState = structuredClone(partPlayingState) + partClearState.segments[0].parts[0].playbackStatus = IngestPartPlaybackStatus.UNKNOWN + + // change to clear + const diff = diffStatuses(defaultConfig, partPlayingState, partClearState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + }) + + test('part ready changes', () => { + const partNotReadyState = structuredClone(singlePartRundown) + partNotReadyState.segments[0].parts[0].isReady = false + + { + // change to not ready + const diff = diffStatuses(defaultConfig, singlePartRundown, partNotReadyState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies StoryStatusEntry) + } + + { + // change to ready + const diff = diffStatuses(defaultConfig, partNotReadyState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + + { + const partClearState = structuredClone(partNotReadyState) + partClearState.segments[0].parts[0].isReady = null + + // change to unknown + const diff = diffStatuses(defaultConfig, partNotReadyState, partClearState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies StoryStatusEntry) + } + }) + + test('part added to rundown', () => { + const extraPartState = structuredClone(singlePartRundown) + extraPartState.segments[0].parts.push({ + externalId: 'part1', + isReady: false, + itemsReady: [], + playbackStatus: IngestPartPlaybackStatus.UNKNOWN, + }) + + { + const diff = diffStatuses(defaultConfig, singlePartRundown, extraPartState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part1', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies StoryStatusEntry) + } + }) + + test('part removed from rundown', () => { + const extraPartState = structuredClone(singlePartRundown) + extraPartState.segments[0].parts.push({ + externalId: 'part1', + isReady: false, + itemsReady: [], + playbackStatus: IngestPartPlaybackStatus.UNKNOWN, + }) + + { + const diff = diffStatuses(defaultConfig, extraPartState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part1', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies StoryStatusEntry) + } + }) + + test('rundown becomes inactive', () => { + const inactiveState = structuredClone(singlePartRundown) + 
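+        // Deactivating the rundown is expected to clear the story status back to MOS_STATUS_UNKNOWN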
inactiveState.active = IngestRundownActiveStatus.INACTIVE + + { + const diff = diffStatuses(defaultConfig, singlePartRundown, inactiveState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies StoryStatusEntry) + } + }) + + test('rundown becomes active', () => { + const inactiveState = structuredClone(singlePartRundown) + inactiveState.active = IngestRundownActiveStatus.INACTIVE + + { + const diff = diffStatuses(defaultConfig, inactiveState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + }) + + test('rundown becomes rehearsal', () => { + const inactiveState = structuredClone(singlePartRundown) + inactiveState.active = IngestRundownActiveStatus.INACTIVE + const rehearsalState = structuredClone(singlePartRundown) + rehearsalState.active = IngestRundownActiveStatus.REHEARSAL + + { + // send during rehearsal + const diff = diffStatuses(defaultConfig, inactiveState, rehearsalState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'story', + rundownExternalId: 'external0', + storyId: 'part0', + mosStatus: IMOSObjectStatus.READY, + } satisfies StoryStatusEntry) + } + + { + // no send during rehearsal + const disableRehearsalConfig = { + ...defaultConfig, + sendInRehearsal: false, + } + const diff = diffStatuses(disableRehearsalConfig, inactiveState, rehearsalState) + expect(diff).toHaveLength(0) + } + }) + + test('add items', () => { + { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: true }) + + const diff = diffStatuses(defaultConfig, singlePartRundown, itemsState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: IMOSObjectStatus.READY, + } satisfies ItemStatusEntry) + } + + { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: false }) + + const diff = diffStatuses(defaultConfig, singlePartRundown, itemsState) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies ItemStatusEntry) + } + + { + const itemsState = structuredClone(singlePartRundown) + // itemsState.segments[0].parts[0].itemsReady.item0 = undefined + + const diff = diffStatuses(defaultConfig, singlePartRundown, itemsState) + expect(diff).toHaveLength(0) + } + }) + + test('remove items', () => { + { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: true }) + + const diff = diffStatuses(defaultConfig, itemsState, singlePartRundown) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: MOS_STATUS_UNKNOWN, + } satisfies ItemStatusEntry) + } + + { + const itemsState = structuredClone(singlePartRundown) + // itemsState.segments[0].parts[0].itemsReady.item0 = undefined + + const diff = diffStatuses(defaultConfig, itemsState, singlePartRundown) + expect(diff).toHaveLength(0) + } + }) + + test('change item state', 
() => { + const itemsState = structuredClone(singlePartRundown) + itemsState.segments[0].parts[0].itemsReady.push({ externalId: 'item0', ready: true }) + + const items2State = structuredClone(itemsState) + items2State.segments[0].parts[0].itemsReady[0].ready = false + + const diff = diffStatuses(defaultConfig, itemsState, items2State) + expect(diff).toHaveLength(1) + expect(diff[0]).toEqual({ + type: 'item', + rundownExternalId: 'external0', + storyId: 'part0', + itemId: 'item0', + mosStatus: IMOSObjectStatus.NOT_READY, + } satisfies ItemStatusEntry) + }) +}) diff --git a/packages/mos-gateway/src/mosStatus/diff.ts b/packages/mos-gateway/src/mosStatus/diff.ts new file mode 100644 index 0000000000..de50bb178c --- /dev/null +++ b/packages/mos-gateway/src/mosStatus/diff.ts @@ -0,0 +1,175 @@ +import { IMOSObjectStatus } from '@mos-connection/connector' +import type { MosDeviceStatusesConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' +import { + IngestPartNotifyItemReady, + IngestPartPlaybackStatus, + IngestRundownActiveStatus, + type IngestPartStatus, + type IngestRundownStatus, +} from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' + +export const MOS_STATUS_UNKNOWN = '' as IMOSObjectStatus // Force the status to be empty, which isn't a valid state in the enum + +export type SomeStatusEntry = StoryStatusEntry | ItemStatusEntry + +export interface ItemStatusEntry { + type: 'item' + rundownExternalId: string + storyId: string + itemId: string + mosStatus: IMOSObjectStatus +} + +export interface StoryStatusEntry { + type: 'story' + rundownExternalId: string + storyId: string + mosStatus: IMOSObjectStatus +} + +export function diffStatuses( + config: MosDeviceStatusesConfig, + previousStatuses: IngestRundownStatus | undefined, + newStatuses: IngestRundownStatus | undefined +): SomeStatusEntry[] { + const rundownExternalId = previousStatuses?.externalId ?? newStatuses?.externalId + + if ((!previousStatuses && !newStatuses) || !rundownExternalId) return [] + + const statuses: SomeStatusEntry[] = [] + + const previousStories = buildStoriesMap(previousStatuses) + const newStories = buildStoriesMap(newStatuses) + + // Process any removed stories first + for (const [storyId, story] of previousStories) { + if (!newStories.has(storyId)) { + // The story has been removed + statuses.push({ + type: 'story', + rundownExternalId, + storyId, + mosStatus: MOS_STATUS_UNKNOWN, + }) + + // Clear any items too + for (const itemStatus of story.itemsReady) { + statuses.push({ + type: 'item', + rundownExternalId, + storyId, + itemId: itemStatus.externalId, + mosStatus: MOS_STATUS_UNKNOWN, + }) + } + } + } + + // Then any remaining stories in order + for (const [storyId, status] of newStories) { + const previousStatus = previousStories.get(storyId) + + const newMosStatus = buildMosStatus(config, status.playbackStatus, status.isReady, newStatuses?.active) + if ( + newMosStatus !== null && + (!previousStatus || + buildMosStatus( + config, + previousStatus.playbackStatus, + previousStatus.isReady, + previousStatuses?.active + ) !== newMosStatus) + ) { + statuses.push({ + type: 'story', + rundownExternalId, + storyId, + mosStatus: newMosStatus, + }) + } + + const allItemIds = new Set() + const previousItemStatuses = new Map() + const newItemStatuses = new Map() + + for (const itemStatus of previousStatus?.itemsReady ?? 
[]) { + previousItemStatuses.set(itemStatus.externalId, itemStatus) + allItemIds.add(itemStatus.externalId) + } + for (const itemStatus of status.itemsReady) { + newItemStatuses.set(itemStatus.externalId, itemStatus) + allItemIds.add(itemStatus.externalId) + } + + // Diff each item in the story + for (const itemId of allItemIds) { + const newItemStatus = newItemStatuses.get(itemId) + const previousItemStatus = previousItemStatuses.get(itemId) + + const newMosStatus = newItemStatus + ? buildMosStatus(config, status.playbackStatus, newItemStatus.ready, newStatuses?.active) + : null + const previousMosStatus = + previousItemStatus && previousStatus + ? buildMosStatus( + config, + previousStatus.playbackStatus, + previousItemStatus.ready, + previousStatuses?.active + ) + : null + + if ((newMosStatus !== null || previousMosStatus !== null) && previousMosStatus !== newMosStatus) { + statuses.push({ + type: 'item', + rundownExternalId, + storyId, + itemId, + mosStatus: newMosStatus ?? MOS_STATUS_UNKNOWN, + }) + } + } + } + + return statuses +} + +function buildStoriesMap(state: IngestRundownStatus | undefined): Map { + const stories = new Map() + + if (state) { + for (const segment of state.segments) { + for (const part of segment.parts) { + stories.set(part.externalId, part) + } + } + } + + return stories +} + +function buildMosStatus( + config: MosDeviceStatusesConfig, + playbackStatus: IngestPartPlaybackStatus, + isReady: boolean | null | undefined, + active: IngestRundownStatus['active'] | undefined +): IMOSObjectStatus | null { + if (active === IngestRundownActiveStatus.INACTIVE) return MOS_STATUS_UNKNOWN + if (active === IngestRundownActiveStatus.REHEARSAL && !config.sendInRehearsal) return null + + switch (playbackStatus) { + case IngestPartPlaybackStatus.PLAY: + return IMOSObjectStatus.PLAY + case IngestPartPlaybackStatus.STOP: + return IMOSObjectStatus.STOP + default: + switch (isReady) { + case true: + return IMOSObjectStatus.READY + case false: + return IMOSObjectStatus.NOT_READY + default: + return MOS_STATUS_UNKNOWN + } + } +} diff --git a/packages/mos-gateway/src/mosStatus/handler.ts b/packages/mos-gateway/src/mosStatus/handler.ts new file mode 100644 index 0000000000..596ea78a6b --- /dev/null +++ b/packages/mos-gateway/src/mosStatus/handler.ts @@ -0,0 +1,163 @@ +import { + getMosTypes, + type IMOSItemStatus, + IMOSObjectStatus, + type IMOSStoryStatus, + type MosTypes, + type IMOSDevice, +} from '@mos-connection/connector' +import type { MosDeviceStatusesConfig } from '@sofie-automation/shared-lib/dist/generated/MosGatewayDevicesTypes' +import type { CoreMosDeviceHandler } from '../CoreMosDeviceHandler.js' +import { + assertNever, + type Observer, + PeripheralDevicePubSub, + PeripheralDevicePubSubCollectionsNames, + stringifyError, + SubscriptionId, +} from '@sofie-automation/server-core-integration' +import type { IngestRundownStatus } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import type { RundownId } from '@sofie-automation/shared-lib/dist/core/model/Ids' +import * as winston from 'winston' +import { Queue } from '@sofie-automation/server-core-integration/dist/lib/queue' +import { diffStatuses } from './diff.js' + +export class MosStatusHandler { + readonly #logger: winston.Logger + readonly #mosDevice: IMOSDevice + readonly #coreMosHandler: CoreMosDeviceHandler + readonly #config: MosDeviceStatusesConfig + readonly #mosTypes: MosTypes + + readonly #messageQueue = new Queue() + + #subId: SubscriptionId | undefined + #observer: Observer | undefined + + 
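+    // Set by dispose(); lets the autoSubscribe callback unsubscribe itself if it resolves after teardown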
#destroyed = false + + readonly #lastStatuses = new Map() + + constructor( + logger: winston.Logger, + mosDevice: IMOSDevice, + coreMosHandler: CoreMosDeviceHandler, + config: MosDeviceStatusesConfig, + strictMosTypes: boolean + ) { + if (!config.enabled) throw new Error('MosStatusHandler is not enabled') + + this.#logger = logger + this.#mosDevice = mosDevice + this.#coreMosHandler = coreMosHandler + this.#config = config + this.#mosTypes = getMosTypes(strictMosTypes) + + coreMosHandler.core + .autoSubscribe(PeripheralDevicePubSub.ingestDeviceRundownStatus, coreMosHandler.core.deviceId) + .then((subId) => { + this.#subId = subId + + if (this.#destroyed) coreMosHandler.core.unsubscribe(subId) + }) + .catch((e) => { + this.#logger.error(`Error subscribing to ingestDeviceRundownStatus: ${stringifyError(e)}`) + }) + + // Setup the observer immediately, which will trigger a resync upon the documents being added + this.#observer = coreMosHandler.core.observe(PeripheralDevicePubSubCollectionsNames.ingestRundownStatus) + this.#observer.added = (id) => this.#rundownChanged(id) + this.#observer.changed = (id) => this.#rundownChanged(id) + this.#observer.removed = (id) => this.#rundownChanged(id) + + this.#logger.info(`MosStatusHandler initialized for ${coreMosHandler.core.deviceId}`) + } + + #rundownChanged(id: RundownId): void { + const collection = this.#coreMosHandler.core.getCollection( + PeripheralDevicePubSubCollectionsNames.ingestRundownStatus + ) + + const newStatuses = collection.findOne(id) + const previousStatuses = this.#lastStatuses.get(id) + + // Update the last statuses store + if (newStatuses) { + this.#lastStatuses.set(id, newStatuses) + } else { + this.#lastStatuses.delete(id) + } + + const statusDiff = diffStatuses(this.#config, previousStatuses, newStatuses) + if (statusDiff.length === 0) return + + const diffTime = this.#mosTypes.mosTime.create(Date.now()) + + // Future: should this be done with some concurrency? 
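+        // Each diff entry is mapped to an IMOSItemStatus or IMOSStoryStatus and sent to the NRCS one at a
+        // time via #messageQueue; e.g. a 'story' entry becomes roughly
+        //   { RunningOrderId: rundownExternalId, ID: storyId, Status: mosStatus, Time: diffTime }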
+ for (const status of statusDiff) { + // New implementation 2022 only sends PLAY, never stop, after getting advice from AP + // Reason 1: NRK ENPS "sendt tid" (elapsed time) stopped working in ENPS 8/9 when doing STOP prior to PLAY + // Reason 2: there's a delay between the STOP (yellow line disappears) and PLAY (yellow line re-appears), which annoys the users + if (this.#config.onlySendPlay && status.mosStatus !== IMOSObjectStatus.PLAY) continue + + this.#messageQueue + .putOnQueue(async () => { + if (this.#isDeviceConnected()) { + if (status.type === 'item') { + const newStatus: IMOSItemStatus = { + RunningOrderId: this.#mosTypes.mosString128.create(status.rundownExternalId), + StoryId: this.#mosTypes.mosString128.create(status.storyId), + ID: this.#mosTypes.mosString128.create(status.itemId), + Status: status.mosStatus, + Time: diffTime, + } + this.#logger.info(`Sending Story status: ${JSON.stringify(newStatus)}`) + + // Send status + await this.#mosDevice.sendItemStatus(newStatus) + } else if (status.type === 'story') { + const newStatus: IMOSStoryStatus = { + RunningOrderId: this.#mosTypes.mosString128.create(status.rundownExternalId), + ID: this.#mosTypes.mosString128.create(status.storyId), + Status: status.mosStatus, + Time: diffTime, + } + this.#logger.info(`Sending Story status: ${JSON.stringify(newStatus)}`) + + // Send status + await this.#mosDevice.sendStoryStatus(newStatus) + } else { + this.#logger.debug(`Discarding unknown queued status: ${JSON.stringify(status)}`) + assertNever(status) + } + } else if (this.#config.onlySendPlay) { + // No need to do anything. + this.#logger.info(`Not connected, skipping play status: ${JSON.stringify(status)}`) + } else { + this.#logger.info(`Not connected, discarding status: ${JSON.stringify(status)}`) + } + }) + .catch((e) => { + this.#logger.error( + `Error sending of "${status.rundownExternalId}"-"${ + status.storyId + }" status to MOS device: ${stringifyError(e)}` + ) + }) + } + } + + #isDeviceConnected(): boolean { + return ( + this.#mosDevice.getConnectionStatus().PrimaryConnected || + this.#mosDevice.getConnectionStatus().SecondaryConnected + ) + } + + dispose(): void { + this.#destroyed = true + + this.#observer?.stop() + if (this.#subId) this.#coreMosHandler.core.unsubscribe(this.#subId) + } +} diff --git a/packages/mos-gateway/src/versions.ts b/packages/mos-gateway/src/versions.ts index c0b3293662..63ad2d96d8 100644 --- a/packages/mos-gateway/src/versions.ts +++ b/packages/mos-gateway/src/versions.ts @@ -20,7 +20,7 @@ export function getVersions(logger: Winston.Logger): { [packageName: string]: st } } } catch (e) { - logger.error(e) + logger.error(stringifyError(e)) } return versions } diff --git a/packages/openapi/api/definitions/studios.yaml b/packages/openapi/api/definitions/studios.yaml index b543628d27..1a0bc78ff3 100644 --- a/packages/openapi/api/definitions/studios.yaml +++ b/packages/openapi/api/definitions/studios.yaml @@ -561,6 +561,9 @@ components: allowPieceDirectPlay: type: boolean description: Whether to allow direct playing of a piece in the rundown + rundownGlobalPiecesPrepareTime: + type: number + description: How long before their start time a rundown owned piece be added to the timeline required: - frameRate diff --git a/packages/shared-lib/src/core/model/ShowStyle.ts b/packages/shared-lib/src/core/model/ShowStyle.ts index ee415a5be4..8c0d3d48be 100644 --- a/packages/shared-lib/src/core/model/ShowStyle.ts +++ b/packages/shared-lib/src/core/model/ShowStyle.ts @@ -107,6 +107,7 @@ export enum ClientActions { 
'rewindSegments' = 'rewindSegments', 'showEntireCurrentSegment' = 'showEntireCurrentSegment', 'miniShelfQueueAdLib' = 'miniShelfQueueAdLib', + 'editMode' = 'editMode', } export enum DeviceActions { diff --git a/packages/shared-lib/src/core/model/StudioSettings.ts b/packages/shared-lib/src/core/model/StudioSettings.ts index 09254ba173..1a117f1838 100644 --- a/packages/shared-lib/src/core/model/StudioSettings.ts +++ b/packages/shared-lib/src/core/model/StudioSettings.ts @@ -99,4 +99,9 @@ export interface IStudioSettings { * Override the piece content statuses with fake info - used for developing the UI */ mockPieceContentStatus?: boolean + + /** + * How long before their start time a rundown owned piece be added to the timeline + */ + rundownGlobalPiecesPrepareTime?: number } diff --git a/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts b/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts index f192cf7614..b6ebdc5665 100644 --- a/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts +++ b/packages/shared-lib/src/generated/MosGatewayDevicesTypes.ts @@ -31,4 +31,10 @@ export interface MosDeviceConfig { query: number } } + statuses: MosDeviceStatusesConfig +} +export interface MosDeviceStatusesConfig { + enabled: boolean + sendInRehearsal?: boolean + onlySendPlay?: boolean } diff --git a/packages/shared-lib/src/ingest/rundownStatus.ts b/packages/shared-lib/src/ingest/rundownStatus.ts new file mode 100644 index 0000000000..4e0159a4ca --- /dev/null +++ b/packages/shared-lib/src/ingest/rundownStatus.ts @@ -0,0 +1,47 @@ +import type { RundownId } from '../core/model/Ids.js' + +export interface IngestRundownStatus { + _id: RundownId + + /** Rundown external id */ + externalId: string + + active: IngestRundownActiveStatus + + segments: IngestSegmentStatus[] +} + +export enum IngestRundownActiveStatus { + ACTIVE = 'active', + REHEARSAL = 'rehearsal', + INACTIVE = 'inactive', +} + +export interface IngestSegmentStatus { + /** Segment external id */ + externalId: string + + parts: IngestPartStatus[] +} + +export interface IngestPartStatus { + /** Part external id */ + externalId: string + + isReady: boolean | null + + itemsReady: IngestPartNotifyItemReady[] + + playbackStatus: IngestPartPlaybackStatus +} + +export enum IngestPartPlaybackStatus { + UNKNOWN = 'unknown', + PLAY = 'play', + STOP = 'stop', +} + +export interface IngestPartNotifyItemReady { + externalId: string + ready: boolean +} diff --git a/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts b/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts index a195bfd245..92f4bae3ad 100644 --- a/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts +++ b/packages/shared-lib/src/peripheralDevice/peripheralDeviceAPI.ts @@ -26,7 +26,6 @@ export type PiecePlaybackStoppedResult = PiecePlaybackStartedResult export interface TriggerRegenerationCallbackData { rundownPlaylistId: RundownPlaylistId - // partInstanceId: PartInstanceId regenerationToken: string } diff --git a/packages/shared-lib/src/pubsub/peripheralDevice.ts b/packages/shared-lib/src/pubsub/peripheralDevice.ts index 421c191a8b..c4eb440d88 100644 --- a/packages/shared-lib/src/pubsub/peripheralDevice.ts +++ b/packages/shared-lib/src/pubsub/peripheralDevice.ts @@ -10,6 +10,7 @@ import { PeripheralDeviceId, RundownId, RundownPlaylistId } from '../core/model/ import { PeripheralDeviceCommand } from '../core/model/PeripheralDeviceCommand.js' import { ExpectedPlayoutItemPeripheralDevice } from '../expectedPlayoutItem.js' import { 
DeviceTriggerMountedAction, PreviewWrappedAdLib } from '../input-gateway/deviceTriggerPreviews.js' +import type { IngestRundownStatus } from '../ingest/rundownStatus.js' /** * Ids of possible DDP subscriptions for any PeripheralDevice. @@ -51,6 +52,13 @@ export enum PeripheralDevicePubSub { packageManagerPackageContainers = 'packageManagerPackageContainers', /** Package manager: The expected packages in the Studio of the PeripheralDevice */ packageManagerExpectedPackages = 'packageManagerExpectedPackages', + + // Ingest gateway: + + /** + * Ingest status of rundowns for a PeripheralDevice + */ + ingestDeviceRundownStatus = 'ingestDeviceRundownStatus', } /** @@ -114,6 +122,11 @@ export interface PeripheralDevicePubSubTypes { filterPlayoutDeviceIds: PeripheralDeviceId[] | undefined, token?: string ) => PeripheralDevicePubSubCollectionsNames.packageManagerExpectedPackages + + [PeripheralDevicePubSub.ingestDeviceRundownStatus]: ( + deviceId: PeripheralDeviceId, + token?: string + ) => PeripheralDevicePubSubCollectionsNames.ingestRundownStatus } export enum PeripheralDevicePubSubCollectionsNames { @@ -134,6 +147,8 @@ export enum PeripheralDevicePubSubCollectionsNames { packageManagerPlayoutContext = 'packageManagerPlayoutContext', packageManagerPackageContainers = 'packageManagerPackageContainers', packageManagerExpectedPackages = 'packageManagerExpectedPackages', + + ingestRundownStatus = 'ingestRundownStatus', } export type PeripheralDevicePubSubCollections = { @@ -154,4 +169,6 @@ export type PeripheralDevicePubSubCollections = { [PeripheralDevicePubSubCollectionsNames.packageManagerPlayoutContext]: PackageManagerPlayoutContext [PeripheralDevicePubSubCollectionsNames.packageManagerPackageContainers]: PackageManagerPackageContainers [PeripheralDevicePubSubCollectionsNames.packageManagerExpectedPackages]: PackageManagerExpectedPackage + + [PeripheralDevicePubSubCollectionsNames.ingestRundownStatus]: IngestRundownStatus } diff --git a/packages/webui/package.json b/packages/webui/package.json index e8aeb74436..12c85c488c 100644 --- a/packages/webui/package.json +++ b/packages/webui/package.json @@ -14,7 +14,7 @@ }, "homepage": "https://github.com/nrkno/sofie-core/blob/master/packages/webui#readme", "scripts": { - "dev": "vite --port=3005", + "dev": "vite --port=3005 --force", "build": "tsc -b && vite build", "build:main": "tsc -p tsconfig.app.json --noEmit", "check-types": "tsc -p tsconfig.app.json --noEmit", diff --git a/packages/webui/public/dev/fakeThumbnail.png b/packages/webui/public/dev/fakeThumbnail.png new file mode 100644 index 0000000000..669e7837dd Binary files /dev/null and b/packages/webui/public/dev/fakeThumbnail.png differ diff --git a/packages/webui/src/client/lib/RundownResolver.ts b/packages/webui/src/client/lib/RundownResolver.ts index cb2d0c1681..d0721ec33e 100644 --- a/packages/webui/src/client/lib/RundownResolver.ts +++ b/packages/webui/src/client/lib/RundownResolver.ts @@ -62,7 +62,7 @@ function fetchPiecesThatMayBeActiveForPart( segmentsToReceiveOnRundownEndFromSet: Set, rundownsToReceiveOnShowStyleEndFrom: RundownId[], /** Map of Pieces on Parts, passed through for performance */ - allPiecesCache?: Map + allPiecesCache?: Map ): Piece[] { let piecesStartingInPart: Piece[] const allPieces = allPiecesCache?.get(part._id) @@ -131,7 +131,7 @@ export function getPieceInstancesForPartInstance( currentPartInstancePieceInstances: PieceInstance[] | undefined, allowTestingAdlibsToPersist: boolean, /** Map of Pieces on Parts, passed through for performance */ - allPiecesCache?: Map, 
+ allPiecesCache?: Map, options?: FindOptions, pieceInstanceSimulation?: boolean ): PieceInstance[] { diff --git a/packages/webui/src/client/lib/VirtualElement.tsx b/packages/webui/src/client/lib/VirtualElement.tsx index 1b825b7292..77111cf5f0 100644 --- a/packages/webui/src/client/lib/VirtualElement.tsx +++ b/packages/webui/src/client/lib/VirtualElement.tsx @@ -1,5 +1,6 @@ -import React, { useCallback, useEffect, useLayoutEffect, useMemo, useState } from 'react' +import React, { useCallback, useEffect, useMemo, useState, useRef } from 'react' import { InView } from 'react-intersection-observer' +import { getViewPortScrollingState } from './viewPort.js' interface IElementMeasurements { width: string | number @@ -11,12 +12,12 @@ interface IElementMeasurements { id: string | undefined } -const OPTIMIZE_PERIOD = 5000 const IDLE_CALLBACK_TIMEOUT = 100 /** * This is a component that allows optimizing the amount of elements present in the DOM through replacing them * with placeholders when they aren't visible in the viewport. + * Scroll timing issues, should be handled in viewPort.tsx where the scrolling state is tracked. * * @export * @param {(React.PropsWithChildren<{ @@ -40,6 +41,7 @@ const IDLE_CALLBACK_TIMEOUT = 100 * } * @return {*} {(JSX.Element | null)} */ + export function VirtualElement({ initialShow, placeholderHeight, @@ -59,89 +61,272 @@ export function VirtualElement({ id?: string | undefined className?: string }>): JSX.Element | null { + const resizeObserverManager = ElementObserverManager.getInstance() const [inView, setInView] = useState(initialShow ?? false) + const [waitForInitialLoad, setWaitForInitialLoad] = useState(true) const [isShowingChildren, setIsShowingChildren] = useState(inView) + const [measurements, setMeasurements] = useState(null) const [ref, setRef] = useState(null) - const [childRef, setChildRef] = useState(null) - const isMeasured = !!measurements + // Timers for visibility changes: + const scrollTimeoutRef = useRef | undefined>(undefined) + const inViewChangeTimerRef = useRef | undefined>(undefined) + const skipInitialrunRef = useRef(true) + const isTransitioning = useRef(false) + + const isCurrentlyObserving = useRef(false) const styleObj = useMemo( () => ({ - width: width ?? measurements?.width ?? 'auto', - height: (measurements?.clientHeight ?? placeholderHeight ?? '0') + 'px', - marginTop: measurements?.marginTop, - marginLeft: measurements?.marginLeft, - marginRight: measurements?.marginRight, - marginBottom: measurements?.marginBottom, + width: width ?? 'auto', + height: ((placeholderHeight || ref?.clientHeight) ?? '0') + 'px', + marginTop: 0, + marginLeft: 0, + marginRight: 0, + marginBottom: 0, + // These properties are used to ensure that if a prior element is changed from + // placeHolder to element, the position of visible elements are not affected. + contentVisibility: 'auto', + containIntrinsicSize: `0 ${(placeholderHeight || ref?.clientHeight) ?? 
'0'}px`, + contain: 'size layout', }), - [width, measurements, placeholderHeight] + [width, placeholderHeight] ) - const onVisibleChanged = useCallback((visible: boolean) => { - setInView(visible) - }, []) + const handleResize = useCallback(() => { + if (ref) { + // Show children during measurement + setIsShowingChildren(true) + + requestAnimationFrame(() => { + const measurements = measureElement(ref, placeholderHeight) + if (measurements) { + setMeasurements(measurements) + + // Only hide children again if not in view + if (!inView && measurements.clientHeight > 0) { + setIsShowingChildren(false) + } else { + setIsShowingChildren(true) + } + } + }) + } + }, [ref, inView, placeholderHeight]) + // failsafe to ensure visible elements if resizing happens while scrolling useEffect(() => { - if (inView === true) { + if (!isShowingChildren) { + const checkVisibilityByPosition = () => { + if (ref) { + const rect = ref.getBoundingClientRect() + const isInViewport = rect.top < window.innerHeight && rect.bottom > 0 + + if (isInViewport) { + setIsShowingChildren(true) + setInView(true) + } + } + } + + // Check every second + const positionCheckInterval = setInterval(checkVisibilityByPosition, 1000) + + return () => { + clearInterval(positionCheckInterval) + } + } + }, [ref, isShowingChildren]) + + // Ensure elements are visible after a fast scroll: + useEffect(() => { + const checkVisibilityOnScroll = () => { + if (inView && !isShowingChildren) { + setIsShowingChildren(true) + } + + // Add a check after scroll stops + if (scrollTimeoutRef.current) { + clearTimeout(scrollTimeoutRef.current) + } + scrollTimeoutRef.current = setTimeout(() => { + // Recheck visibility after scroll appears to have stopped + if (inView && !isShowingChildren) { + setIsShowingChildren(true) + } + }, 200) + } + + window.addEventListener('scroll', checkVisibilityOnScroll, { passive: true }) + + return () => { + window.removeEventListener('scroll', checkVisibilityOnScroll) + if (scrollTimeoutRef.current) { + clearTimeout(scrollTimeoutRef.current) + } + } + }, [inView, isShowingChildren]) + + useEffect(() => { + if (inView) { setIsShowingChildren(true) + } + + // Startup skip: + if (skipInitialrunRef.current) { + skipInitialrunRef.current = false return } - let idleCallback: number | undefined - const optimizeTimeout = window.setTimeout(() => { - idleCallback = window.requestIdleCallback( - () => { - if (childRef) { - setMeasurements(measureElement(childRef)) + if (isTransitioning.current) { + return + } + + isTransitioning.current = true + + // Clear any existing timers + if (inViewChangeTimerRef.current) { + clearTimeout(inViewChangeTimerRef.current) + inViewChangeTimerRef.current = undefined + } + + // Delay the visibility change to avoid flickering + // But low enough for scrolling to be responsive + inViewChangeTimerRef.current = setTimeout(() => { + try { + if (inView) { + if (ref) { + if (!isCurrentlyObserving.current) { + resizeObserverManager.observe(ref, handleResize) + isCurrentlyObserving.current = true + } + } + } else { + if (ref && isCurrentlyObserving.current) { + resizeObserverManager.unobserve(ref) + isCurrentlyObserving.current = false } setIsShowingChildren(false) - }, - { - timeout: IDLE_CALLBACK_TIMEOUT, } - ) - }, OPTIMIZE_PERIOD) + } catch (error) { + console.error('Error in visibility change handler:', error) + } finally { + isTransitioning.current = false + inViewChangeTimerRef.current = undefined + } + }, 100) + }, [inView, ref, handleResize, resizeObserverManager]) - return () => { - if 
(idleCallback) { - window.cancelIdleCallback(idleCallback) + const onVisibleChanged = useCallback( + (visible: boolean) => { + // Only update state if there's a change + if (inView !== visible) { + setInView(visible) } + }, + [inView] + ) - window.clearTimeout(optimizeTimeout) + const isScrolling = (): boolean => { + // Don't do updates while scrolling: + if (getViewPortScrollingState().isProgrammaticScrollInProgress) { + return true } - }, [childRef, inView]) + // And wait if a programmatic scroll was done recently: + const timeSinceLastProgrammaticScroll = Date.now() - getViewPortScrollingState().lastProgrammaticScrollTime + if (timeSinceLastProgrammaticScroll < 100) { + return true + } + return false + } - const showPlaceholder = !isShowingChildren && (!initialShow || isMeasured) + useEffect(() => { + // Setup initial observer if element is in view + if (ref && inView && !isCurrentlyObserving.current) { + resizeObserverManager.observe(ref, handleResize) + isCurrentlyObserving.current = true + } + + // Cleanup function + return () => { + // Clean up resize observer + if (ref && isCurrentlyObserving.current) { + resizeObserverManager.unobserve(ref) + isCurrentlyObserving.current = false + } + + if (inViewChangeTimerRef.current) { + clearTimeout(inViewChangeTimerRef.current) + } + } + }, [ref, inView, handleResize]) - useLayoutEffect(() => { - if (!ref || showPlaceholder) return + useEffect(() => { + if (inView === true) { + setIsShowingChildren(true) - const el = ref?.firstElementChild - if (!el || el.classList.contains('virtual-element-placeholder') || !(el instanceof HTMLElement)) return + // Schedule a measurement after a short delay + if (waitForInitialLoad && ref) { + const initialMeasurementTimeout = window.setTimeout(() => { + const measurements = measureElement(ref, placeholderHeight) + if (measurements) { + setMeasurements(measurements) + setWaitForInitialLoad(false) + } + }, 800) - setChildRef(el) + return () => { + window.clearTimeout(initialMeasurementTimeout) + } + } + return + } let idleCallback: number | undefined - const refreshSizingTimeout = window.setTimeout(() => { + let optimizeTimeout: number | undefined + + const scheduleOptimization = () => { + if (optimizeTimeout) { + window.clearTimeout(optimizeTimeout) + } + // Don't proceed if we're scrolling + if (isScrolling()) { + // Reschedule for after the scroll should be complete + const scrollDelay = 400 + window.clearTimeout(optimizeTimeout) + optimizeTimeout = window.setTimeout(scheduleOptimization, scrollDelay) + return + } idleCallback = window.requestIdleCallback( () => { - setMeasurements(measureElement(el)) + // Measure the entire wrapper element instead of just the childRef + if (ref) { + const measurements = measureElement(ref, placeholderHeight) + if (measurements) { + setMeasurements(measurements) + } + } + setIsShowingChildren(false) }, { timeout: IDLE_CALLBACK_TIMEOUT, } ) - }, 1000) + } + + // Schedule the optimization: + scheduleOptimization() return () => { if (idleCallback) { window.cancelIdleCallback(idleCallback) } - window.clearTimeout(refreshSizingTimeout) + if (optimizeTimeout) { + window.clearTimeout(optimizeTimeout) + } } - }, [ref, showPlaceholder]) + }, [ref, inView, placeholderHeight]) return ( -

- {showPlaceholder ? ( +
+ {!isShowingChildren ? (
) } +function measureElement(wrapperEl: HTMLDivElement, placeholderHeight?: number): IElementMeasurements | null { + if (!wrapperEl || !wrapperEl.firstElementChild) { + return null + } -function measureElement(el: HTMLElement): IElementMeasurements | null { + const el = wrapperEl.firstElementChild as HTMLElement const style = window.getComputedStyle(el) - const clientRect = el.getBoundingClientRect() + let segmentTimeline: Element | null = null + let dashboardPanel: Element | null = null + + segmentTimeline = wrapperEl.querySelector('.segment-timeline') + dashboardPanel = wrapperEl.querySelector('.dashboard-panel') + + if (segmentTimeline) { + const segmentRect = segmentTimeline.getBoundingClientRect() + let totalHeight = segmentRect.height + + if (dashboardPanel) { + const panelRect = dashboardPanel.getBoundingClientRect() + totalHeight += panelRect.height + } + + if (totalHeight < 40) { + totalHeight = placeholderHeight ?? el.clientHeight + } + + return { + width: style.width || 'auto', + clientHeight: totalHeight, + marginTop: style.marginTop || undefined, + marginBottom: style.marginBottom || undefined, + marginLeft: style.marginLeft || undefined, + marginRight: style.marginRight || undefined, + id: el.id, + } + } + + // Fallback to just measuring the element itself if wrapper isn't found return { width: style.width || 'auto', - clientHeight: clientRect.height, + clientHeight: placeholderHeight ?? el.clientHeight, marginTop: style.marginTop || undefined, marginBottom: style.marginBottom || undefined, marginLeft: style.marginLeft || undefined, @@ -180,3 +409,88 @@ function measureElement(el: HTMLElement): IElementMeasurements | null { id: el.id, } } + +// Singleton class to manage ResizeObserver instances +export class ElementObserverManager { + private static instance: ElementObserverManager + private resizeObserver: ResizeObserver + private mutationObserver: MutationObserver + private observedElements: Map void> + + private constructor() { + this.observedElements = new Map() + + // Configure ResizeObserver + this.resizeObserver = new ResizeObserver((entries) => { + entries.forEach((entry) => { + const element = entry.target as HTMLElement + const callback = this.observedElements.get(element) + if (callback) { + callback() + } + }) + }) + + // Configure MutationObserver + this.mutationObserver = new MutationObserver((mutations) => { + const targets = new Set() + + mutations.forEach((mutation) => { + const target = mutation.target as HTMLElement + // Find the closest observed element + let element = target + while (element) { + if (this.observedElements.has(element)) { + targets.add(element) + break + } + if (!element.parentElement) break + element = element.parentElement + } + }) + + // Call callbacks for affected elements + targets.forEach((element) => { + const callback = this.observedElements.get(element) + if (callback) callback() + }) + }) + } + + public static getInstance(): ElementObserverManager { + if (!ElementObserverManager.instance) { + ElementObserverManager.instance = new ElementObserverManager() + } + return ElementObserverManager.instance + } + + public observe(element: HTMLElement, callback: () => void): void { + if (!element) return + + this.observedElements.set(element, callback) + this.resizeObserver.observe(element) + this.mutationObserver.observe(element, { + childList: true, + subtree: true, + attributes: true, + characterData: true, + }) + } + + public unobserve(element: HTMLElement): void { + if (!element) return + this.observedElements.delete(element) + 
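+        // With the callback removed from the map, any in-flight ResizeObserver/MutationObserver
+        // callbacks for this element become no-ops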
this.resizeObserver.unobserve(element) + + // Disconnect and reconnect mutation observer to refresh the list of observed elements + this.mutationObserver.disconnect() + this.observedElements.forEach((_, el) => { + this.mutationObserver.observe(el, { + childList: true, + subtree: true, + attributes: true, + characterData: true, + }) + }) + } +} diff --git a/packages/webui/src/client/lib/rundown.ts b/packages/webui/src/client/lib/rundown.ts index b8208b18c9..2f70fbd347 100644 --- a/packages/webui/src/client/lib/rundown.ts +++ b/packages/webui/src/client/lib/rundown.ts @@ -26,6 +26,7 @@ import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/Rund import { literal, protectString, groupByToMap } from './tempLib.js' import { getCurrentTime } from './systemTime.js' import { + createPartCurrentTimes, processAndPrunePieceInstanceTimings, resolvePrunedPieceInstance, } from '@sofie-automation/corelib/dist/playout/processAndPrune' @@ -503,19 +504,20 @@ export namespace RundownUtils { pieceInstanceSimulation ) - const partStarted = partE.instance.timings?.plannedStartedPlayback - const nowInPart = partStarted ? getCurrentTime() - partStarted : 0 - + const partTimes = createPartCurrentTimes( + getCurrentTime(), + partE.instance.timings?.plannedStartedPlayback + ) const preprocessedPieces = processAndPrunePieceInstanceTimings( showStyleBase.sourceLayers, rawPieceInstances, - nowInPart, + partTimes, includeDisabledPieces ) // insert items into the timeline for resolution partE.pieces = preprocessedPieces.map((piece) => { - const resolvedPiece = resolvePrunedPieceInstance(nowInPart, piece) + const resolvedPiece = resolvePrunedPieceInstance(partTimes, piece) const resPiece: PieceExtended = { instance: piece, renderedDuration: resolvedPiece.resolvedDuration ?? null, diff --git a/packages/webui/src/client/lib/rundownLayouts.ts b/packages/webui/src/client/lib/rundownLayouts.ts index fffbe01be4..ecee8fa8d8 100644 --- a/packages/webui/src/client/lib/rundownLayouts.ts +++ b/packages/webui/src/client/lib/rundownLayouts.ts @@ -4,7 +4,10 @@ import { RundownPlaylistActivationId, StudioId, } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { UIShowStyleBase } from '@sofie-automation/meteor-lib/dist/api/showStyles' import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { @@ -138,13 +141,11 @@ export function getUnfinishedPieceInstancesReactive( playlistActivationId: playlistActivationId, }).fetch() - const nowInPart = partInstance.timings?.plannedStartedPlayback - ? 
now - partInstance.timings.plannedStartedPlayback - : 0 + const partTimes = createPartCurrentTimes(now, partInstance.timings?.plannedStartedPlayback) prospectivePieces = processAndPrunePieceInstanceTimings( showStyleBase.sourceLayers, prospectivePieces, - nowInPart + partTimes ) let nearestEnd = Number.POSITIVE_INFINITY diff --git a/packages/webui/src/client/lib/rundownPlaylistUtil.ts b/packages/webui/src/client/lib/rundownPlaylistUtil.ts index 62c20d4539..6164a21c17 100644 --- a/packages/webui/src/client/lib/rundownPlaylistUtil.ts +++ b/packages/webui/src/client/lib/rundownPlaylistUtil.ts @@ -164,7 +164,7 @@ export class RundownPlaylistClientUtil { static getPiecesForParts( parts: Array, piecesOptions?: Omit, 'projection'> // We are mangling fields, so block projection - ): Map { + ): Map { const allPieces = Pieces.find( { startPartId: { $in: parts } }, { diff --git a/packages/webui/src/client/lib/shelf.ts b/packages/webui/src/client/lib/shelf.ts index 0df8b5d75c..a79cfd301d 100644 --- a/packages/webui/src/client/lib/shelf.ts +++ b/packages/webui/src/client/lib/shelf.ts @@ -3,13 +3,17 @@ import { PartInstance } from '@sofie-automation/meteor-lib/dist/collections/Part import { PieceInstance } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import { getUnfinishedPieceInstancesReactive } from './rundownLayouts.js' import { UIShowStyleBase } from '@sofie-automation/meteor-lib/dist/api/showStyles' import { PieceId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { PieceInstances } from '../collections/index.js' import { ReadonlyDeep } from 'type-fest' import { AdLibPieceUi } from '@sofie-automation/meteor-lib/dist/uiTypes/Adlib' +import { getCurrentTimeReactive } from './currentTimeReactive.js' export type { AdLibPieceUi } from '@sofie-automation/meteor-lib/dist/uiTypes/Adlib' @@ -60,10 +64,11 @@ export function getNextPiecesReactive( }).fetch() } + const partTimes = createPartCurrentTimes(getCurrentTimeReactive(), null) prospectivePieceInstances = processAndPrunePieceInstanceTimings( showsStyleBase.sourceLayers, prospectivePieceInstances, - 0 + partTimes ) return prospectivePieceInstances diff --git a/packages/webui/src/client/lib/ui/pieceUiClassNames.ts b/packages/webui/src/client/lib/ui/pieceUiClassNames.ts index 6ccad63c82..0b5e350290 100644 --- a/packages/webui/src/client/lib/ui/pieceUiClassNames.ts +++ b/packages/webui/src/client/lib/ui/pieceUiClassNames.ts @@ -19,7 +19,8 @@ export function pieceUiClassNames( uiState?: { leftAnchoredWidth: number rightAnchoredWidth: number - } + }, + draggable?: boolean ): string { const typeClass = layerType ? 
RundownUtils.getSourceLayerClassName(layerType) : '' @@ -32,10 +33,12 @@ export function pieceUiClassNames( : undefined, 'super-infinite': + !innerPiece.enable.isAbsolute && innerPiece.lifespan !== PieceLifespan.WithinPart && innerPiece.lifespan !== PieceLifespan.OutOnSegmentChange && innerPiece.lifespan !== PieceLifespan.OutOnSegmentEnd, 'infinite-starts': + !innerPiece.enable.isAbsolute && innerPiece.lifespan !== PieceLifespan.WithinPart && innerPiece.lifespan !== PieceLifespan.OutOnSegmentChange && innerPiece.lifespan !== PieceLifespan.OutOnSegmentEnd && @@ -57,5 +60,7 @@ export function pieceUiClassNames( 'invert-flash': highlight, 'element-selected': selected, + + 'draggable-element': draggable, }) } diff --git a/packages/webui/src/client/lib/viewPort.ts b/packages/webui/src/client/lib/viewPort.ts index cb03a8102b..a9d72f1538 100644 --- a/packages/webui/src/client/lib/viewPort.ts +++ b/packages/webui/src/client/lib/viewPort.ts @@ -10,8 +10,24 @@ import { parse as queryStringParse } from 'query-string' const HEADER_MARGIN = 24 // TODOSYNC: TV2 uses 15. If it's needed to be different, it needs to be made generic somehow.. const FALLBACK_HEADER_HEIGHT = 65 -let focusInterval: NodeJS.Timeout | undefined -let _dontClearInterval = false +// Replace the global variable with a more structured approach +const focusState = { + interval: undefined as NodeJS.Timeout | undefined, + isScrolling: false, + startTime: 0, +} + +const viewPortScrollingState = { + isProgrammaticScrollInProgress: false, + lastProgrammaticScrollTime: 0, +} + +export function getViewPortScrollingState(): { + isProgrammaticScrollInProgress: boolean + lastProgrammaticScrollTime: number +} { + return viewPortScrollingState +} export function maintainFocusOnPartInstance( partInstanceId: PartInstanceId, @@ -19,32 +35,47 @@ export function maintainFocusOnPartInstance( forceScroll?: boolean, noAnimation?: boolean ): void { - const startTime = Date.now() - const focus = () => { - if (Date.now() - startTime < timeWindow) { - _dontClearInterval = true - scrollToPartInstance(partInstanceId, forceScroll, noAnimation) - .then(() => { - _dontClearInterval = false - }) - .catch(() => { - _dontClearInterval = false - }) - } else { + focusState.startTime = Date.now() + + const focus = async () => { + // Only proceed if we're not already scrolling and within the time window + if (!focusState.isScrolling && Date.now() - focusState.startTime < timeWindow) { + focusState.isScrolling = true + + try { + await scrollToPartInstance(partInstanceId, forceScroll, noAnimation) + } catch (_error) { + // Handle error if needed + } finally { + focusState.isScrolling = false + } + } else if (Date.now() - focusState.startTime >= timeWindow) { quitFocusOnPart() } } + document.addEventListener('wheel', onWheelWhenMaintainingFocus, { once: true, capture: true, passive: true, }) - focusInterval = setInterval(focus, 500) + + // Clear any existing interval before creating a new one + if (focusState.interval) { + clearInterval(focusState.interval) + } + focus() + .then(() => { + focusState.interval = setInterval(focus, 500) + }) + .catch(() => { + // Handle error if needed + }) } export function isMaintainingFocus(): boolean { - return !!focusInterval + return !!focusState.interval } function onWheelWhenMaintainingFocus() { @@ -55,9 +86,10 @@ function quitFocusOnPart() { document.removeEventListener('wheel', onWheelWhenMaintainingFocus, { capture: true, }) - if (!_dontClearInterval && focusInterval) { - clearInterval(focusInterval) - focusInterval = undefined + + 
if (focusState.interval) { + clearInterval(focusState.interval) + focusState.interval = undefined } } @@ -69,11 +101,7 @@ export async function scrollToPartInstance( quitFocusOnPart() const partInstance = UIPartInstances.findOne(partInstanceId) if (partInstance) { - RundownViewEventBus.emit(RundownViewEvents.GO_TO_PART_INSTANCE, { - segmentId: partInstance.segmentId, - partInstanceId: partInstanceId, - }) - return scrollToSegment(partInstance.segmentId, forceScroll, noAnimation, partInstanceId) + return scrollToSegment(partInstance.segmentId, forceScroll, noAnimation) } throw new Error('Could not find PartInstance') } @@ -126,39 +154,10 @@ let currentScrollingElement: HTMLElement | undefined export async function scrollToSegment( elementToScrollToOrSegmentId: HTMLElement | SegmentId, forceScroll?: boolean, - noAnimation?: boolean, - partInstanceId?: PartInstanceId + noAnimation?: boolean ): Promise { - const getElementToScrollTo = (showHistory: boolean): HTMLElement | null => { - if (isProtectedString(elementToScrollToOrSegmentId)) { - let targetElement = document.querySelector( - `#${SEGMENT_TIMELINE_ELEMENT_ID}${elementToScrollToOrSegmentId}` - ) - - if (showHistory && Settings.followOnAirSegmentsHistory && targetElement) { - let i = Settings.followOnAirSegmentsHistory - while (i > 0) { - // Segment timeline is wrapped by
<div>...</div>
when rendered - const next: any = targetElement?.parentElement?.parentElement?.previousElementSibling?.children - .item(0) - ?.children.item(0) - if (next) { - targetElement = next - i-- - } else { - i = 0 - } - } - } - - return targetElement - } - - return elementToScrollToOrSegmentId - } - - const elementToScrollTo: HTMLElement | null = getElementToScrollTo(false) - const historyTarget: HTMLElement | null = getElementToScrollTo(true) + const elementToScrollTo: HTMLElement | null = getElementToScrollTo(elementToScrollToOrSegmentId, false) + const historyTarget: HTMLElement | null = getElementToScrollTo(elementToScrollToOrSegmentId, true) // historyTarget will be === to elementToScrollTo if history is not used / not found if (!elementToScrollTo || !historyTarget) { @@ -169,24 +168,71 @@ export async function scrollToSegment( historyTarget, forceScroll || !regionInViewport(historyTarget, elementToScrollTo), noAnimation, - false, - partInstanceId + false ) } +function getElementToScrollTo( + elementToScrollToOrSegmentId: HTMLElement | SegmentId, + showHistory: boolean +): HTMLElement | null { + if (isProtectedString(elementToScrollToOrSegmentId)) { + // Get the current segment element + let targetElement = document.querySelector( + `#${SEGMENT_TIMELINE_ELEMENT_ID}${elementToScrollToOrSegmentId}` + ) + if (showHistory && Settings.followOnAirSegmentsHistory && targetElement) { + let i = Settings.followOnAirSegmentsHistory + + // Find previous segments + while (i > 0 && targetElement) { + const currentSegmentId = targetElement.id + const allSegments = Array.from(document.querySelectorAll(`[id^="${SEGMENT_TIMELINE_ELEMENT_ID}"]`)) + + // Find current segment's index in the array of all segments + const currentIndex = allSegments.findIndex((el) => el.id === currentSegmentId) + + // Find the previous segment + if (currentIndex > 0) { + targetElement = allSegments[currentIndex - 1] as HTMLElement + i-- + } else { + // No more previous segments + break + } + } + } + + return targetElement + } + + return elementToScrollToOrSegmentId +} + +let pendingFirstStageTimeout: NodeJS.Timeout | undefined + async function innerScrollToSegment( elementToScrollTo: HTMLElement, forceScroll?: boolean, noAnimation?: boolean, - secondStage?: boolean, - partInstanceId?: PartInstanceId + secondStage?: boolean ): Promise { if (!secondStage) { + if (pendingFirstStageTimeout) { + clearTimeout(pendingFirstStageTimeout) + pendingFirstStageTimeout = undefined + } currentScrollingElement = elementToScrollTo } else if (secondStage && elementToScrollTo !== currentScrollingElement) { throw new Error('Scroll overriden by another scroll') } + // Ensure that the element is ready to be scrolled: + if (!secondStage) { + await new Promise((resolve) => setTimeout(resolve, 100)) + } + await new Promise((resolve) => requestAnimationFrame(resolve)) + let { top, bottom } = elementToScrollTo.getBoundingClientRect() top = Math.floor(top) bottom = Math.floor(bottom) @@ -199,36 +245,25 @@ async function innerScrollToSegment( return scrollToPosition(top + window.scrollY, noAnimation).then( async () => { - // retry scroll in case we have to load some data - if (pendingSecondStageScroll) window.cancelIdleCallback(pendingSecondStageScroll) return new Promise((resolve, reject) => { - // scrollToPosition will resolve after some time, at which point a new pendingSecondStageScroll may have been created - - pendingSecondStageScroll = window.requestIdleCallback( - () => { - if (!secondStage) { - let { top, bottom } = 
elementToScrollTo.getBoundingClientRect() - top = Math.floor(top) - bottom = Math.floor(bottom) - - if (bottom > Math.floor(window.innerHeight) || top < headerHeight) { - innerScrollToSegment( - elementToScrollTo, - forceScroll, - true, - true, - partInstanceId - ).then(resolve, reject) - } else { - resolve(true) - } + if (!secondStage) { + // Wait to settle 1 atemt to scroll + pendingFirstStageTimeout = setTimeout(() => { + pendingFirstStageTimeout = undefined + let { top, bottom } = elementToScrollTo.getBoundingClientRect() + top = Math.floor(top) + bottom = Math.floor(bottom) + if (bottom > Math.floor(window.innerHeight) || top < headerHeight) { + // If not in place atempt to scroll again + innerScrollToSegment(elementToScrollTo, forceScroll, true, true).then(resolve, reject) } else { - currentScrollingElement = undefined resolve(true) } - }, - { timeout: 250 } - ) + }, 420) + } else { + currentScrollingElement = undefined + resolve(true) + } }) }, (error) => { @@ -258,44 +293,29 @@ function getRegionPosition(topElement: HTMLElement, bottomElement: HTMLElement): return { top, bottom } } -let scrollToPositionRequest: number | undefined -let scrollToPositionRequestReject: ((reason?: any) => void) | undefined - export async function scrollToPosition(scrollPosition: number, noAnimation?: boolean): Promise { + // Calculate the exact position + const headerOffset = getHeaderHeight() + HEADER_MARGIN + const targetTop = Math.max(0, scrollPosition - headerOffset) + if (noAnimation) { window.scroll({ - top: Math.max(0, scrollPosition - getHeaderHeight() - HEADER_MARGIN), + top: targetTop, left: 0, + behavior: 'instant', }) return Promise.resolve() } else { - return new Promise((resolve, reject) => { - if (scrollToPositionRequest !== undefined) window.cancelIdleCallback(scrollToPositionRequest) - if (scrollToPositionRequestReject !== undefined) - scrollToPositionRequestReject('Prevented by another scroll') - - scrollToPositionRequestReject = reject - const currentTop = window.scrollY - const targetTop = Math.max(0, scrollPosition - getHeaderHeight() - HEADER_MARGIN) - scrollToPositionRequest = window.requestIdleCallback( - () => { - window.scroll({ - top: targetTop, - left: 0, - behavior: 'smooth', - }) - setTimeout( - () => { - resolve() - scrollToPositionRequestReject = undefined - // this formula was experimentally created from Chrome 86 behavior - }, - 3000 * Math.log(Math.abs(currentTop - targetTop) / 2000 + 1) - ) - }, - { timeout: 250 } - ) + viewPortScrollingState.isProgrammaticScrollInProgress = true + viewPortScrollingState.lastProgrammaticScrollTime = Date.now() + + window.scroll({ + top: targetTop, + left: 0, + behavior: 'smooth', }) + await new Promise((resolve) => setTimeout(resolve, 300)) + viewPortScrollingState.isProgrammaticScrollInProgress = false } } diff --git a/packages/webui/src/client/styles/elementSelected.scss b/packages/webui/src/client/styles/elementSelected.scss index 2dcd17b97b..2cb9b2f73b 100644 --- a/packages/webui/src/client/styles/elementSelected.scss +++ b/packages/webui/src/client/styles/elementSelected.scss @@ -18,3 +18,7 @@ $glow-color: rgba(255, 255, 255, 0.58); } } } + +.draggable-element { + border: dotted white 1px; +} diff --git a/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss b/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss index 2f4e5c328a..6e0e1eff21 100644 --- a/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss +++ b/packages/webui/src/client/styles/shelf/dashboard-rundownView.scss @@ -11,6 +11,9 
@@ } .dashboard-panel__panel__button { + margin-top: 10px; + height: 110px; + max-width: 170px !important; > .dashboard-panel__panel__button__content { display: grid; grid-template-columns: 1fr min-content; @@ -31,7 +34,7 @@ > .dashboard-panel__panel__button__thumbnail { position: relative; - height: auto; + height: 85px; z-index: 1; overflow: hidden; grid-column: auto / span 2; diff --git a/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx b/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx index 829e4b4518..efff6d8649 100644 --- a/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx +++ b/packages/webui/src/client/ui/ClipTrimPanel/ClipTrimDialog.tsx @@ -56,6 +56,9 @@ export function ClipTrimDialog({ const handleAccept = useCallback((e: SomeEvent) => { onClose?.() + const startPartId = selectedPiece.startPartId + if (!startPartId) return + doUserAction( t, e, @@ -65,7 +68,7 @@ export function ClipTrimDialog({ e, ts, playlistId, - selectedPiece.startPartId, + startPartId, selectedPiece._id, state.inPoint, state.duration diff --git a/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx b/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx index 7c138b3d55..430807894e 100644 --- a/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx +++ b/packages/webui/src/client/ui/MediaStatus/MediaStatus.tsx @@ -458,14 +458,14 @@ function usePieceItems(partIds: PartId[], partMeta: Map) { const pieceItems = useTracker( () => pieces.map((piece) => { - const meta = partMeta.get(piece.startPartId) + const meta = piece.startPartId && partMeta.get(piece.startPartId) if (!meta) return return getListItemFromPieceAndPartMeta( piece._id, piece, meta, - piece.startPartId, + piece.startPartId ?? undefined, undefined, meta.segmentId, false diff --git a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss index ab5105bcc2..e7182b8e32 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss +++ b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUp.scss @@ -3,17 +3,16 @@ .preview-popUp { border: 1px solid var(--sofie-segment-layer-hover-popup-border); background: var(--sofie-segment-layer-hover-popup-background); - box-shadow: 0 0 4px 0 rgba(0, 0, 0, 0.5); + box-shadow: 0 0 20px 0 rgba(0, 0, 0, 0.8); border-radius: 5px; overflow: hidden; pointer-events: none; - box-shadow: 0 0 20px 0 rgba(0, 0, 0, 0.6); - z-index: 9999; &--large { width: 482px; + padding-bottom: 10px; --preview-max-dimension: 480; } @@ -25,18 +24,65 @@ &--hidden { visibility: none; } + + font-family: Roboto Flex; + + font-style: normal; + font-weight: 500; + font-size: 16px; + line-height: 110%; + /* identical to box height, or 15px */ + letter-spacing: 0.02em; + font-feature-settings: + 'tnum', + 'liga' off; + color: #ffffff; + font-variation-settings: + 'GRAD' 0, + 'opsz' 15, + 'slnt' 0, + 'wdth' 30, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; } .preview-popUp__preview { width: 100%; - font-family: 'Roboto Condensed'; - font-size: 0.9375rem; // 15px; .preview-popUp__script, .preview-popUp__script-comment, .preview-popUp__script-last-modified { - padding: 0.4em 0.4em 0.4em 0.6em; - font-style: italic; + padding: 5px; + padding-left: 2%; + padding-right: 2%; + font-weight: 300; + font-size: 16px; + line-height: 120%; + letter-spacing: 0.03em; + font-feature-settings: + 'tnum', + 'liga' off; + + color: #ffffff; + font-variation-settings: + 'GRAD' 0, + 'opsz' 
16, + 'slnt' -10, + 'wdth' 75, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; } .preview-popUp__script-comment, @@ -54,6 +100,72 @@ letter-spacing: 0.02rem; padding: 5px; + padding-left: 2%; + } + + .preview-popUp__element-with-time-info { + width: 100%; + display: flex; + + margin-bottom: 7px; + + .preview-popUp__element-with-time-info__layer-color { + height: 13px; + aspect-ratio: 1; + margin-left: 2%; + margin-top: 7px; + flex-shrink: 0; + @include item-type-colors(); + } + + .preview-popUp__element-with-time-info__text { + margin: 5px; + width: calc(100% - 35px); + flex-grow: 1; + } + + .preview-popUp__element-with-time-info__timing { + margin-left: 5px; + overflow: none; + white-space: nowrap; + text-overflow: ellipsis; + font-feature-settings: 'liga' off; + + font-weight: 500; + line-height: 100%; /* 15px */ + + .label { + font-weight: 100; + line-height: 100%; + /* identical to box height, or 15px */ + letter-spacing: 0.02em; + font-feature-settings: + 'tnum', + 'liga' off; + color: #b2b2b2; + font-variation-settings: + 'GRAD' 0, + 'opsz' 30, + 'slnt' 0, + 'wdth' 25, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; + } + } + } + + .preview-popup__separation-line { + width: 96%; + margin-left: 2%; + background-color: #5b5b5b; + margin-top: 0px; + margin-bottom: 0px; } .preview-popUp__warning { @@ -174,21 +286,42 @@ } .preview-popUp__in-out-words { - letter-spacing: 0em; + font-weight: 300; + font-size: 16px; + line-height: 100%; + letter-spacing: 0.02em; + font-feature-settings: + 'tnum', + 'liga' off; + color: #ffffff; + font-variation-settings: + 'GRAD' 0, + 'opsz' 16, + 'slnt' -10, + 'wdth' 75, + 'XOPQ' 96, + 'XTRA' 468, + 'YOPQ' 79, + 'YTAS' 750, + 'YTDE' -203, + 'YTFI' 738, + 'YTLC' 548, + 'YTUC' 712; width: 100%; overflow: hidden; text-overflow: clip; white-space: nowrap; - margin-top: -25px; //Pull up the in/out words a bit - padding: 7px; + padding: 5px; + padding-left: 2%; + padding-right: 2%; .separation-line { width: 100%; height: 1px; background-color: #5b5b5b; - margin-bottom: 5px; + margin-bottom: 7px; } .in-words, @@ -201,7 +334,7 @@ } .out-words { - direction: rtl; + text-align: right; } } diff --git a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx index b07d940678..5c1c3a35d0 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx +++ b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContent.tsx @@ -1,5 +1,4 @@ import React from 'react' -import { PreviewContent } from './PreviewPopUpContext.js' import { WarningIconSmall } from '../../lib/ui/icons/notifications.js' import { translateMessage } from '@sofie-automation/corelib/dist/TranslatableMessage' import { TFunction, useTranslation } from 'react-i18next' @@ -11,9 +10,11 @@ import { RundownUtils } from '../../lib/rundown.js' import { PieceInstancePiece } from '@sofie-automation/corelib/dist/dataModel/PieceInstance' import { ReadonlyObjectDeep } from 'type-fest/source/readonly-deep' import { PieceLifespan } from '@sofie-automation/blueprints-integration' +import { LayerInfoPreview } from './Previews/LayerInfoPreview.js' +import { PreviewContentUI } from './PreviewPopUpContext.js' interface PreviewPopUpContentProps { - content: PreviewContent + content: PreviewContentUI time: number | null } @@ -38,7 +39,6 @@ export function PreviewPopUpContent({ content, time }: 
PreviewPopUpContentProps) case 'inOutWords': return ( <div className="preview-popUp__in-out-words">
-
<div className="in-words">{content.in}</div> <div className="out-words">{content.out}</div> </div>
@@ -59,6 +59,10 @@ export function PreviewPopUpContent({ content, time }: PreviewPopUpContentProps)
) + case 'layerInfo': return <LayerInfoPreview {...content} /> + case 'separationLine': return <hr className="preview-popup__separation-line" />
case 'boxLayout': return case 'warning': @@ -108,17 +112,17 @@ function getDurationText( function getLifeSpanText(t: TFunction, lifespan: PieceLifespan): string { switch (lifespan) { case PieceLifespan.WithinPart: - return t('Until next take') + return t('Until Next Take') case PieceLifespan.OutOnSegmentChange: - return t('Until next segment') + return t('Until Next Segment') case PieceLifespan.OutOnSegmentEnd: - return t('Until end of segment') + return t('Until End of Segment') case PieceLifespan.OutOnRundownChange: - return t('Until next rundown') + return t('Until Next Rundown') case PieceLifespan.OutOnRundownEnd: - return t('Until end of rundown') + return t('Until End of Rundown') case PieceLifespan.OutOnShowStyleEnd: - return t('Until end of showstyle') + return t('Until End of Showstyle') default: return '' } diff --git a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx index 53459b8aeb..9e446287d3 100644 --- a/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx +++ b/packages/webui/src/client/ui/PreviewPopUp/PreviewPopUpContext.tsx @@ -6,6 +6,7 @@ import { JSONBlobParse, NoraPayload, PieceLifespan, + PreviewContent, PreviewType, ScriptContent, SourceLayerType, @@ -33,11 +34,11 @@ export function convertSourceLayerItemToPreview( item: ReadonlyObjectDeep | IAdLibListItem, contentStatus?: ReadonlyObjectDeep, timeAsRendered?: { in?: number | null; dur?: number | null } -): { contents: PreviewContent[]; options: Readonly> } { +): { contents: PreviewContentUI[]; options: Readonly> } { // first try to read the popup preview if (item.content.popUpPreview) { const popupPreview = item.content.popUpPreview - const contents: PreviewContent[] = [] + const contents: PreviewContentUI[] = [] const options: Partial = {} if (popupPreview.name) { @@ -99,6 +100,7 @@ export function convertSourceLayerItemToPreview( break case PreviewType.VT: if (popupPreview.preview.outWords) { + contents.push({ type: 'separationLine' }) contents.push({ type: 'inOutWords', in: popupPreview.preview.inWords, @@ -120,10 +122,14 @@ export function convertSourceLayerItemToPreview( } break } + // Add any additional preview content to the popup: + popupPreview.additionalPreviewContent?.forEach((content) => { + contents.push(content as PreviewContentUI) + }) } if (popupPreview.warnings) { - contents.push(...popupPreview.warnings.map((w): PreviewContent => ({ type: 'warning', content: w.reason }))) + contents.push(...popupPreview.warnings.map((w): PreviewContentUI => ({ type: 'warning', content: w.reason }))) } return { contents, options } @@ -136,7 +142,7 @@ export function convertSourceLayerItemToPreview( const content = item.content as VTContent return { - contents: _.compact<(PreviewContent | undefined)[]>([ + contents: _.compact<(PreviewContentUI | undefined)[]>([ { type: 'title', content: content.fileName, @@ -159,11 +165,11 @@ export function convertSourceLayerItemToPreview( src: contentStatus.thumbnailUrl, } : undefined, - ...(contentStatus?.messages?.map((m) => ({ + ...(contentStatus?.messages?.map((m) => ({ type: 'warning', content: m as any, })) || []), - ]) as PreviewContent[], + ]) as PreviewContentUI[], options: { size: contentStatus?.previewUrl ? 
'large' : undefined, }, @@ -220,7 +226,7 @@ export function convertSourceLayerItemToPreview( current: item.content.step.current, count: item.content.step.count, }, - ]) as PreviewContent[], + ]) as PreviewContentUI[], options: { size: 'large' }, } } catch (e) { @@ -237,7 +243,7 @@ export function convertSourceLayerItemToPreview( current: item.content.step.current, count: item.content.step.count, }, - ]) as PreviewContent[], + ]) as PreviewContentUI[], options: {}, } } @@ -287,43 +293,9 @@ export function convertSourceLayerItemToPreview( return { contents: [], options: {} } } - -export type PreviewContent = - | { - type: 'iframe' - href: string - postMessage?: any - dimensions?: { width: number; height: number } - } - | { - type: 'image' - src: string - } - | { - type: 'video' - src: string - } - | { - type: 'script' - script?: string - firstWords?: string - lastWords?: string - comment?: string - lastModified?: number - } - | { - type: 'title' - content: string - } - | { - type: 'inOutWords' - in?: string - out: string - } - | { - type: 'data' - content: { key: string; value: string }[] - } +// PreviewContentUI should be the same as PreviewContent, but we need to extend it with some more types: +export type PreviewContentUI = + | PreviewContent | { type: 'boxLayout' boxSourceConfiguration: ReadonlyDeep<(SplitsContentBoxContent & SplitsContentBoxProperties)[]> @@ -351,7 +323,7 @@ export interface IPreviewPopUpSession { * Update the open preview with new content or modify the content already being previewed, such as change current showing * time in the video, etc. */ - readonly update: (content?: PreviewContent[]) => void + readonly update: (content?: PreviewContentUI[]) => void /** * Set the time that the current pointer position is representing in the scope of the preview contents */ @@ -390,7 +362,7 @@ export interface IPreviewPopUpContext { */ requestPreview( anchor: HTMLElement | VirtualElement, - content: PreviewContent[], + content: PreviewContentUI[], opts?: PreviewRequestOptions ): IPreviewPopUpSession } @@ -415,7 +387,7 @@ export function PreviewPopUpContextProvider({ children }: React.PropsWithChildre const previewRef = useRef(null) const [previewSession, setPreviewSession] = useState(null) - const [previewContent, setPreviewContent] = useState(null) + const [previewContent, setPreviewContent] = useState(null) const [t, setTime] = useState(null) const context: IPreviewPopUpContext = { diff --git a/packages/webui/src/client/ui/PreviewPopUp/Previews/LayerInfoPreview.tsx b/packages/webui/src/client/ui/PreviewPopUp/Previews/LayerInfoPreview.tsx new file mode 100644 index 0000000000..cc87ee3a49 --- /dev/null +++ b/packages/webui/src/client/ui/PreviewPopUp/Previews/LayerInfoPreview.tsx @@ -0,0 +1,53 @@ +import { PreviewContent } from '@sofie-automation/blueprints-integration' +import { RundownUtils } from '../../../lib/rundown' +import { useTranslation } from 'react-i18next' +import classNames from 'classnames' + +type layerInfoContent = Extract + +export function LayerInfoPreview(content: layerInfoContent): React.ReactElement { + const { t } = useTranslation() + const sourceLayerClassName = + content.layerType !== undefined ? RundownUtils.getSourceLayerClassName(content.layerType) : undefined + + return ( +
+
+
+ {content.text.map((line, index) => ( +
+ {line} +
+ ))} +
+ {content.inTime !== undefined && ( + <> + {t('IN')}: + {typeof content.inTime === 'number' + ? RundownUtils.formatTimeToShortTime(content.inTime || 0) + : content.inTime} + + )} +  {' '} + {content.duration !== undefined && ( + <> + {t('DURATION')}: + {typeof content.duration === 'number' + ? RundownUtils.formatTimeToShortTime(content.duration || 0) + : content.duration} + + )} +  {' '} + {content.outTime !== undefined && ( + <> + {t('OUT')}: + {typeof content.outTime === 'number' + ? RundownUtils.formatTimeToShortTime(content.outTime || 0) + : content.outTime} + + )} +
+
+
+ ) +} diff --git a/packages/webui/src/client/ui/Prompter/prompter.ts b/packages/webui/src/client/ui/Prompter/prompter.ts index 03e9a80975..4aaf429d31 100644 --- a/packages/webui/src/client/ui/Prompter/prompter.ts +++ b/packages/webui/src/client/ui/Prompter/prompter.ts @@ -15,7 +15,10 @@ import { DBSegment } from '@sofie-automation/corelib/dist/dataModel/Segment' import { RundownUtils } from '../../lib/rundown.js' import { RundownPlaylistClientUtil } from '../../lib/rundownPlaylistUtil.js' import { SourceLayers } from '@sofie-automation/corelib/dist/dataModel/ShowStyleBase' -import { processAndPrunePieceInstanceTimings } from '@sofie-automation/corelib/dist/playout/processAndPrune' +import { + createPartCurrentTimes, + processAndPrunePieceInstanceTimings, +} from '@sofie-automation/corelib/dist/playout/processAndPrune' import _ from 'underscore' import { FindOptions } from '../../collections/lib.js' import { RundownPlaylistCollectionUtil } from '../../collections/rundownPlaylistUtil.js' @@ -23,6 +26,7 @@ import { normalizeArrayToMap, protectString } from '../../lib/tempLib.js' import { PieceInstances, Pieces, RundownPlaylists, Segments } from '../../collections/index.js' import { getPieceInstancesForPartInstance } from '../../lib/RundownResolver.js' import { UIShowStyleBases } from '../Collections.js' +import { getCurrentTime } from '../../lib/systemTime.js' // export interface NewPrompterAPI { // getPrompterData (playlistId: RundownPlaylistId): Promise @@ -149,7 +153,7 @@ export namespace PrompterAPI { let previousRundown: Rundown | null = null const rundownIds = rundowns.map((rundown) => rundown._id) - const allPiecesCache = new Map() + const allPiecesCache = new Map() Pieces.find({ startRundownId: { $in: rundownIds }, }).forEach((piece) => { @@ -243,10 +247,11 @@ export namespace PrompterAPI { const sourceLayers = rundownIdsToShowStyleBase.get(partInstance.rundownId) if (sourceLayers) { + const partTimes = createPartCurrentTimes(getCurrentTime(), null) const preprocessedPieces = processAndPrunePieceInstanceTimings( sourceLayers, rawPieceInstances, - 0, + partTimes, true ) diff --git a/packages/webui/src/client/ui/RundownView.tsx b/packages/webui/src/client/ui/RundownView.tsx index 59822a4031..8ec3154c7d 100644 --- a/packages/webui/src/client/ui/RundownView.tsx +++ b/packages/webui/src/client/ui/RundownView.tsx @@ -27,7 +27,7 @@ import { maintainFocusOnPartInstance, scrollToPartInstance, getHeaderHeight, -} from '../lib/viewPort' +} from '../lib/viewPort.js' import { AfterBroadcastForm } from './AfterBroadcastForm.js' import { RundownRightHandControls } from './RundownView/RundownRightHandControls.js' import { PeripheralDevicesAPI } from '../lib/clientAPI.js' @@ -38,7 +38,7 @@ import { } from './RundownView/RundownNotifier.js' import { NotificationCenterPanel } from '../lib/notifications/NotificationCenterPanel.js' import { NotificationCenter, NoticeLevel, Notification } from '../lib/notifications/notifications.js' -import { SupportPopUp } from './SupportPopUp' +import { SupportPopUp } from './SupportPopUp.js' import { KeyboardFocusIndicator } from '../lib/KeyboardFocusIndicator.js' import { PeripheralDeviceType } from '@sofie-automation/corelib/dist/dataModel/PeripheralDevice' import { doUserAction, UserAction } from '../lib/clientUserAction.js' @@ -111,6 +111,7 @@ import { useMiniShelfAdlibsData } from './RundownView/useQueueMiniShelfAdlib.js' import { RundownViewContextProviders } from './RundownView/RundownViewContextProviders.js' import { AnimatePresence } from 'motion/react' import 
{ UserError } from '@sofie-automation/corelib/dist/error' +import { DragContextProvider } from './RundownView/DragContextProvider.js' const HIDE_NOTIFICATIONS_AFTER_MOUNT: number | undefined = 5000 @@ -500,9 +501,14 @@ const RundownViewContent = translateWithTracker { - if (!error.toString().match(/another scroll/)) console.warn(error) - }) + // add small delay to ensure the nextPartInfo is available + setTimeout(() => { + if (this.props.playlist && this.props.playlist.nextPartInfo) { + scrollToPartInstance(this.props.playlist.nextPartInfo.partInstanceId).catch((error) => { + if (!error.toString().match(/another scroll/)) console.warn(error) + }) + } + }, 120) } else if ( // after take this.props.playlist && @@ -639,24 +645,36 @@ const RundownViewContent = translateWithTracker { - if (this.state.followLiveSegments && this.props.playlist && this.props.playlist.activationId) { - const liveSegmentComponent = document.querySelector('.segment-timeline.live') - if (liveSegmentComponent) { - const offsetPosition = liveSegmentComponent.getBoundingClientRect() - // if it's closer to the top edge than the headerHeight - const segmentComponentTooHigh = offsetPosition.top < getHeaderHeight() - // or if it's closer to the bottom edge than very close to the top - const segmentComponentTooLow = - offsetPosition.bottom < window.innerHeight - getHeaderHeight() - 20 - (offsetPosition.height * 3) / 2 - if (segmentComponentTooHigh || segmentComponentTooLow) { - this.setState({ - followLiveSegments: false, - }) + private onWheelScrollInner = _.throttle( + () => { + if (this.state.followLiveSegments && this.props.playlist && this.props.playlist.activationId) { + const liveSegmentComponent = document.querySelector('.segment-timeline.live') + if (liveSegmentComponent) { + const offsetPosition = liveSegmentComponent.getBoundingClientRect() + const headerHeight = getHeaderHeight() + + // Use a buffer zone to prevent oscillation + const topBuffer = headerHeight + 10 + const bottomBuffer = window.innerHeight - headerHeight - 20 - (offsetPosition.height * 3) / 2 + + // Check if segment is outside the comfortable viewing area + const segmentComponentTooHigh = offsetPosition.top < topBuffer + const segmentComponentTooLow = offsetPosition.bottom < bottomBuffer + + if (segmentComponentTooHigh || segmentComponentTooLow) { + // Only change state if we need to + if (this.state.followLiveSegments) { + this.setState({ + followLiveSegments: false, + }) + } + } } } - } - }, 250) + }, + 100, + { leading: true, trailing: true } + ) private onWheel = (e: React.WheelEvent) => { if (e.deltaX === 0 && e.deltaY !== 0 && !e.altKey && !e.shiftKey && !e.ctrlKey && !e.metaKey) { @@ -689,9 +707,14 @@ const RundownViewContent = translateWithTracker { - if (!error.toString().match(/another scroll/)) console.warn(error) - }) + // Small delay to ensure the nextPartInfo is available + setTimeout(() => { + if (this.props.playlist && this.props.playlist.nextPartInfo) { + scrollToPartInstance(this.props.playlist.nextPartInfo.partInstanceId, true).catch((error) => { + if (!error.toString().match(/another scroll/)) console.warn(error) + }) + } + }, 120) setTimeout(() => { this.setState({ followLiveSegments: true, @@ -1324,213 +1347,216 @@ const RundownViewContent = translateWithTracker - - {(selectionContext) => { - return ( -
0, - })} - style={this.getStyle()} - onWheelCapture={this.onWheel} - onContextMenu={this.onContextMenuTop} - > - {this.renderSegmentsList()} - - {this.props.matchedSegments && - this.props.matchedSegments.length > 0 && - this.props.userPermissions.studio && - studio.settings.enableEvaluationForm && } - - {!this.props.hideRundownHeader && ( + + + {(selectionContext) => { + return ( +
0, + })} + style={this.getStyle()} + onWheelCapture={this.onWheel} + onContextMenu={this.onContextMenuTop} + > + {this.renderSegmentsList()} - 0 && + this.props.userPermissions.studio && + studio.settings.enableEvaluationForm && } + + {!this.props.hideRundownHeader && ( + + r._id)} + firstRundown={this.props.rundowns[0]} + onActivate={this.onActivate} + inActiveRundownView={this.props.inActiveRundownView} + currentRundown={currentRundown} + layout={this.props.selectedHeaderLayout} + showStyleBase={showStyleBase} + showStyleVariant={showStyleVariant} + /> + + )} + + r._id)} - firstRundown={this.props.rundowns[0]} - onActivate={this.onActivate} - inActiveRundownView={this.props.inActiveRundownView} - currentRundown={currentRundown} - layout={this.props.selectedHeaderLayout} showStyleBase={showStyleBase} showStyleVariant={showStyleVariant} + onChangeBottomMargin={this.onChangeBottomMargin} + rundownLayout={this.props.selectedShelfLayout} + studio={studio} /> - )} - - - - - {this.props.userPermissions.studio && !Settings.disableBlurBorder && ( - -
-
- )} -
- - - - - {this.props.userPermissions.studio && currentRundown && ( - + {this.props.userPermissions.studio && !Settings.disableBlurBorder && ( + +
+
+ )} +
+ + - )} - - - - {this.state.isNotificationsCenterOpen && ( - + + {this.props.userPermissions.studio && currentRundown && ( + )} - {!this.state.isNotificationsCenterOpen && selectionContext.listSelectedElements().length > 0 && ( -
- -
- )} - - {this.state.isSupportPanelOpen && ( - -
- -
- - {t('Take a Snapshot')} - -
- {this.props.userPermissions.studio && ( - <> - -
- +
+ + + {this.state.isNotificationsCenterOpen && ( + + )} + {!this.state.isNotificationsCenterOpen && + selectionContext.listSelectedElements().length > 0 && ( +
+ +
)} - {this.props.userPermissions.studio && } - + + {this.state.isSupportPanelOpen && ( + +
+ +
+ + {t('Take a Snapshot')} + +
+ {this.props.userPermissions.studio && ( + <> + +
+ + )} + {this.props.userPermissions.studio && } +
+ )} +
+
+ + {this.props.userPermissions.studio && ( + )} -
-
- - {this.props.userPermissions.studio && ( - + + selectionContext.clearAndSetSelection(selection)} + studioMode={this.props.userPermissions.studio} + enablePlayFromAnywhere={!!studio.settings.enablePlayFromAnywhere} + enableQuickLoop={!!studio.settings.enableQuickLoop} + enableUserEdits={!!studio.settings.enableUserEdits} /> - )} - - - selectionContext.clearAndSetSelection(selection)} - studioMode={this.props.userPermissions.studio} - enablePlayFromAnywhere={!!studio.settings.enablePlayFromAnywhere} - enableQuickLoop={!!studio.settings.enableQuickLoop} - enableUserEdits={!!studio.settings.enableUserEdits} - /> - - - {this.state.isClipTrimmerOpen && - this.state.selectedPiece && - RundownUtils.isPieceInstance(this.state.selectedPiece) && - (selectedPieceRundown === undefined ? ( - this.setState({ selectedPiece: undefined })} - title={t('Rundown not found')} - acceptText={t('Close')} - > - {t('Rundown for piece "{{pieceLabel}}" could not be found.', { - pieceLabel: this.state.selectedPiece.instance.piece.name, - })} - - ) : ( - this.setState({ isClipTrimmerOpen: false })} - /> - ))} - - - - - - {this.props.playlist && this.props.studio && this.props.showStyleBase && ( - - )} - -
- ) - }} - { - // USE IN CASE OF DEBUGGING EMERGENCY - /* getDeveloperMode() &&
-
*/ - } -
+ + + {this.state.isClipTrimmerOpen && + this.state.selectedPiece && + RundownUtils.isPieceInstance(this.state.selectedPiece) && + (selectedPieceRundown === undefined ? ( + this.setState({ selectedPiece: undefined })} + title={t('Rundown not found')} + acceptText={t('Close')} + > + {t('Rundown for piece "{{pieceLabel}}" could not be found.', { + pieceLabel: this.state.selectedPiece.instance.piece.name, + })} + + ) : ( + this.setState({ isClipTrimmerOpen: false })} + /> + ))} + + + + + + {this.props.playlist && this.props.studio && this.props.showStyleBase && ( + + )} + +
+ ) + }} + { + // USE IN CASE OF DEBUGGING EMERGENCY + /* getDeveloperMode() &&
+
*/ + } +
+ ) } diff --git a/packages/webui/src/client/ui/RundownView/DragContext.ts b/packages/webui/src/client/ui/RundownView/DragContext.ts new file mode 100644 index 0000000000..dff67253e5 --- /dev/null +++ b/packages/webui/src/client/ui/RundownView/DragContext.ts @@ -0,0 +1,44 @@ +import { PartInstanceId, PieceInstanceId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { createContext } from 'react' +import { PieceUi } from '../SegmentContainer/withResolvedSegment' + +export interface IDragContext { + /** + * Indicate a drag operation on a piece has started + * @param piece The piece that is being dragged + * @param timeScale The current TimeScale of the segment + * @param position The position of the mouse + * @param elementOffset The x-coordinate of the element relative to the mouse position + * @param limitToSegment Whether the piece can be dragged to other segments (note: if the other segment does not have the right source layer the piece will look to have disappeared... consider omitting this is a todo) + */ + startDrag: ( + piece: PieceUi, + timeScale: number, + position: { x: number; y: number }, + elementOffset?: number, + limitToSegment?: SegmentId + ) => void + /** + * Indicate the part the mouse is on has changed + * @param partId The part id that the mouse is currently hovering on + * @param segmentId The segment the part currenly hover is in + * @param position The position of the part in absolute coords to the screen + */ + setHoveredPart: (partId: PartInstanceId, segmentId: SegmentId, position: { x: number; y: number }) => void + + /** + * Whether dragging is enabled + */ + enabled: boolean + + /** + * PieceId of the piece that is being dragged + */ + pieceId: undefined | PieceInstanceId + /** + * The piece with any local overrides coming from dragging it around (i.e. changed renderedInPoint) + */ + piece: undefined | PieceUi +} + +export const dragContext = createContext(undefined) // slay. 
diff --git a/packages/webui/src/client/ui/RundownView/DragContextProvider.tsx b/packages/webui/src/client/ui/RundownView/DragContextProvider.tsx new file mode 100644 index 0000000000..85491788ba --- /dev/null +++ b/packages/webui/src/client/ui/RundownView/DragContextProvider.tsx @@ -0,0 +1,172 @@ +import { PartInstanceId, PieceInstanceId, SegmentId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { PropsWithChildren, useCallback, useEffect, useRef, useState } from 'react' +import { dragContext, IDragContext } from './DragContext.js' +import { PieceUi } from '../SegmentContainer/withResolvedSegment.js' +import { doUserAction, UserAction } from '../../lib/clientUserAction.js' +import { MeteorCall } from '../../lib/meteorApi.js' +import { TFunction } from 'i18next' +import { UIParts } from '../Collections.js' +import { Segments } from '../../collections/index.js' +import { literal } from '../../lib/tempLib.js' +import { DefaultUserOperationRetimePiece, DefaultUserOperationsTypes } from '@sofie-automation/blueprints-integration' +import RundownViewEventBus, { + RundownViewEvents, + EditModeEvent, +} from '@sofie-automation/meteor-lib/dist/triggers/RundownViewEventBus' + +const DRAG_TIMEOUT = 10000 + +interface Props { + t: TFunction +} + +// notes: this doesn't limit dragging between rundowns right now but I'm not sure if the ingest stage will be happy with that - mint +export function DragContextProvider({ t, children }: PropsWithChildren): JSX.Element { + const [pieceId, setPieceId] = useState(undefined) + const [piece, setPiece] = useState(undefined) + + const [enabled, setEnabled] = useState(false) + + const partIdRef = useRef(undefined) + const positionRef = useRef({ x: 0, y: 0 }) + const segmentIdRef = useRef(undefined) + + const startDrag = ( + ogPiece: PieceUi, + timeScale: number, + pos: { x: number; y: number }, + elementOffset?: number, + limitToSegment?: SegmentId + ) => { + if (pieceId) return // a drag is currently in progress.... + + const inPoint = ogPiece.renderedInPoint ?? 0 + segmentIdRef.current = limitToSegment + positionRef.current = pos + setPieceId(ogPiece.instance._id) + + let localPiece = ogPiece // keep a copy of the overriden piece because react does not let us access the state of the context easily + + const onMove = (e: MouseEvent) => { + const newInPoint = + (!partIdRef.current ? inPoint : (elementOffset ?? 0) / timeScale) + + (e.clientX - positionRef.current.x) / timeScale + + localPiece = { + ...ogPiece, + instance: { ...ogPiece.instance, partInstanceId: partIdRef.current ?? ogPiece.instance.partInstanceId }, + renderedInPoint: newInPoint, + } + setPiece(localPiece) + } + + const cleanup = () => { + // unset state - note: for ux reasons this runs after the backend operation has returned a result + setPieceId(undefined) + setPiece(undefined) + partIdRef.current = undefined + segmentIdRef.current = undefined + } + + const onMouseUp = (e: MouseEvent) => { + // detach from the mouse + document.removeEventListener('mousemove', onMove) + document.removeEventListener('mouseup', onMouseUp) + + // process the drag + if (!localPiece || localPiece.renderedInPoint === ogPiece.renderedInPoint) return cleanup() + + // find the parts so we can get their externalId + const startPartId = localPiece.instance.piece.startPartId // this could become a funny thing with infinites + const part = startPartId ? UIParts.findOne(startPartId) : undefined + const oldPart = + startPartId === ogPiece.instance.piece.startPartId + ? part + : ogPiece.instance.piece.startPartId + ? 
UIParts.findOne(ogPiece.instance.piece.startPartId) + : undefined + if (!part) return cleanup() // tough to continue without a parent for the piece + + // find the Segment's External ID + const segment = Segments.findOne(part?.segmentId) + const oldSegment = part?.segmentId === oldPart?.segmentId ? segment : Segments.findOne(oldPart?.segmentId) + if (!segment) return + + const operationTarget = { + segmentExternalId: oldSegment?.externalId, + partExternalId: oldPart?.externalId, + pieceExternalId: ogPiece.instance.piece.externalId, + } + doUserAction( + t, + e, + UserAction.EXECUTE_USER_OPERATION, + (e, ts) => + MeteorCall.userAction.executeUserChangeOperation( + e, + ts, + part.rundownId, + operationTarget, + literal({ + id: DefaultUserOperationsTypes.RETIME_PIECE, + payload: { + segmentExternalId: segment.externalId, + partExternalId: part.externalId, + + inPoint: localPiece.renderedInPoint ?? inPoint, + }, + }) + ), + () => { + cleanup() + } + ) + } + + document.addEventListener('mousemove', onMove) + document.addEventListener('mouseup', onMouseUp) + + setTimeout(() => { + // after the timeout we want to bail out in case something went wrong + document.removeEventListener('mousemove', onMove) + document.removeEventListener('mouseup', onMouseUp) + + cleanup() + }, DRAG_TIMEOUT) + } + const setHoveredPart = (updatedPartId: PartInstanceId, segmentId: SegmentId, pos: { x: number; y: number }) => { + if (!pieceId) return + if (updatedPartId === piece?.instance.partInstanceId) return + if (segmentIdRef.current && segmentIdRef.current !== segmentId) return + + partIdRef.current = updatedPartId + positionRef.current = pos + } + + const onSetEditMode = useCallback((e: EditModeEvent) => { + if (e.state === 'toggle') { + setEnabled((s) => !s) + } else { + setEnabled(e.state) + } + }, []) + + useEffect(() => { + RundownViewEventBus.on(RundownViewEvents.EDIT_MODE, onSetEditMode) + return () => { + RundownViewEventBus.off(RundownViewEvents.EDIT_MODE, onSetEditMode) + } + }) + + const ctx = literal({ + pieceId, + piece, + + enabled, + + startDrag, + setHoveredPart, + }) + + return {children} +} diff --git a/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx b/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx index 3a573d3337..775d55c326 100644 --- a/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx +++ b/packages/webui/src/client/ui/RundownView/RundownRightHandControls.tsx @@ -183,20 +183,22 @@ export function RundownRightHandControls(props: Readonly): JSX.Element { > - {!props.isFollowingOnAir && ( - - )} +
+ {!props.isFollowingOnAir && ( + + )} +
diff --git a/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx b/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx index 3bdc731715..fdbf8b0f75 100644 --- a/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx +++ b/packages/webui/src/client/ui/RundownView/SelectedElementsContext.tsx @@ -221,7 +221,7 @@ export function useSelectedElements( const computation = Tracker.nonreactive(() => Tracker.autorun(() => { const piece = Pieces.findOne(selectedElement?.elementId) - const part = UIParts.findOne({ _id: piece ? piece.startPartId : selectedElement?.elementId }) + const part = UIParts.findOne({ _id: piece?.startPartId ?? selectedElement?.elementId }) const segment = Segments.findOne({ _id: part ? part.segmentId : selectedElement?.elementId }) setPiece(piece) diff --git a/packages/webui/src/client/ui/SegmentTimeline/Parts/SourceLayer.tsx b/packages/webui/src/client/ui/SegmentTimeline/Parts/SourceLayer.tsx index bbeb48211f..8394ac6b61 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/Parts/SourceLayer.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/Parts/SourceLayer.tsx @@ -1,4 +1,4 @@ -import React, { useCallback, useState } from 'react' +import React, { MouseEventHandler, useCallback, useContext, useState } from 'react' import _ from 'underscore' import { DBRundownPlaylist } from '@sofie-automation/corelib/dist/dataModel/RundownPlaylist' import { literal, protectString, unprotectString } from '../../../lib/tempLib.js' @@ -11,6 +11,7 @@ import { SourceLayerItemContainer } from '../SourceLayerItemContainer.js' import { contextMenuHoldToDisplayTime } from '../../../lib/lib.js' import { UIStudio } from '@sofie-automation/meteor-lib/dist/api/studios' import { PieceInstanceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { dragContext } from '../../RundownView/DragContext.js' export interface ISourceLayerPropsBase { key: string @@ -90,6 +91,19 @@ export function useMouseContext(props: ISourceLayerPropsBase): { export function SourceLayer(props: Readonly): JSX.Element { const { getPartContext, onMouseDown } = useMouseContext(props) + const dragCtx = useContext(dragContext) + + const pieces = + dragCtx?.piece && dragCtx.piece.sourceLayer?._id === props.layer._id + ? (props.layer.pieces ?? []).filter((p) => p.instance._id !== dragCtx.piece?.instance._id).concat(dragCtx.piece) + : props.layer.pieces + + const onMouseEnter: MouseEventHandler = (e) => { + if (!dragCtx) return + + const pos = (e.target as HTMLDivElement).getBoundingClientRect() // ugly cast here because the event handler doesn't cast for us + dragCtx.setHoveredPart(props.part.instance._id, props.segment._id, { x: pos.x, y: pos.y }) + } return ( ): JSX.Element { //@ts-expect-error A Data attribue is perfectly fine 'data-layer-id': props.layer._id, onMouseDownCapture: (e) => onMouseDown(e), + onMouseEnter, role: 'log', 'aria-live': 'assertive', 'aria-label': props.layer.name, @@ -106,9 +121,9 @@ export function SourceLayer(props: Readonly): JSX.Element { holdToDisplay={contextMenuHoldToDisplayTime()} collect={getPartContext} > - {props.layer.pieces !== undefined + {pieces !== undefined ? _.chain( - props.layer.pieces.filter((piece) => { + pieces.filter((piece) => { // filter only pieces belonging to this part return piece.instance.partInstanceId === props.part.instance._id ? 
// filter only pieces, that have not been hidden from the UI diff --git a/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx b/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx index ef310ed7a4..b1368e9c47 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/Renderers/TransitionSourceRenderer.tsx @@ -1,7 +1,5 @@ import { getElementWidth } from '../../../utils/dimensions.js' - import { TransitionContent } from '@sofie-automation/blueprints-integration' - import { CustomLayerItemRenderer, ICustomLayerItemProps } from './CustomLayerItemRenderer.js' import { createPrivateApiPath } from '../../../url.js' diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx index 0118421bd1..2123735eb6 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentContextMenu.tsx @@ -132,13 +132,13 @@ export const SegmentContextMenu = withTranslation()( {startsAt !== null && part && this.props.enablePlayFromAnywhere ? ( <> - {/* this.onSetAsNextFromHere(part.instance.part, e)} disabled={isCurrentPart || !!part.instance.orphaned || !canSetAsNext} > Next Here') }}> ( {RundownUtils.formatTimeToShortTime(Math.floor((startsAt + timecode) / 1000) * 1000)}) - */} + this.onPlayFromHere(part.instance.part, e)} disabled={!!part.instance.orphaned || !canSetAsNext} @@ -252,10 +252,10 @@ export const SegmentContextMenu = withTranslation()( } } - // private onSetAsNextFromHere = (part: DBPart, e) => { - // const offset = this.getTimePosition() - // this.props.onSetNext(part, e, offset || 0) - // } + private onSetAsNextFromHere = (part: DBPart, e: React.MouseEvent | React.TouchEvent) => { + const offset = this.getTimePosition() + this.props.onSetNext(part, e, offset || 0) + } private onPlayFromHere = (part: DBPart, e: React.MouseEvent | React.TouchEvent) => { const offset = this.getTimePosition() diff --git a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx index db847f0e44..4b3dd394da 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SegmentTimelineContainer.tsx @@ -149,6 +149,7 @@ const SegmentTimelineContainerContent = withResolvedSegment( declare context: React.ContextType isVisible: boolean + visibilityChangeTimeout: NodeJS.Timeout | undefined rundownCurrentPartInstanceId: PartInstanceId | null = null timelineDiv: HTMLDivElement | null = null intersectionObserver: IntersectionObserver | undefined @@ -198,14 +199,17 @@ const SegmentTimelineContainerContent = withResolvedSegment( RundownViewEventBus.on(RundownViewEvents.REWIND_SEGMENTS, this.onRewindSegment) RundownViewEventBus.on(RundownViewEvents.GO_TO_PART, this.onGoToPart) RundownViewEventBus.on(RundownViewEvents.GO_TO_PART_INSTANCE, this.onGoToPartInstance) - window.requestAnimationFrame(() => { - this.mountedTime = Date.now() - if (this.state.isLiveSegment && this.props.followLiveSegments && !this.isVisible) { - scrollToSegment(this.props.segmentId, true).catch((error) => { - if (!error.toString().match(/another scroll/)) console.warn(error) - }) - } - }) + // Delay is to ensure UI has settled before checking: + setTimeout(() => { + 
window.requestAnimationFrame(() => { + this.mountedTime = Date.now() + if (this.state.isLiveSegment && this.props.followLiveSegments && !this.isVisible) { + scrollToSegment(this.props.segmentId, true).catch((error) => { + if (!error.toString().match(/another scroll/)) console.warn(error) + }) + } + }) + }, 500) window.addEventListener('resize', this.onWindowResize) this.updateMaxTimeScale() .then(() => this.showEntireSegment()) @@ -541,12 +545,19 @@ const SegmentTimelineContainerContent = withResolvedSegment( } visibleChanged = (entries: IntersectionObserverEntry[]) => { - if (entries[0].intersectionRatio < 0.99 && !isMaintainingFocus() && Date.now() - this.mountedTime > 2000) { - if (typeof this.props.onSegmentScroll === 'function') this.props.onSegmentScroll() - this.isVisible = false - } else { - this.isVisible = true + // Add a small debounce to ensure UI has settled before checking + if (this.visibilityChangeTimeout) { + clearTimeout(this.visibilityChangeTimeout) } + + this.visibilityChangeTimeout = setTimeout(() => { + if (entries[0].intersectionRatio < 0.99 && !isMaintainingFocus() && Date.now() - this.mountedTime > 2000) { + if (typeof this.props.onSegmentScroll === 'function') this.props.onSegmentScroll() + this.isVisible = false + } else { + this.isVisible = true + } + }, 1800) } startLive = () => { diff --git a/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx b/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx index 06e9ba6191..dc642c02a4 100644 --- a/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx +++ b/packages/webui/src/client/ui/SegmentTimeline/SourceLayerItem.tsx @@ -1,6 +1,12 @@ import * as React from 'react' import { ISourceLayerUi, IOutputLayerUi, PartUi, PieceUi } from './SegmentTimelineContainer.js' -import { SourceLayerType, PieceLifespan, IBlueprintPieceType } from '@sofie-automation/blueprints-integration' +import { + SourceLayerType, + PieceLifespan, + IBlueprintPieceType, + UserEditingType, + DefaultUserOperationsTypes, +} from '@sofie-automation/blueprints-integration' import { RundownUtils } from '../../lib/rundown.js' import { DefaultLayerItemRenderer } from './Renderers/DefaultLayerItemRenderer.js' import { MicSourceRenderer } from './Renderers/MicSourceRenderer.js' @@ -20,6 +26,7 @@ import { ReadonlyDeep } from 'type-fest' import { useSelectedElementsContext } from '../RundownView/SelectedElementsContext.js' import { PieceContentStatusObj } from '@sofie-automation/corelib/dist/dataModel/PieceContentStatus' import { useCallback, useRef, useState, useEffect, useContext } from 'react' +import { dragContext } from '../RundownView/DragContext.js' import { convertSourceLayerItemToPreview, IPreviewPopUpSession, @@ -114,6 +121,11 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele const [leftAnchoredWidth, setLeftAnchoredWidth] = useState(0) const [rightAnchoredWidth, setRightAnchoredWidth] = useState(0) + const dragCtx = useContext(dragContext) + const hasDraggableElement = !!piece.instance.piece.userEditOperations?.find( + (op) => op.type === UserEditingType.SOFIE && op.id === DefaultUserOperationsTypes.RETIME_PIECE + ) + const state = { highlight, showPreviewPopUp, @@ -164,6 +176,9 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele ) const itemDblClick = useCallback( (e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + if (studio?.settings.enableUserEdits && !studio?.settings.allowPieceDirectPlay) { const pieceId = piece.instance.piece._id if 
(!selectElementContext.isSelected(pieceId)) { @@ -171,23 +186,34 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele } else { selectElementContext.clearSelections() } - // Until a proper data structure, the only reference is a part. - // const partId = this.props.part.instance.part._id - // if (!selectElementContext.isSelected(partId)) { - // selectElementContext.clearAndSetSelection({ type: 'part', elementId: partId }) - // } else { - // selectElementContext.clearSelections() - // } } else if (typeof onDoubleClick === 'function') { onDoubleClick(piece, e) } }, [piece] ) - const itemMouseDown = useCallback((e: React.MouseEvent) => { - e.preventDefault() - e.stopPropagation() - }, []) + const itemMouseDown = useCallback( + (e: React.MouseEvent) => { + e.preventDefault() + e.stopPropagation() + + if (!hasDraggableElement) return + + const targetPos = (e.target as HTMLDivElement).getBoundingClientRect() + if (dragCtx && dragCtx.enabled) + dragCtx.startDrag( + piece, + timeScale, + { + x: e.clientX, + y: e.clientY, + }, + targetPos.x - e.clientX, + part.instance.segmentId + ) + }, + [piece, timeScale, dragCtx] + ) const itemMouseUp = useCallback((e: any) => { const eM = e as MouseEvent if (eM.ctrlKey === true) { @@ -531,29 +557,31 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele ...props, ...state, } + // Key cannot be part of a spread operator, therefore needs to be kept out of elProps + const elKey = unprotectString(piece.instance._id) switch (layer.type) { case SourceLayerType.SCRIPT: // case SourceLayerType.MIC: - return + return case SourceLayerType.VT: case SourceLayerType.LIVE_SPEAK: - return + return case SourceLayerType.GRAPHICS: case SourceLayerType.LOWER_THIRD: case SourceLayerType.STUDIO_SCREEN: - return + return case SourceLayerType.SPLITS: - return + return case SourceLayerType.TRANSITION: // TODOSYNC: TV2 uses other renderers, to be discussed. 
- return + return case SourceLayerType.LOCAL: - return + return default: - return + return } } @@ -575,8 +603,10 @@ export const SourceLayerItem = (props: Readonly): JSX.Ele layer.type, part.partId, highlight, - elementWidth + elementWidth, // this.state + undefined, + hasDraggableElement && dragCtx?.enabled )} data-obj-id={piece.instance._id} ref={setRef} diff --git a/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx b/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx index bd81bd5f5b..f4e015b100 100644 --- a/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx +++ b/packages/webui/src/client/ui/Settings/BlueprintSettings.tsx @@ -20,10 +20,10 @@ import { MeteorCall } from '../../lib/meteorApi.js' import { BlueprintId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { Blueprints, CoreSystem, ShowStyleBases, Studios } from '../../collections/index.js' import { LabelActual } from '../../lib/Components/LabelAndOverrides.js' +import { createPrivateApiPath } from '../../url.js' import Button from 'react-bootstrap/esm/Button' import { useTranslation } from 'react-i18next' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { createPrivateApiPath } from '../../url.js' interface IProps { blueprintId: BlueprintId diff --git a/packages/webui/src/client/ui/Settings/Studio/Generic.tsx b/packages/webui/src/client/ui/Settings/Studio/Generic.tsx index dab6291688..b106f1971f 100644 --- a/packages/webui/src/client/ui/Settings/Studio/Generic.tsx +++ b/packages/webui/src/client/ui/Settings/Studio/Generic.tsx @@ -420,6 +420,16 @@ function StudioSettings({ studio }: { studio: DBStudio }): JSX.Element { > {(value, handleUpdate) => } + + + {(value, handleUpdate) => } + ) } diff --git a/packages/webui/src/client/ui/Settings/SystemManagement.tsx b/packages/webui/src/client/ui/Settings/SystemManagement.tsx index 140d195ac1..bc37fbf551 100644 --- a/packages/webui/src/client/ui/Settings/SystemManagement.tsx +++ b/packages/webui/src/client/ui/Settings/SystemManagement.tsx @@ -33,8 +33,8 @@ import { MultiLineTextInputControl, } from '../../lib/Components/MultiLineTextInput.js' import { TextInputControl } from '../../lib/Components/TextInput.js' -import Button from 'react-bootstrap/esm/Button' import { createPrivateApiPath } from '../../url.js' +import Button from 'react-bootstrap/esm/Button' interface WithCoreSystemProps { coreSystem: ICoreSystem diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx index 3220c6c371..18616962d8 100644 --- a/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx +++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/TriggeredActionsEditor.tsx @@ -29,10 +29,10 @@ import { SourceLayers, OutputLayers } from '@sofie-automation/corelib/dist/dataM import { RundownPlaylistCollectionUtil } from '../../../../collections/rundownPlaylistUtil.js' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' import { UIPartInstances, UIParts } from '../../../Collections.js' +import { createPrivateApiPath } from '../../../../url.js' import Form from 'react-bootstrap/esm/Form' import Button from 'react-bootstrap/esm/Button' import { stringifyError } from '@sofie-automation/shared-lib/dist/lib/stringifyError' -import { createPrivateApiPath } from '../../../../url.js' export interface PreviewContext { rundownPlaylist: 
DBRundownPlaylist | null

diff --git a/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx b/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx
index 0802556b74..5a2da2fc86 100644
--- a/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx
+++ b/packages/webui/src/client/ui/Settings/components/triggeredActions/actionEditors/actionSelector/ActionSelector.tsx
@@ -93,6 +93,17 @@ function getArguments(t: TFunction, action: SomeAction): string[] {
 				assertNever(action.state)
 			}
 			break
+		case ClientActions.editMode:
+			if (action.state === true) {
+				result.push(t('Enable'))
+			} else if (action.state === false) {
+				result.push(t('Disable'))
+			} else if (action.state === 'toggle') {
+				result.push(t('Toggle'))
+			} else {
+				assertNever(action.state)
+			}
+			break
 		case ClientActions.goToOnAirLine:
 			break
 		case ClientActions.rewindSegments:
@@ -147,6 +158,8 @@ function hasArguments(action: SomeAction): boolean {
 			return false
 		case ClientActions.shelf:
 			return true
+		case ClientActions.editMode:
+			return true
 		case ClientActions.goToOnAirLine:
 			return false
 		case ClientActions.rewindSegments:
@@ -193,6 +206,8 @@ function actionToLabel(t: TFunction, action: SomeAction['action']): string {
 			return t('Switch Route Set')
 		case ClientActions.shelf:
 			return t('Shelf')
+		case ClientActions.editMode:
+			return t('Edit Mode')
 		case ClientActions.rewindSegments:
 			return t('Rewind Segments to start')
 		case ClientActions.goToOnAirLine:
@@ -376,6 +391,40 @@ function getActionParametersEditor(
 				/>
 			)
+		case ClientActions.editMode:
+			return (
+				
+					
+					
+						classNames="input text-input input-m"
+						value={action.state}
+						// placeholder={t('State')}
+						options={[
+							{
+								name: t('Enable'),
+								value: true,
+								i: 0,
+							},
+							{
+								name: t('Disable'),
+								value: false,
+								i: 1,
+							},
+							{
+								name: t('Toggle'),
+								value: 'toggle',
+								i: 2,
+							},
+						]}
+						handleUpdate={(newVal) => {
+							onChange({
+								...action,
+								state: newVal,
+							})
+						}}
+					/>
+				
+ ) case ClientActions.goToOnAirLine: return null case ClientActions.rewindSegments: diff --git a/packages/webui/src/client/ui/TestTools/DeviceTriggers.tsx b/packages/webui/src/client/ui/TestTools/DeviceTriggers.tsx index 8658cf5aa3..3d8d6e0c6d 100644 --- a/packages/webui/src/client/ui/TestTools/DeviceTriggers.tsx +++ b/packages/webui/src/client/ui/TestTools/DeviceTriggers.tsx @@ -1,7 +1,5 @@ import React, { Fragment, useState } from 'react' import { useSubscription, useTracker } from '../../lib/ReactMeteorData/react-meteor-data.js' -import { Mongo } from 'meteor/mongo' -import {} from '@sofie-automation/meteor-lib/dist/api/pubsub' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { useTranslation } from 'react-i18next' import { Link, useParams } from 'react-router-dom' @@ -9,21 +7,12 @@ import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids import { DeviceTriggerMountedAction, PreviewWrappedAdLib } from '@sofie-automation/meteor-lib/dist/api/MountedTriggers' import { PeripheralDevices } from '../../collections/index.js' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' -import { - PeripheralDevicePubSub, - PeripheralDevicePubSubCollectionsNames, -} from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { PeripheralDevicePubSub } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { MountedTriggers, MountedTriggersPreviews } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' import Form from 'react-bootstrap/Form' -const MountedTriggers = new Mongo.Collection( - PeripheralDevicePubSubCollectionsNames.mountedTriggers -) -const MountedTriggersPreviews = new Mongo.Collection( - PeripheralDevicePubSubCollectionsNames.mountedTriggersPreviews -) - interface DeviceTriggersViewRouteParams { peripheralDeviceId: string } diff --git a/packages/webui/src/client/ui/TestTools/IngestRundownStatus.tsx b/packages/webui/src/client/ui/TestTools/IngestRundownStatus.tsx new file mode 100644 index 0000000000..15c916bf4e --- /dev/null +++ b/packages/webui/src/client/ui/TestTools/IngestRundownStatus.tsx @@ -0,0 +1,129 @@ +import { useSubscription, useTracker } from '../../lib/ReactMeteorData/react-meteor-data.js' +import { unprotectString } from '../../lib/tempLib.js' +import { makeTableOfObject } from '../../lib/utilComponents.js' +import { PeripheralDeviceId } from '@sofie-automation/corelib/dist/dataModel/Ids' +import { useTranslation } from 'react-i18next' +import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' +import { PeripheralDevices } from '../../collections/index.js' +import { Link } from 'react-router-dom' +import { PeripheralDeviceCategory } from '@sofie-automation/shared-lib/dist/peripheralDevice/peripheralDeviceAPI' +import { IngestRundownStatuses } from './collections.js' +import { IngestPartStatus, IngestRundownStatus } from '@sofie-automation/shared-lib/dist/ingest/rundownStatus' +import { MeteorPubSub } from '@sofie-automation/meteor-lib/dist/api/pubsub' +import Row from 'react-bootstrap/Row' +import Col from 'react-bootstrap/Col' + +interface IMappingsViewProps { + match?: { + params?: { + peripheralDeviceId: PeripheralDeviceId + } + } +} +function IngestRundownStatusView(props: Readonly): JSX.Element { + const { t } = useTranslation() + + return ( +
+			
+			
+				{t('Ingest Rundown Status')}
+			
+			
+			{props.match && props.match.params && (
+				
+			)}
+			
+ ) +} + +interface ComponentMappingsTableProps { + peripheralDeviceId: PeripheralDeviceId +} +function ComponentMappingsTable({ peripheralDeviceId }: Readonly): JSX.Element { + useSubscription(MeteorPubSub.ingestDeviceRundownStatusTestTool, peripheralDeviceId) + + const rundowns = useTracker(() => IngestRundownStatuses.find({}).fetch(), [], []) + + return ( + <> + {rundowns.map((rundown) => ( + + ))} + + ) +} + +function StatusesForRundown({ rundown }: { rundown: IngestRundownStatus }): JSX.Element { + return ( + + +

+ {rundown.externalId} ({unprotectString(rundown._id)}) +

+ +

Status: {rundown.active}

+ + + + + + + + + + + {rundown.segments.flatMap((segment) => + segment.parts.map((part) => ( + + )) + )} + +
Segment IdPart IdReadyStatusItems
+ +
+ ) +} + +interface StatusesForSegmentRowProps { + segmentId: string + part: IngestPartStatus +} +function StatusesForSegmentRow({ segmentId, part }: Readonly) { + return ( + + {segmentId} + {part.externalId} + {JSON.stringify(part.isReady)} + {part.playbackStatus} + {makeTableOfObject(part.itemsReady)} + + ) +} + +function IngestRundownStatusSelect(): JSX.Element | null { + const { t } = useTranslation() + + useSubscription(CorelibPubSub.peripheralDevices, null) + const devices = useTracker(() => PeripheralDevices.find({ category: PeripheralDeviceCategory.INGEST }).fetch(), []) + + return ( +
+
+

{t('Ingest Rundown Statuses')}

+
+
+ Peripheral Device +
    + {devices?.map((device) => ( +
  • + {device.name} +
  • + ))} +
+
+
+ ) +} + +export { IngestRundownStatusView, IngestRundownStatusSelect } diff --git a/packages/webui/src/client/ui/TestTools/Mappings.tsx b/packages/webui/src/client/ui/TestTools/Mappings.tsx index 27456c4832..38abc92b70 100644 --- a/packages/webui/src/client/ui/TestTools/Mappings.tsx +++ b/packages/webui/src/client/ui/TestTools/Mappings.tsx @@ -6,17 +6,12 @@ import { makeTableOfObject } from '../../lib/utilComponents.js' import { StudioSelect } from './StudioSelect.js' import { MappingExt } from '@sofie-automation/corelib/dist/dataModel/Studio' import { LookaheadMode, TSR } from '@sofie-automation/blueprints-integration' -import { createSyncPeripheralDeviceCustomPublicationMongoCollection } from '../../collections/lib.js' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' import { useTranslation } from 'react-i18next' +import { StudioMappings } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' -const StudioMappings = createSyncPeripheralDeviceCustomPublicationMongoCollection( - PeripheralDevicePubSubCollectionsNames.studioMappings -) - interface IMappingsViewProps { match?: { params?: { diff --git a/packages/webui/src/client/ui/TestTools/Timeline.tsx b/packages/webui/src/client/ui/TestTools/Timeline.tsx index bf8a61290c..0d46a3822b 100644 --- a/packages/webui/src/client/ui/TestTools/Timeline.tsx +++ b/packages/webui/src/client/ui/TestTools/Timeline.tsx @@ -20,18 +20,13 @@ import { useTranslation } from 'react-i18next' import { useParams } from 'react-router-dom' import { useCallback, useEffect, useMemo, useState } from 'react' import Classnames from 'classnames' -import { createSyncPeripheralDeviceCustomPublicationMongoCollection } from '../../collections/lib.js' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' -import { PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice' +import { StudioTimeline } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 'react-bootstrap/Col' import Button from 'react-bootstrap/Button' import Form from 'react-bootstrap/Form' -export const StudioTimeline = createSyncPeripheralDeviceCustomPublicationMongoCollection( - PeripheralDevicePubSubCollectionsNames.studioTimeline -) - interface TimelineViewRouteParams { studioId: string | undefined } diff --git a/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx b/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx index 797e182ea7..e1594bda33 100644 --- a/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx +++ b/packages/webui/src/client/ui/TestTools/TimelineDatastore.tsx @@ -1,17 +1,14 @@ import { useSubscription, useTracker } from '../../lib/ReactMeteorData/react-meteor-data.js' import { StudioSelect } from './StudioSelect.js' -import { Mongo } from 'meteor/mongo' -import { DBTimelineDatastoreEntry } from '@sofie-automation/corelib/dist/dataModel/TimelineDatastore' import { protectString, unprotectString } from '@sofie-automation/corelib/dist/protectedString' import { useTranslation } from 'react-i18next' import { useParams } from 'react-router-dom' import { StudioId } from '@sofie-automation/corelib/dist/dataModel/Ids' import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub' +import { TimelineDatastore } from './collections.js' import Row from 'react-bootstrap/Row' import Col from 
'react-bootstrap/Col'
 
-const TimelineDatastore = new Mongo.Collection('timelineDatastore')
-
 interface TimelineDatastoreViewRouteParams {
 	studioId: string
 }
diff --git a/packages/webui/src/client/ui/TestTools/collections.ts b/packages/webui/src/client/ui/TestTools/collections.ts
new file mode 100644
index 0000000000..528f4cc862
--- /dev/null
+++ b/packages/webui/src/client/ui/TestTools/collections.ts
@@ -0,0 +1,30 @@
+import { PeripheralDevicePubSubCollectionsNames } from '@sofie-automation/shared-lib/dist/pubsub/peripheralDevice'
+import { createSyncPeripheralDeviceCustomPublicationMongoCollection } from '../../collections/lib.js'
+
+/**
+ * These collections are not public and are for the use of the TestTools only.
+ * They are defined in this file, as hot reloading them is not supported
+ */
+
+export const IngestRundownStatuses = createSyncPeripheralDeviceCustomPublicationMongoCollection(
+	PeripheralDevicePubSubCollectionsNames.ingestRundownStatus
+)
+
+export const MountedTriggers = createSyncPeripheralDeviceCustomPublicationMongoCollection(
+	PeripheralDevicePubSubCollectionsNames.mountedTriggers
+)
+export const MountedTriggersPreviews = createSyncPeripheralDeviceCustomPublicationMongoCollection(
+	PeripheralDevicePubSubCollectionsNames.mountedTriggersPreviews
+)
+
+export const StudioMappings = createSyncPeripheralDeviceCustomPublicationMongoCollection(
+	PeripheralDevicePubSubCollectionsNames.studioMappings
+)
+
+export const StudioTimeline = createSyncPeripheralDeviceCustomPublicationMongoCollection(
+	PeripheralDevicePubSubCollectionsNames.studioTimeline
+)
+
+export const TimelineDatastore = createSyncPeripheralDeviceCustomPublicationMongoCollection(
+	PeripheralDevicePubSubCollectionsNames.timelineDatastore
+)
diff --git a/packages/webui/src/client/ui/TestTools/index.tsx b/packages/webui/src/client/ui/TestTools/index.tsx
index c51c32651e..11807423eb 100644
--- a/packages/webui/src/client/ui/TestTools/index.tsx
+++ b/packages/webui/src/client/ui/TestTools/index.tsx
@@ -7,6 +7,7 @@ import { MappingsStudioSelect, MappingsView } from './Mappings.js'
 import { TimelineDatastoreStudioSelect, TimelineDatastoreView } from './TimelineDatastore.js'
 import { DeviceTriggersDeviceSelect, DeviceTriggersView } from './DeviceTriggers.js'
 import { CorelibPubSub } from '@sofie-automation/corelib/dist/pubsub'
+import { IngestRundownStatusSelect, IngestRundownStatusView } from './IngestRundownStatus.js'
 import Row from 'react-bootstrap/Row'
 import Col from 'react-bootstrap/Col'
 import Container from 'react-bootstrap/esm/Container'
@@ -44,6 +45,13 @@ function StatusMenu() {
 				>
 					{t('Device Triggers')}
 				
+				
+					
+					
+				
+					{t('Ingest Rundown Statuses')}
+				
+				
 			
 		)
 	}
@@ -71,6 +79,8 @@ export default function Status(): JSX.Element {
 					{' '}
+					
+					
 				
diff --git a/packages/webui/vite.config.mts b/packages/webui/vite.config.mts
index 17c1524bd5..8472c6d4b3 100644
--- a/packages/webui/vite.config.mts
+++ b/packages/webui/vite.config.mts
@@ -41,6 +41,13 @@ export default defineConfig(({ command }) => ({
 			// Add all sofie paths, ensuring they use unix path syntax
 			...commonJsPaths.map((p) => p.replaceAll('\\', '/')),
 
+			// Commonjs monorepo dependencies
+			'@sofie-automation/blueprints-integration',
+		],
+		exclude: [
+			// Add all sofie paths, ensuring they use unix path syntax
+			...commonJsPaths.map((p) => p.replaceAll('\\', '/')),
+
 			// Commonjs monorepo dependencies
 			'@sofie-automation/blueprints-integration',
 		],